{
"source": "jeremynowell/django-simple-saga-task-manager",
"score": 3
}
#### File: django-simple-saga-task-manager/simple_saga_task_manager/__init__.py
```python
import sys
def inject_default_settings(name):
'''
Inject application default settings into config if not
already defined.
'''
try:
__import__('%s.settings' % name)
# Import this app defaults
app_settings = sys.modules['%s.settings' % name]
default_settings = sys.modules['django.conf.global_settings']
settings = sys.modules['django.conf'].settings
# Get our defaults
for k in dir(app_settings):
if k.isupper():
# Add to Django defaults
setattr(default_settings, k, getattr(app_settings, k))
# Add to settings if not defined
if not hasattr(settings, k):
setattr(settings, k, getattr(app_settings, k))
except ImportError:
# Skip failures
pass
inject_default_settings(__name__)
```
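The module above uses a common Django pattern: at import time it copies every upper-case attribute of the app's own `settings` module into `django.conf.global_settings`, and into the active settings object wherever the project has not already defined that name. A minimal sketch of what such an app-level settings module could look like (the values and the lower-case name are illustrative assumptions; the `SAGA_*` names are the ones exercised by the tests below):

```python
# Hypothetical simple_saga_task_manager/settings.py -- values are placeholders.
# Every UPPER_CASE name here is injected by inject_default_settings() unless
# the Django project has already defined it in its own settings module.
SAGA_LOCAL_WORKING_DIR = '/tmp/saga/local'
SAGA_REMOTE_WORKING_DIR = '/tmp/saga/remote'
SAGA_REMOTE_HOST = 'hpc.example.org'

# Lower-case names fail the isupper() check and are never injected.
internal_helper_flag = True
```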
#### File: simple_saga_task_manager/tests/test_api_unit.py
```python
import os
from django.conf import settings
from django.test import TestCase, override_settings
import simple_saga_task_manager.api as api
from simple_saga_task_manager.models import Task, InputFile
from simple_saga_task_manager.tests.saga_test_case import SagaTestCase, TEST_DIR
INPUT_FILE = os.path.join(os.path.dirname(__file__), 'resources', 'input_test.txt')
OUTPUT_FILE = os.path.join(os.path.dirname(__file__), 'resources', 'output_test.txt')
STDOUT_FILE = os.path.join(os.path.dirname(__file__), 'resources', 'stdout_test.txt')
STDERR_FILE = os.path.join(os.path.dirname(__file__), 'resources', 'stderr_test.txt')
@override_settings(MEDIA_ROOT = TEST_DIR)
class SubmitTaskTests(SagaTestCase):
def test_submit_task_local(self):
with open(INPUT_FILE, 'r') as f:
task_id = api.submit_task('test_task', input_files=[f])
self.assertEqual(task_id, 1)
task = Task.objects.get(id=1)
self.assertEqual(task.name, 'test_task')
self.assertEqual(task.status, Task.QUEUED)
self.assertEqual(task.type, Task.LOCAL)
inputfile = InputFile.objects.get(id=1)
self.assertEqual(os.path.basename(inputfile.input_file.name), 'input_test.txt')
expected_path = os.path.join('tasks', str(1), 'inputs', 'input_test.txt')
self.check_file_exists(expected_path)
def test_submit_task_remote(self):
with open(INPUT_FILE, 'r') as f:
task_id = api.submit_task('test_task', input_files=[f], remote=True)
self.assertEqual(task_id, 1)
task = Task.objects.get(id=1)
self.assertEqual(task.name, 'test_task')
self.assertEqual(task.status, Task.QUEUED)
self.assertEqual(task.type, Task.REMOTE)
inputfile = InputFile.objects.get(id=1)
self.assertEqual(os.path.basename(inputfile.input_file.name), 'input_test.txt')
expected_path = os.path.join('tasks', str(1), 'inputs', 'input_test.txt')
self.check_file_exists(expected_path)
def test_get_task_status_queued(self):
self.create_local_queued_task()
actualStatus = api.get_task_status(1)
expectedStatus = Task.QUEUED
self.assertEqual(actualStatus, expectedStatus)
self.create_remote_queued_task()
actualStatus = api.get_task_status(2)
self.assertEqual(actualStatus, expectedStatus)
def test_get_task_status_running(self):
self.create_local_running_task()
actualStatus = api.get_task_status(1)
expectedStatus = Task.RUNNING
self.assertEqual(actualStatus, expectedStatus)
self.create_remote_running_task()
actualStatus = api.get_task_status(2)
self.assertEqual(actualStatus, expectedStatus)
def test_get_task_statuses(self):
self.create_local_queued_task()
self.create_remote_queued_task()
self.create_local_running_task()
self.create_remote_running_task()
statuses = api.get_task_statuses("test_task")
self.assertEqual(len(statuses), 4)
self.assertEqual(statuses[0]['id'], 1)
self.assertEqual(statuses[0]['status'], Task.QUEUED)
self.assertEqual(statuses[1]['id'], 2)
self.assertEqual(statuses[1]['status'], Task.QUEUED)
self.assertEqual(statuses[2]['id'], 3)
self.assertEqual(statuses[2]['status'], Task.RUNNING)
self.assertEqual(statuses[3]['id'], 4)
self.assertEqual(statuses[3]['status'], Task.RUNNING)
def test_get_task_results(self):
task = self.create_remote_complete_task()
self.add_input_file(INPUT_FILE, task)
self.add_stdout_file(STDOUT_FILE, task)
self.add_stderr_file(STDERR_FILE, task)
self.add_output_file(OUTPUT_FILE, task)
outputs = api.get_task_results(task.pk)
stdout_file = outputs['stdout']
self.assertEqual(os.path.basename(STDOUT_FILE), os.path.basename(stdout_file.name))
stdout_file.close()
stderr_file = outputs['stderr']
self.assertEqual(os.path.basename(STDERR_FILE), os.path.basename(stderr_file.name))
stderr_file.close()
output_files = outputs['output_files']
self.assertEqual(os.path.basename(OUTPUT_FILE), os.path.basename(output_files[0].name))
output_files[0].close()
```
#### File: simple_saga_task_manager/tests/test_saga_interface_end_to_end_remote_python_script.py
```python
import os
import time
from django.conf import settings
from simple_saga_task_manager.tests.saga_test_case import SagaTestCase
from simple_saga_task_manager.models import Task
from simple_saga_task_manager.saga_interface import SAGATaskInterface
class SagaInterfaceEndToEndRemotePythonScriptTests(SagaTestCase):
def test_end_to_end_remote_python(self):
task_name = 'python'
task_args = [os.path.join(settings.SAGA_TEST_REMOTE_PYTHON_PATH, settings.SAGA_TEST_PYTHON_SCRIPT)]
task_args = task_args + settings.SAGA_TEST_PYTHON_ARGS
task_environment = settings.SAGA_TEST_REMOTE_ENVIRONMENT
output_files = [settings.SAGA_TEST_OUTPUT_FILE]
task = Task.objects.create(name=task_name, arguments=task_args, status=Task.QUEUED, type=Task.REMOTE,
environment=task_environment, expected_output_files=output_files)
# Create task input file
with open(settings.SAGA_TEST_INPUT_FILE, 'r') as f:
task.addInputFile(f)
with (SAGATaskInterface(True)) as si:
si.submit_saga_task(task)
# Task should now be running
status = task.status
self.assertEqual(Task.RUNNING, status)
# Process running task
while (status == Task.RUNNING):
si.update_running_saga_task_status(task)
status = task.status
time.sleep(5)
# Task should now be finished running
self.assertEqual(Task.FINISHED_RUNNING, status)
si.process_finished_saga_task(task)
# Task should now be complete
self.assertEqual(Task.COMPLETE, task.status)
for f in output_files:
self.check_file_exists(os.path.join('tasks', str(task.id), 'outputs', f))
def test_end_to_end_remote_python_many(self):
task_name = 'python'
task_args = [os.path.join(settings.SAGA_TEST_REMOTE_PYTHON_PATH, settings.SAGA_TEST_PYTHON_SCRIPT)]
task_args = task_args + settings.SAGA_TEST_PYTHON_ARGS
task_environment = settings.SAGA_TEST_REMOTE_ENVIRONMENT
output_files = [settings.SAGA_TEST_OUTPUT_FILE]
number_of_jobs = 5
with (SAGATaskInterface(True)) as si:
for i in range(number_of_jobs):
task = Task.objects.create(name=task_name, arguments=task_args, status=Task.QUEUED, type=Task.REMOTE,
environment=task_environment, expected_output_files=output_files)
# Create task input file
with open(settings.SAGA_TEST_INPUT_FILE, 'r') as f:
task.addInputFile(f)
si.submit_saga_task(task)
# Task should now be running
status = task.status
self.assertEqual(Task.RUNNING, status)
running_jobs = True
while running_jobs:
with (SAGATaskInterface(True)) as si:
tasks = Task.objects.filter(status=Task.RUNNING)
if len(tasks) == 0:
running_jobs = False
else:
for task in tasks:
si.update_running_saga_task_status(task)
with (SAGATaskInterface(True)) as si:
tasks = Task.objects.filter(status=Task.FAILED)
self.assertEqual(len(tasks), 0)
tasks = Task.objects.filter(status=Task.FINISHED_RUNNING)
self.assertEqual(len(tasks), number_of_jobs)
for task in tasks:
si.process_finished_saga_task(task)
for f in output_files:
self.check_file_exists(os.path.join('tasks', str(task.id), 'outputs', f))
    # This test only checks that we can submit a job containing the project and nprocs parameters.
    # It should really scrape the job output and confirm the batch system set them correctly.
def test_end_to_end_cdes_sim_remote_ncpus_project(self):
task_name = 'python'
task_args = [os.path.join(settings.SAGA_TEST_REMOTE_PYTHON_PATH, settings.SAGA_TEST_PYTHON_SCRIPT)]
task_args = task_args + settings.SAGA_TEST_PYTHON_ARGS
task_environment = settings.SAGA_TEST_REMOTE_ENVIRONMENT
output_files = [settings.SAGA_TEST_OUTPUT_FILE]
task = Task.objects.create(name=task_name,
arguments=task_args,
status=Task.QUEUED,
type=Task.REMOTE,
environment=task_environment,
expected_output_files=output_files,
wallclock_limit=10,
project="blah",
nprocs=4)
# Create task input file
with open(settings.SAGA_TEST_INPUT_FILE, 'r') as f:
task.addInputFile(f)
with (SAGATaskInterface(True)) as si:
si.submit_saga_task(task)
# Task should now be running
status = task.status
self.assertEqual(Task.RUNNING, status)
# Process running task
while (status == Task.RUNNING):
si.update_running_saga_task_status(task)
status = task.status
time.sleep(30)
# Task should now be finished running
self.assertEqual(Task.FINISHED_RUNNING, status)
si.process_finished_saga_task(task)
# Task should now be complete
self.assertEqual(Task.COMPLETE, task.status)
for f in output_files:
self.check_file_exists(os.path.join('tasks', str(task.id), 'outputs', f))
```
#### File: simple_saga_task_manager/tests/test_saga_interface_unit.py
```python
import os
from django.test import override_settings
from simple_saga_task_manager.tests.saga_test_case import SagaTestCase
from simple_saga_task_manager.saga_interface import local_working_dir,\
remote_working_dir, \
local_job_service_url, remote_job_service_url,\
local_file_server_url, remote_file_server_url,\
local_file_server_task_url
class SagaInterfaceUnitTests(SagaTestCase):
@override_settings(SAGA_LOCAL_WORKING_DIR = '/test/directory')
def test_local_working_dir(self):
actual_directory = local_working_dir('taskId')
expected_directory = os.path.normpath('/test/directory/taskId')
self.assertEqual(actual_directory, expected_directory)
@override_settings(SAGA_REMOTE_WORKING_DIR = '/test/directory')
def test_remote_working_dir(self):
actual_directory = remote_working_dir('taskId')
expected_directory = '/test/directory/taskId'
self.assertEqual(actual_directory, expected_directory)
def test_local_job_service_url(self):
actual_url = str(local_job_service_url())
expected_url = "fork://localhost"
self.assertEqual(actual_url, expected_url)
@override_settings(SAGA_REMOTE_HOST = 'test.machine.address',
SAGA_REMOTE_JOB_ADAPTER = 'lsf+ssh')
def test_remote_job_service_url(self):
actual_url = str(remote_job_service_url())
expected_url = "lsf+ssh://test.machine.address"
self.assertEqual(actual_url, expected_url)
@override_settings(SAGA_LOCAL_WORKING_DIR = '/test/directory')
def test_local_file_server_url(self):
actual_url = str(local_file_server_url())
expected_url = 'file://localhost/test/directory'
self.assertEqual(actual_url, expected_url)
@override_settings(SAGA_REMOTE_HOST = 'test.machine.address',
SAGA_REMOTE_WORKING_DIR = '/test/directory')
def test_remote_file_server_url(self):
actual_url = str(remote_file_server_url())
expected_url = "sftp://test.machine.address/test/directory"
self.assertEqual(actual_url, expected_url)
@override_settings(SAGA_LOCAL_WORKING_DIR = '/test/directory')
def test_local_file_server_task_url(self):
actual_url = str(local_file_server_task_url('taskId'))
expected_url = 'file://localhost/test/directory/taskId/'
self.assertEqual(actual_url, expected_url)
```
{
"source": "jeremyn/python-machine-learning-book",
"score": 3
}
#### File: jeremyn/python-machine-learning-book/chapter_12.py
```python
import os
import pickle
import sys
import matplotlib.pyplot as plt
import numpy as np
from scipy.special import expit
NN_MNIST_FILENAME = 'nn_mnist.pkl'
def get_mnist_data():
path = os.path.join('datasets', 'mnist')
mnist_data = []
for kind in ('train', 't10k'):
labels_path = os.path.join(path, "%s-labels-idx1-ubyte" % kind)
images_path = os.path.join(path, "%s-images-idx3-ubyte" % kind)
with open(labels_path, 'rb') as lbpath:
lbpath.seek(8)
mnist_data.append(np.fromfile(lbpath, dtype=np.uint8))
with open(images_path, 'rb') as imgpath:
imgpath.seek(16)
mnist_data.append(
np.fromfile(
imgpath,
dtype=np.uint8,
).reshape(len(mnist_data[-1]), 784)
)
y_train, X_train, y_test, X_test = mnist_data
print(
"Train: rows: %d, columns: %d" % (X_train.shape[0], X_train.shape[1])
)
print("Test: rows: %d, columns: %d" % (X_test.shape[0], X_test.shape[1]))
return X_train, X_test, y_train, y_test
def display_mnist_examples(X, y):
fig, ax = plt.subplots(nrows=2, ncols=5, sharex=True, sharey=True)
ax = ax.flatten()
for i in range(10):
img = X[y == i][0].reshape(28, 28)
ax[i].imshow(img, cmap='Greys', interpolation='nearest')
ax[0].set_xticks([])
ax[0].set_yticks([])
plt.show()
fig, ax = plt.subplots(nrows=5, ncols=5, sharex=True, sharey=True)
ax = ax.flatten()
for i in range(25):
img = X[y == 7][i].reshape(28, 28)
ax[i].imshow(img, cmap='Greys', interpolation='nearest')
ax[0].set_xticks([])
ax[0].set_yticks([])
plt.show()
class NeuralNetMLP(object):
"""Feedforward neural network / Multi-layer perceptron classifier.
Parameters
----------
n_output : int
Number of output units, should be equal to the number of unique class
labels.
n_features : int
Number of features (dimensions) in the target dataset. Should be equal
to the number of columns in the X array.
n_hidden : int (default: 30)
Number of hidden units.
l1 : float (default: 0.0)
Lambda value for L1-regularization. No regularization if l1=0.0 (default)
l2 : float (default: 0.0)
Lambda value for L2-regularization. No regularization if l2=0.0 (default)
epochs : int (default: 500)
Number of passes over the training set.
eta : float (default: 0.001)
Learning rate.
alpha : float (default: 0.0)
Momentum constant. Factor multiplied with the gradient of the previous
epoch t-1 to improve learning speed.
decrease_const : float (default: 0.0)
Decrease constant. Shrinks the learning rate after each epoch via
eta / (1 + epoch*decrease_const)
shuffle : bool (default: True)
        Shuffles training data every epoch if True to prevent cycles.
minibatches : int (default: 1)
Divides training data into k minibatches for efficiency. Normal gradient
descent learning if k=1 (default).
random_state : int (default: None)
Set random state for shuffling and initializing the weights.
Attributes
----------
cost_: list
Sum of squared errors after each epoch.
"""
def __init__(self, n_output, n_features, n_hidden=30, l1=0.0, l2=0.0,
epochs=500, eta=0.001, alpha=0.0, decrease_const=0.0,
shuffle=True, minibatches=1, random_state=None, debug=False):
np.random.seed(random_state)
self.n_output = n_output
self.n_features = n_features
self.n_hidden = n_hidden
self.w1, self.w2 = self._initialize_weights()
self.l1 = l1
self.l2 = l2
self.epochs = epochs
self.eta = eta
self.alpha = alpha
self.decrease_const = decrease_const
self.shuffle = shuffle
self.minibatches = minibatches
self.debug = debug
def _encode_labels(self, y, k):
"""Encode labels into one-hot representation
Parameters
----------
y : array, shape = (n_samples, )
Target values.
Returns
-------
onehot : array, shape = (n_labels, n_samples)
"""
onehot = np.zeros((k, y.shape[0]))
for index, val in enumerate(y):
onehot[val, index] = 1.0
return onehot
def _initialize_weights(self):
"""Initialize weights with small random numbers."""
w1 = np.random.uniform(
-1.0,
1.0,
size=self.n_hidden*(self.n_features+1),
)
w1 = w1.reshape(self.n_hidden, self.n_features+1)
w2 = np.random.uniform(
-1.0,
1.0,
size=self.n_output*(self.n_hidden+1),
)
w2 = w2.reshape(self.n_output, self.n_hidden+1)
return w1, w2
def _sigmoid(self, z):
"""Compute logistic function (sigmoid)
Uses scipy.special.expit to avoid overflow error for very small input
values z.
"""
# return 1.0 / (1.0 + np.exp(-z))
return expit(z)
def _sigmoid_gradient(self, z):
"""Compute gradient function of the logistic function"""
sg = self._sigmoid(z)
return sg * (1 - sg)
def _add_bias_unit(self, X, how='column'):
"""Add bias unit (column or row of 1s) to array at index 0"""
if how == 'column':
X_new = np.ones((X.shape[0], X.shape[1]+1))
X_new[:, 1:] = X
elif how == 'row':
X_new = np.ones((X.shape[0]+1, X.shape[1]))
X_new[1:, :] = X
else:
            raise AttributeError("'how' must be 'column' or 'row'")
return X_new
def _feedforward(self, X, w1, w2):
"""Compute feedforward step
Parameters
----------
X : array, shape = (n_samples, n_features)
Input layer with original features.
w1 : array, shape = (n_hidden_units, n_features)
Weight matrix for input layer -> hidden layer.
w2 : array, shape = (n_output_units, n_hidden_units)
Weight matrix for hidden layer -> output layer.
Returns
-------
a1 : array, shape = (n_samples, n_features+1)
Input values with bias unit.
z2 : array, shape = (n_hidden, n_samples)
Net input of hidden layer.
a2 : array, shape = (n_hidden+1, n_samples)
Activation of hidden layer.
z3 : array, shape = (n_output_units, n_samples)
Net input of output layer.
a3 : array, shape = (n_output_units, n_samples)
Activation of output layer.
"""
a1 = self._add_bias_unit(X, how='column')
z2 = w1.dot(a1.T)
a2 = self._sigmoid(z2)
a2 = self._add_bias_unit(a2, how='row')
z3 = w2.dot(a2)
a3 = self._sigmoid(z3)
return a1, z2, a2, z3, a3
def _L2_reg(self, lambda_, w1, w2):
"""Compute L2-regularization cost"""
return (lambda_/2.0) * (np.sum(w1[:, 1:]**2) + np.sum(w2[:, 1:]**2))
def _L1_reg(self, lambda_, w1, w2):
"""Compute L1-regularization cost"""
return (
(lambda_/2.0) *
(np.abs(w1[:, 1:]).sum() + np.abs(w2[:, 1:]).sum())
)
def _get_cost(self, y_enc, output, w1, w2):
"""Compute cost function.
y_enc : array, shape = (n_labels, n_samples)
One-hot encoded class labels.
output : array, shape = (n_output_units, n_samples)
Activation of the output layer (feedforward)
w1 : array, shape = (n_hidden_units, n_features)
Weight matrix for input layer -> hidden layer.
w2 : array, shape = (n_output_units, n_hidden_units)
Weight matrix for hidden layer -> output layer.
Returns
-------
cost : float
            Regularized cost.
"""
term1 = -y_enc * np.log(output)
term2 = (1 - y_enc) * np.log(1 - output)
cost = np.sum(term1 - term2)
L1_term = self._L1_reg(self.l1, w1, w2)
L2_term = self._L2_reg(self.l2, w1, w2)
cost = cost + L1_term + L2_term
return cost
def _get_gradient(self, a1, a2, a3, z2, y_enc, w1, w2):
"""Compute gradient step using backpropagation.
Parameters
----------
a1 : array, shape = (n_samples, n_features+1)
Input values with bias unit.
a2 : array, shape = (n_hidden+1, n_samples)
Activation of hidden layer.
a3 : array, shape = (n_output_units, n_samples)
Activation of output layer.
z2 : array, shape = (n_hidden, n_samples)
Net input of hidden layer.
y_enc : array, shape = (n_labels, n_samples)
One-hot encoded class labels.
w1 : array, shape = (n_hidden_units, n_features)
Weight matrix for input layer -> hidden layer.
w2 : array, shape = (n_output_units, n_hidden_units)
Weight matrix for hidden layer -> output layer.
Returns
-------
grad1 : array, shape = (n_hidden_units, n_features)
Gradient of the weight matrix w1.
grad2 : array, shape = (n_output_units, n_hidden_units)
Gradient of the weight matrix w2.
"""
# backpropagation
sigma3 = a3 - y_enc
z2 = self._add_bias_unit(z2, how='row')
sigma2 = w2.T.dot(sigma3) * self._sigmoid_gradient(z2)
sigma2 = sigma2[1:, :]
grad1 = sigma2.dot(a1)
grad2 = sigma3.dot(a2.T)
# regularize
grad1[:, 1:] += w1[:, 1:] * (self.l1 + self.l2)
grad2[:, 1:] += w2[:, 1:] * (self.l1 + self.l2)
return grad1, grad2
def _gradient_checking(self, X, y_enc, w1, w2, epsilon, grad1, grad2):
"""Apply gradient checking (for debugging only)
Returns
-------
relative_error : float
Relative error between the numerically approximated gradients and
the backpropagated gradients.
"""
num_grad1 = np.zeros(np.shape(w1))
epsilon_arr1 = np.zeros(np.shape(w1))
for i in range(w1.shape[0]):
for j in range(w1.shape[1]):
epsilon_arr1[i, j] = epsilon
a1, z2, a2, z3, a3 = self._feedforward(X, w1-epsilon_arr1, w2)
cost1 = self._get_cost(y_enc, a3, w1-epsilon_arr1, w2)
a1, z2, a2, z3, a3 = self._feedforward(X, w1+epsilon_arr1, w2)
cost2 = self._get_cost(y_enc, a3, w1+epsilon_arr1, w2)
num_grad1[i, j] = (cost2 - cost1) / (2 * epsilon)
epsilon_arr1[i, j] = 0
num_grad2 = np.zeros(np.shape(w2))
epsilon_arr2 = np.zeros(np.shape(w2))
for i in range(w2.shape[0]):
for j in range(w2.shape[1]):
epsilon_arr2[i, j] = epsilon
a1, z2, a2, z3, a3 = self._feedforward(X, w1, w2-epsilon_arr2)
cost1 = self._get_cost(y_enc, a3, w1, w2-epsilon_arr2)
a1, z2, a2, z3, a3 = self._feedforward(X, w1, w2+epsilon_arr2)
cost2 = self._get_cost(y_enc, a3, w1, w2+epsilon_arr2)
num_grad2[i, j] = (cost2 - cost1) / (2 * epsilon)
epsilon_arr2[i, j] = 0
num_grad = np.hstack((num_grad1.flatten(), num_grad2.flatten()))
grad = np.hstack((grad1.flatten(), grad2.flatten()))
norm1 = np.linalg.norm(num_grad - grad)
norm2 = np.linalg.norm(num_grad)
norm3 = np.linalg.norm(grad)
relative_error = norm1 / (norm2 + norm3)
return relative_error
def predict(self, X):
"""Predict class labels.
Parameters
----------
X : array, shape = (n_samples, n_features)
Input layer with original features.
Returns
-------
y_pred : array, shape = (n_samples, )
Predicted class labels.
"""
if len(X.shape) != 2:
raise AttributeError(
"X must be an (n_samples, n_features) array. Use X[:, None] "
"for 1-feature classification, or X[[i]] for 1-sample "
"classification"
)
a1, z2, s2, z3, a3 = self._feedforward(X, self.w1, self.w2)
y_pred = np.argmax(z3, axis=0)
return y_pred
def fit(self, X, y, print_progress=False):
"""Learn weights from training data.
Parameters
----------
        X : array, shape = (n_samples, n_features)
Input layer with original features.
y : array, shape = (n_samples, )
Target class labels.
print_progress : bool (default: False)
Prints progress as the number of epochs to stderr.
Returns
-------
self
"""
self.cost_ = []
X_data = X.copy()
y_data = y.copy()
y_enc = self._encode_labels(y, self.n_output)
delta_w1_prev = np.zeros(self.w1.shape)
delta_w2_prev = np.zeros(self.w2.shape)
for i in range(self.epochs):
# adaptive learning rate
self.eta /= 1 + self.decrease_const*i
if print_progress:
sys.stderr.write("\rEpoch: %d/%d" % (i+1, self.epochs))
if self.shuffle:
index = np.random.permutation(y_data.shape[0])
X_data = X_data[index]
y_enc = y_enc[:, index]
mini = np.array_split(range(y_data.shape[0]), self.minibatches)
for index in mini:
# feedforward
a1, z2, a2, z3, a3 = self._feedforward(
X_data[index],
self.w1,
self.w2,
)
cost = self._get_cost(
y_enc=y_enc[:, index],
output=a3,
w1=self.w1,
w2=self.w2,
)
self.cost_.append(cost)
# compute gradient via backpropagation
grad1, grad2 = self._get_gradient(
a1=a1,
a2=a2,
a3=a3,
z2=z2,
y_enc=y_enc[:, index],
w1=self.w1,
w2=self.w2,
)
if self.debug:
grad_diff = self._gradient_checking(
X=X_data[index],
y_enc=y_enc[:, index],
w1=self.w1,
w2=self.w2,
epsilon=1e-5,
grad1=grad1,
grad2=grad2,
)
if grad_diff <= 1e-7:
status = "Ok"
elif grad_diff <= 1e-4:
status = "Warning"
else:
status = "PROBLEM"
print("%s: %s" % (status, grad_diff))
delta_w1 = self.eta * grad1
delta_w2 = self.eta * grad2
self.w1 -= delta_w1 + (self.alpha * delta_w1_prev)
self.w2 -= delta_w2 + (self.alpha * delta_w2_prev)
delta_w1_prev = delta_w1
delta_w2_prev = delta_w2
return self
def get_trained_nn(X_train, y_train):
np.random.seed(1)
try:
nn = pickle.load(open(NN_MNIST_FILENAME, 'rb'))
except FileNotFoundError:
nn = NeuralNetMLP(
n_output=10,
n_features=X_train.shape[1],
n_hidden=50,
l2=0.1,
l1=0.0,
epochs=1000,
eta=0.001,
alpha=0.001,
decrease_const=0.00001,
minibatches=50,
shuffle=False,
random_state=1,
)
nn.fit(X_train, y_train, print_progress=True)
pickle.dump(nn, open(NN_MNIST_FILENAME, 'wb'), protocol=4)
return nn
def display_nn_performance(nn):
plt.plot(range(len(nn.cost_)), nn.cost_)
plt.ylim([0, 2000])
plt.ylabel('Cost')
plt.xlabel('Epochs * 50')
plt.show()
batches = np.array_split(range(len(nn.cost_)), 1000)
cost_arr = np.array(nn.cost_)
cost_avgs = [np.mean(cost_arr[i]) for i in batches]
plt.plot(range(len(cost_avgs)), cost_avgs, color='red')
plt.ylim([0, 2000])
plt.ylabel('Cost')
plt.xlabel('Epochs')
plt.show()
y_train_pred = nn.predict(X_train)
acc = np.sum(y_train == y_train_pred, axis=0) / X_train.shape[0]
print("Training accuracy: %.2f%%" % (acc * 100))
y_test_pred = nn.predict(X_test)
acc = np.sum(y_test == y_test_pred, axis=0) / X_test.shape[0]
print("Testing accuracy: %.2f%%" % (acc * 100))
miscl_img = X_test[y_test != y_test_pred][:25]
correct_lab = y_test[y_test != y_test_pred][:25]
miscl_lab = y_test_pred[y_test != y_test_pred][:25]
fig, ax = plt.subplots(nrows=5, ncols=5, sharex=True, sharey=True)
ax = ax.flatten()
for i in range(25):
img = miscl_img[i].reshape(28, 28)
ax[i].imshow(img, cmap='Greys', interpolation='nearest')
ax[i].set_title(
"%d) t: %d p: %d" %
(i+1, correct_lab[i], miscl_lab[i])
)
ax[0].set_xticks([])
ax[0].set_yticks([])
plt.show()
def run_nn_check(X, y):
nn_check = NeuralNetMLP(
n_output=10,
n_features=X.shape[1],
n_hidden=10,
l2=0.0,
l1=0.0,
epochs=10,
eta=0.001,
alpha=0.0,
decrease_const=0.0,
minibatches=1,
shuffle=False,
random_state=1,
debug=True,
)
nn_check.fit(X[:5], y[:5], print_progress=False)
if __name__ == '__main__':
path = os.path.join('datasets', 'mnist')
X_train, X_test, y_train, y_test = get_mnist_data()
# display_mnist_examples(X_train, y_train)
# nn = get_trained_nn(X_train, y_train)
# display_nn_performance(nn)
run_nn_check(X_train, y_train)
```
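The `_gradient_checking` method above compares the backpropagated gradients against central-difference approximations and reports their relative error. A minimal standalone sketch of the same technique on a scalar function (illustrative only, not part of the repository):

```python
# Central-difference gradient check, the idea behind _gradient_checking:
# compare an analytic gradient with (f(w+eps) - f(w-eps)) / (2*eps).
def numerical_gradient(f, w, epsilon=1e-5):
    return (f(w + epsilon) - f(w - epsilon)) / (2 * epsilon)

w = 3.0
analytic = 2 * w                                   # d/dw of w**2
numeric = numerical_gradient(lambda x: x ** 2, w)
relative_error = abs(numeric - analytic) / (abs(numeric) + abs(analytic))
print(relative_error)  # effectively 0, far below the 1e-7 "Ok" threshold used above
```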
#### File: jeremyn/python-machine-learning-book/chapter_3.py
```python
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import (
LogisticRegression,
Perceptron,
)
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sklearn import datasets
from visualization import plot_decision_regions
def gini(p):
return 2 * p * (1-p)
def entropy(p):
return -p * np.log2(p) - (1-p) * np.log2(1-p)
def error(p):
return 1 - max(p, 1-p)
def plot_impurity_indexes():
probs = np.arange(0.0, 1.0, 0.01)
entropies = [entropy(p) if p != 0 else None for p in probs]
scaled_entropies = [e * 0.5 if e is not None else None for e in entropies]
errors = [error(p) for p in probs]
plt.figure()
ax = plt.subplot(111)
plots = (
(entropies, 'Entropy', '-', 'black'),
(scaled_entropies, 'Entropy (scaled)', '-', 'lightgray'),
(gini(probs), 'Gini Impurity', '--', 'red'),
(errors, 'Misclassification Error', '-.', 'green'),
)
for y, label, linestyle, color in plots:
ax.plot(probs, y, label=label, linestyle=linestyle, lw=2, color=color)
ax.legend(
loc='upper center',
bbox_to_anchor=(0.5, 1.15),
ncol=3,
fancybox=True,
shadow=False,
)
ax.axhline(y=0.5, linewidth=1, color='k', linestyle='--')
ax.axhline(y=1.0, linewidth=1, color='k', linestyle='--')
plt.ylim([0, 1.1])
plt.xlabel('p(i=1)')
plt.ylabel('Impurity Index')
plt.show()
def plot_iris_with_classifier(clf, print_accuracy=False, standardize=True):
iris = datasets.load_iris()
X = iris.data[:, [2, 3]]
y = iris.target
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=0.3,
random_state=0,
)
if standardize:
sc = StandardScaler()
sc.fit(X_train)
X_train = sc.transform(X_train)
X_test = sc.transform(X_test)
units = 'standardized'
else:
units = 'cm'
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
if print_accuracy:
print("Misclassified samples: %d" % (y_test != y_pred).sum())
print("Accuracy: %.2f" % accuracy_score(y_test, y_pred))
X_combined = np.vstack((X_train, X_test))
y_combined = np.hstack((y_train, y_test))
plot_decision_regions(
X=X_combined,
y=y_combined,
classifier=clf,
test_index=range(105, 150),
)
plt.xlabel("petal length [%s]" % units)
plt.ylabel("petal width [%s]" % units)
plt.legend(loc='upper left')
plt.show()
def plot_lr_regularization():
iris = datasets.load_iris()
X = iris.data[:, [2, 3]]
y = iris.target
X_train, _, y_train, _ = train_test_split(
X,
y,
test_size=0.3,
random_state=0,
)
sc = StandardScaler()
sc.fit(X_train)
X_train_std = sc.transform(X_train)
weights = []
params = []
for c in np.logspace(-5, 4, num=10):
lr = LogisticRegression(C=c, random_state=0)
lr.fit(X_train_std, y_train)
weights.append(lr.coef_[1])
params.append(c)
weights = np.array(weights)
plt.plot(params, weights[:, 0], label='petal length')
plt.plot(params, weights[:, 1], linestyle='--', label='petal width')
plt.ylabel('weight coefficient')
plt.xlabel('C')
plt.legend(loc='upper left')
plt.xscale('log')
plt.show()
def sigmoid(z):
return 1.0 / (1.0 + np.exp(-z))
def plot_sigmoid():
z = np.arange(-7, 7, 0.1)
phi_z = sigmoid(z)
plt.plot(z, phi_z)
plt.axvline(0.0, color='k')
plt.ylim(-0.1, 1.1)
plt.xlabel('z')
plt.ylabel('$\phi (z)$')
plt.yticks([0.0, 0.5, 1.0])
ax = plt.gca()
ax.yaxis.grid(True)
plt.show()
def plot_xor():
np.random.seed(0)
X_xor = np.random.randn(200, 2)
y_xor = np.logical_xor(X_xor[:, 0] > 0, X_xor[:, 1] > 0)
y_xor = np.where(y_xor, 1, -1)
svm = SVC(kernel='rbf', random_state=0, gamma=0.1, C=10.0)
svm.fit(X_xor, y_xor)
plot_decision_regions(X_xor, y_xor, classifier=svm)
plt.legend(loc='upper left')
plt.show()
if __name__ == '__main__':
# clf = Perceptron(n_iter=40, eta0=0.1, random_state=0)
# clf = LogisticRegression(C=1000.0, random_state=0)
# clf = SVC(kernel='linear', C=1.0, random_state=0)
# clf = SVC(kernel='rbf', random_state=0, gamma=0.2, C=1.0)
# clf = SVC(kernel='rbf', random_state=0, gamma=100.0, C=1.0)
clf = KNeighborsClassifier(n_neighbors=5, p=2, metric='minkowski')
plot_iris_with_classifier(clf)
# clf = DecisionTreeClassifier(criterion='entropy', max_depth=3, random_state=0)
# clf = RandomForestClassifier(criterion='entropy', n_estimators=10, random_state=1, n_jobs=2)
# plot_iris_with_classifier(clf, standardize=False)
# plot_sigmoid()
# plot_lr_regularization()
# plot_xor()
# plot_impurity_indexes()
```
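The impurity functions at the top of this file drive `plot_impurity_indexes`. As a quick numerical check of the definitions (the values follow directly from the formulas above), all three measures peak for a binary split at p = 0.5:

```python
import numpy as np

# Evaluate the impurity measures defined in this file at p = 0.5,
# where a binary split is maximally impure.
def gini(p):
    return 2 * p * (1 - p)

def entropy(p):
    return -p * np.log2(p) - (1 - p) * np.log2(1 - p)

def error(p):
    return 1 - max(p, 1 - p)

print(gini(0.5), entropy(0.5), error(0.5))  # 0.5 1.0 0.5
```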
#### File: jeremyn/python-machine-learning-book/chapter_4.py
```python
from io import StringIO
from itertools import combinations
import os
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.base import clone
from sklearn.cross_validation import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.feature_selection import SelectFromModel
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import (
Imputer,
LabelEncoder,
MinMaxScaler,
OneHotEncoder,
StandardScaler,
)
def work_with_categorical_data():
df = pd.DataFrame([
['green', 'M', 10.1, 'class1'],
['red', 'L', 13.5, 'class2'],
['blue', 'XL', 15.3, 'class1'],
])
df.columns = ['color', 'size', 'price', 'class_label']
print(df, end='\n\n')
size_mapping = {
'XL': 3,
'L': 2,
'M': 1,
}
df['size'] = df['size'].map(size_mapping)
print(df, end='\n\n')
inv_size_mapping = {v: k for k, v in size_mapping.items()}
print(df['size'].map(inv_size_mapping), end='\n\n')
class_mapping = {
label: index
for index, label
in enumerate(np.unique(df['class_label']))
}
print(class_mapping, end='\n\n')
df['class_label'] = df['class_label'].map(class_mapping)
print(df, end='\n\n')
inv_class_mapping = {v: k for k, v in class_mapping.items()}
df['class_label'] = df['class_label'].map(inv_class_mapping)
print(df, end='\n\n')
class_label_encoder = LabelEncoder()
y = class_label_encoder.fit_transform(df['class_label'].values)
print(y, end='\n\n')
class_label_encoder.inverse_transform(y)
print(class_label_encoder.inverse_transform(y), end='\n\n')
X = df[['color', 'size', 'price']].values
color_label_encoder = LabelEncoder()
X[:, 0] = color_label_encoder.fit_transform(X[:, 0])
print(X, end='\n\n')
ohe = OneHotEncoder(categorical_features=[0])
print(ohe.fit_transform(X).toarray(), end='\n\n')
print(pd.get_dummies(df[['price', 'color', 'size']]), end='\n\n')
def work_with_numerical_data():
csv_data = """
A,B,C,D
1.0,2.0,3.0,4.0
5.0,6.0,,8.0
10.0,11.0,12.0,
"""
df = pd.read_csv(StringIO(csv_data))
print(df, end='\n\n')
print(df.isnull().sum(), end='\n\n')
print(df.values, end='\n\n')
print(df.dropna(), end='\n\n')
print(df.dropna(axis=1), end='\n\n')
print(df.dropna(how='all'), end='\n\n')
print(df.dropna(thresh=4), end='\n\n')
print(df.dropna(subset=['C']), end='\n\n')
imr = Imputer(missing_values='NaN', strategy='mean', axis=0)
imr = imr.fit(df)
imputed_data = imr.transform(df.values)
print(imputed_data)
def plot_regularization_path(columns, X, y):
fig = plt.figure()
ax = plt.subplot(111)
colors = [
'blue', 'green', 'red', 'cyan', 'magenta', 'yellow', 'black', 'pink',
'lightgreen', 'lightblue', 'gray', 'indigo', 'orange',
]
weights = []
params = []
for c in np.arange(-4, 6):
lr = LogisticRegression(penalty='l1', C=10**c, random_state=0)
lr.fit(X, y)
weights.append(lr.coef_[1])
params.append(10**c)
weights = np.array(weights)
for column, color in zip(range(weights.shape[1]), colors):
plt.plot(
params,
weights[:, column],
label=columns[column+1],
color=color,
)
plt.axhline(0, color='black', linestyle='--', linewidth=3)
plt.xlim([10**-5, 10**5])
plt.ylabel('weight coefficient')
plt.xlabel('C')
plt.xscale('log')
plt.legend(loc='upper left')
ax.legend(
loc='upper center',
bbox_to_anchor=(1.38, 1.03),
ncol=1,
fancybox=True,
)
plt.show()
def use_sbs_with_knn(columns, X_train, X_test, y_train, y_test):
knn = KNeighborsClassifier(n_neighbors=2)
sbs = SBS(knn, k_features=1)
sbs.fit(X_train, y_train)
k_feat = [len(k) for k in sbs.subsets_]
plt.plot(k_feat, sbs.scores_, marker='o')
plt.ylim([0.7, 1.1])
plt.ylabel('Accuracy')
plt.xlabel('Number of features')
plt.grid()
plt.show()
k5 = list(sbs.subsets_[8])
print(columns[1:][k5])
knn.fit(X_train, y_train)
print("Training accuracy: %s" % knn.score(X_train, y_train))
print("Test accuracy: %s" % knn.score(X_test, y_test))
knn.fit(X_train[:, k5], y_train)
print("Training accuracy: %s" % knn.score(X_train[:, k5], y_train))
print("Test accuracy: %s" % knn.score(X_test[:, k5], y_test))
def plot_feature_importances(columns, X_train, y_train):
feat_labels = columns[1:]
forest = RandomForestClassifier(n_estimators=10000, random_state=0)
forest.fit(X_train, y_train)
importances = forest.feature_importances_
indices = np.argsort(importances)[::-1]
for f in range(X_train.shape[1]):
print("%2d) %-*s %f" % (
f+1,
30,
feat_labels[indices[f]],
importances[indices[f]],
))
print()
plt.title('Feature Importances')
plt.bar(
range(X_train.shape[1]),
importances[indices],
color='lightblue',
align='center',
)
plt.xticks(range(X_train.shape[1]), feat_labels[indices], rotation=90)
plt.xlim([-1, X_train.shape[1]])
plt.show()
feature_selector = SelectFromModel(forest, threshold=0.15, prefit=True)
X_selected = feature_selector.transform(X_train)
print(X_selected.shape)
def work_with_wine_data():
df = pd.read_csv(os.path.join('datasets', 'wine.data'), header=None)
df.columns = [
'Class label',
'Alcohol',
'Malic acid',
'Ash',
'Alcalinity of ash',
'Magnesium',
'Total phenols',
'Flavanoids',
'Nonflavanoid phenols',
'Proanthocyanins',
'Color intensity',
'Hue',
'OD280/OD315 of diluted wines',
'Proline',
]
print('Class labels', np.unique(df['Class label']), end='\n\n')
print(df.head(), end='\n\n')
X = df.iloc[:, 1:].values
y = df.iloc[:, 0].values
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=0.3,
random_state=0,
)
ex = pd.DataFrame([0, 1, 2, 3, 4, 5], dtype=np.float64)
ex[1] = StandardScaler().fit_transform(ex)
ex[2] = MinMaxScaler().fit_transform(ex[0].reshape(-1, 1))
ex.columns = ['input', 'standardized', 'normalized']
print(ex, end='\n\n')
min_max_scaler = MinMaxScaler()
X_train_norm = min_max_scaler.fit_transform(X_train)
X_test_norm = min_max_scaler.transform(X_test)
std_scaler = StandardScaler()
X_train_std = std_scaler.fit_transform(X_train)
X_test_std = std_scaler.transform(X_test)
lr = LogisticRegression(penalty='l1', C=0.1)
lr.fit(X_train_std, y_train)
print("Training accuracy: %s" % lr.score(X_train_std, y_train))
print("Test accuracy: %s" % lr.score(X_test_std, y_test))
print("Intercept: %s" % lr.intercept_)
print("Coefficients: %s" % lr.coef_)
# plot_regularization_path(df.columns, X_train_std, y_train)
# use_sbs_with_knn(df.columns, X_train_std, X_test_std, y_train, y_test)
plot_feature_importances(df.columns, X_train, y_train)
class SBS(object):
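    """Sequential Backward Selection.

    Starting from the full feature set, repeatedly drop the single feature
    whose removal yields the best validation score (self.scoring on a
    held-out split), until only k_features remain; subsets_ and scores_
    record the selection path.
    """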
def __init__(
self,
estimator,
k_features,
scoring=accuracy_score,
test_size=0.25,
random_state=1):
self.estimator = clone(estimator)
self.k_features = k_features
self.scoring = scoring
self.test_size = test_size
self.random_state = random_state
def _calc_score(self, X_train, y_train, X_test, y_test, indices):
self.estimator.fit(X_train[:, indices], y_train)
y_pred = self.estimator.predict(X_test[:, indices])
score = self.scoring(y_test, y_pred)
return score
def fit(self, X, y):
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=self.test_size,
random_state=self.random_state,
)
dim = X_train.shape[1]
self.indices_ = tuple(range(dim))
self.subsets_ = [self.indices_, ]
score = self._calc_score(X_train, y_train, X_test, y_test, self.indices_)
self.scores_ = [score, ]
while dim > self.k_features:
scores = []
subsets = []
for p in combinations(self.indices_, r=dim-1):
score = self._calc_score(X_train, y_train, X_test, y_test, p)
scores.append(score)
subsets.append(p)
best = np.argmax(scores)
self.indices_ = subsets[best]
self.subsets_.append(self.indices_)
dim -= 1
self.scores_.append(scores[best])
self.k_score_ = self.scores_[-1]
return self
def transform(self, X):
return X[:, self.indices_]
if __name__ == '__main__':
# work_with_numerical_data()
# work_with_categorical_data()
work_with_wine_data()
```
#### File: jeremyn/python-machine-learning-book/chapter_5.py
```python
import os
import matplotlib.pyplot as plt
from matplotlib.ticker import FormatStrFormatter
import numpy as np
import pandas as pd
from scipy import exp
from scipy.linalg import eigh
from scipy.spatial.distance import (
pdist,
squareform,
)
from sklearn.cross_validation import train_test_split
from sklearn.datasets import (
make_circles,
make_moons,
)
from sklearn.decomposition import (
KernelPCA,
PCA,
)
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler
from visualization import plot_decision_regions
def plot_manual_lda_transformation(X, y):
np.set_printoptions(precision=4)
print("Class label distribution: %s" % np.bincount(y)[1:])
num_features = 13
mean_vectors = []
for label in range(1, 4):
mean_vectors.append(
np.mean(X[y == label], axis=0).reshape(num_features, 1)
)
print("MV %s: %s\n" % (label, mean_vectors[label-1].T))
mean_overall = np.mean(X, axis=0).reshape(num_features, 1)
S_W_unscaled = np.zeros((num_features, num_features))
S_W = np.zeros((num_features, num_features))
S_B = np.zeros((num_features, num_features))
for label, mean_vector in zip(range(1, 4), mean_vectors):
class_scatter = np.zeros((num_features, num_features))
for row in X[y == label]:
row = row.reshape(num_features, 1)
class_scatter += (row - mean_vector).dot((row - mean_vector).T)
S_W_unscaled += class_scatter
S_W += np.cov(X[y == label].T)
n = X[y == label, :].shape[0]
S_B += n * (mean_vector - mean_overall).dot(
(mean_vector - mean_overall).T
)
print(
"Unscaled within-class scatter matrix: %sx%s" %
(S_W_unscaled.shape[0], S_W_unscaled.shape[1])
)
print(
"Scaled within-class scatter matrix: %sx%s" %
(S_W.shape[0], S_W.shape[1])
)
print(
"Between-class scatter matrix: %sx%s" %
(S_B.shape[0], S_B.shape[1])
)
eigenvalues, eigenvectors = np.linalg.eig(np.linalg.inv(S_W).dot(S_B))
eigenpairs = [
(np.abs(eigenvalue), eigenvectors[:, index])
for index, eigenvalue
in enumerate(eigenvalues)
]
eigenpairs = sorted(eigenpairs, key=lambda k: k[0], reverse=True)
print("Eigenvalues in decreasing order: \n")
for eigenpair in eigenpairs:
print(eigenpair[0])
tot = sum(eigenvalues.real)
discr = [i/tot for i in map(lambda p: p[0], eigenpairs)]
cum_discr = np.cumsum(discr)
plt.bar(
range(1, 14),
discr,
alpha=0.5,
align='center',
label='individual "discriminability"',
)
plt.step(
range(1, 14),
cum_discr,
where='mid',
label='cumulative "discriminability"',
)
plt.ylabel('"discriminability" ratio')
plt.xlabel('Linear Discriminants')
plt.ylim([-0.1, 1.1])
plt.legend(loc='best')
plt.show()
w = np.hstack((
eigenpairs[0][1][:, np.newaxis].real,
eigenpairs[1][1][:, np.newaxis].real,
))
print('Matrix W:\n', w)
X_lda = X.dot(w)
colors = ['r', 'b', 'g']
markers = ['s', 'x', 'o']
for label, color, marker in zip(np.unique(y), colors, markers):
plt.scatter(
X_lda[y == label, 0],
X_lda[y == label, 1],
c=color,
label=label,
marker=marker,
)
plt.xlabel('LD 1')
plt.ylabel('LD 2')
plt.legend(loc='upper right')
plt.show()
def plot_sklearn_lda_with_lr(X_train, X_test, y_train, y_test):
lda = LDA(n_components=2)
X_train_lda = lda.fit_transform(X_train, y_train)
lr = LogisticRegression()
lr = lr.fit(X_train_lda, y_train)
plot_decision_regions(X_train_lda, y_train, classifier=lr)
plt.xlabel('LD 1')
plt.ylabel('LD 2')
plt.legend(loc='lower left')
plt.show()
X_test_lda = lda.transform(X_test)
plot_decision_regions(X_test_lda, y_test, classifier=lr)
plt.xlabel('LD 1')
plt.ylabel('LD 2')
plt.legend(loc='lower left')
plt.show()
def plot_manual_pca_transformation(X, y):
cov_mat = np.cov(X.T)
eigenvalues, eigenvectors = np.linalg.eig(cov_mat)
print("\nEigenvalues \n%s" % eigenvalues)
tot = sum(eigenvalues)
var_exp = [i/tot for i in sorted(eigenvalues, reverse=True)]
cum_var_exp = np.cumsum(var_exp)
plt.bar(
range(1, 14),
var_exp,
alpha=0.5,
align='center',
label='individual explained variance',
)
plt.step(
range(1, 14),
cum_var_exp,
where='mid',
label='cumulative explained variance',
)
plt.ylabel('Explained variance ratio')
plt.xlabel('Principal components')
plt.legend(loc='best')
plt.show()
eigenpairs = [
(np.abs(eigenvalue), eigenvectors[:, index])
for index, eigenvalue
in enumerate(eigenvalues)
]
eigenpairs.sort(reverse=True)
w = np.hstack((
eigenpairs[0][1][:, np.newaxis],
eigenpairs[1][1][:, np.newaxis],
))
print('Matrix W:\n%s\n' % w)
X_pca = X.dot(w)
colors = ['r', 'b', 'g']
markers = ['s', 'x', 'o']
for label, color, marker in zip(np.unique(y), colors, markers):
plt.scatter(
X_pca[y == label, 0],
X_pca[y == label, 1],
c=color,
label=label,
marker=marker,
)
plt.xlabel('PC 1')
plt.ylabel('PC 2')
plt.legend(loc='lower left')
plt.show()
print(X_pca[0])
def plot_sklearn_pca_with_lr(X_train, X_test, y_train, y_test):
pca = PCA()
pca.fit(X_train)
print(pca.explained_variance_ratio_)
plt.bar(
range(1, 14),
pca.explained_variance_ratio_,
alpha=0.5,
align='center',
)
plt.step(
range(1, 14),
np.cumsum(pca.explained_variance_ratio_),
where='mid',
)
plt.ylabel('Explained variance ratio')
plt.xlabel('Principal components')
plt.show()
pca = PCA(n_components=2)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)
plt.scatter(X_train_pca[:, 0], X_train_pca[:, 1])
plt.xlabel('PC 1')
plt.ylabel('PC 2')
plt.show()
lr = LogisticRegression()
lr = lr.fit(X_train_pca, y_train)
plot_decision_regions(X_train_pca, y_train, classifier=lr)
plt.xlabel('PC 1')
plt.ylabel('PC 2')
plt.legend(loc='lower left')
plt.show()
plot_decision_regions(X_test_pca, y_test, classifier=lr)
plt.xlabel('PC 1')
plt.ylabel('PC 2')
plt.legend(loc='lower left')
plt.show()
def get_standardized_wine_data():
df = pd.read_csv(os.path.join('datasets', 'wine.data'), header=None)
df.columns = [
'Class label', 'Alcohol', 'Malic acid', 'Ash', 'Alcalinity of ash',
'Magnesium', 'Total phenols', 'Flavanoids', 'Nonflavanoid phenols',
'Proanthocyanins', 'Color intensity', 'Hue',
'OD280/OD315 of diluted wines', 'Proline',
]
X = df.iloc[:, 1:].values
y = df.iloc[:, 0].values
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=0.3,
random_state=0,
)
sc = StandardScaler()
X_train_std = sc.fit_transform(X_train)
X_test_std = sc.transform(X_test)
return X_train_std, X_test_std, y_train, y_test
def rbf_kernel_pca(X, gamma, n_components):
sq_dists = pdist(X, 'sqeuclidean')
mat_sq_dists = squareform(sq_dists)
K = exp(-gamma * mat_sq_dists)
N = K.shape[0]
one_n = np.ones((N, N)) / N
K = K - one_n.dot(K) - K.dot(one_n) + one_n.dot(K).dot(one_n)
eigenvalues, eigenvectors = eigh(K)
alphas = np.column_stack((
eigenvectors[:, -i] for i in range(1, n_components+1)
))
lambdas = [eigenvalues[-i] for i in range(1, n_components+1)]
return alphas, lambdas
def plot_pca_for_data(data_type, n_samples):
if data_type == 'half_circles':
X, y = make_moons(n_samples=n_samples, random_state=123)
format_x_axis = True
elif data_type == 'concentric_circles':
X, y = make_circles(
n_samples=n_samples,
random_state=123,
noise=0.1,
factor=0.2,
)
format_x_axis = False
plt.scatter(
X[y == 0, 0],
X[y == 0, 1],
color='red',
marker='^',
alpha=0.5,
)
plt.scatter(
X[y == 1, 0],
X[y == 1, 1],
color='blue',
marker='o',
alpha=0.5,
)
plt.show()
X_spca = PCA(n_components=2).fit_transform(X)
X_kpca, _ = rbf_kernel_pca(X, gamma=15, n_components=2)
X_skernpca = KernelPCA(
n_components=2,
kernel='rbf',
gamma=15,
).fit_transform(X)
for index, X_pca in enumerate((X_spca, X_kpca, X_skernpca)):
fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(7, 3))
ax[0].scatter(
X_pca[y == 0, 0],
X_pca[y == 0, 1],
color='red',
marker='^',
alpha=0.5,
)
ax[0].scatter(
X_pca[y == 1, 0],
X_pca[y == 1, 1],
color='blue',
marker='o',
alpha=0.5,
)
ax[1].scatter(
X_pca[y == 0, 0],
            np.zeros((n_samples // 2, 1))+0.02,
color='red',
marker='^',
alpha=0.5,
)
ax[1].scatter(
X_pca[y == 1, 0],
            np.zeros((n_samples // 2, 1))-0.02,
color='blue',
marker='o',
alpha=0.5,
)
ax[0].set_xlabel('PC1')
ax[0].set_ylabel('PC2')
ax[1].set_ylim([-1, 1])
ax[1].set_yticks([])
ax[1].set_xlabel('PC1')
if format_x_axis and (index == 1):
ax[0].xaxis.set_major_formatter(FormatStrFormatter('%0.1f'))
ax[1].xaxis.set_major_formatter(FormatStrFormatter('%0.1f'))
plt.show()
def project_x(x_new, X, gamma, alphas, lambdas):
pair_dist = np.array([np.sum((x_new - row)**2) for row in X])
k = np.exp(-gamma * pair_dist)
return k.dot(alphas / lambdas)
def plot_new_data_with_kernel_pca():
X, y = make_moons(n_samples=100, random_state=123)
alphas, lambdas = rbf_kernel_pca(X, gamma=15, n_components=1)
x_new = X[25]
print("x_new: %s" % x_new)
x_proj = alphas[25]
print("x_proj: %s" % x_proj)
x_reproj = project_x(x_new, X, gamma=15, alphas=alphas, lambdas=lambdas)
print("x_reproj: %s" % x_reproj)
plt.scatter(
alphas[y == 0, 0],
np.zeros(50),
color='red',
marker='^',
alpha=0.5,
)
plt.scatter(
alphas[y == 1, 0],
np.zeros(50),
color='blue',
marker='o',
alpha=0.5,
)
plt.scatter(
x_proj,
0,
color='black',
label='original projection of point X[25]',
marker='^',
s=100,
)
plt.scatter(
x_reproj,
0,
color='green',
label='remapped point X[25]',
marker='x',
s=500,
)
plt.legend(scatterpoints=1)
plt.show()
if __name__ == '__main__':
X_train, X_test, y_train, y_test = get_standardized_wine_data()
# plot_manual_pca_transformation(X_train, y_train)
# plot_sklearn_pca_with_lr(X_train, X_test, y_train, y_test)
# plot_manual_lda_transformation(X_train, y_train)
# plot_sklearn_lda_with_lr(X_train, X_test, y_train, y_test)
plot_pca_for_data(data_type='half_circles', n_samples=100)
# plot_pca_for_data(data_type='concentric_circles', n_samples=1000)
# plot_new_data_with_kernel_pca()
```
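`rbf_kernel_pca` above centers the kernel matrix in feature space via K - 1_N.K - K.1_N + 1_N.K.1_N. A quick sanity check of that step (a general property of kernel centering, not a test from this repository) is that every row and column of the centered matrix sums to zero:

```python
import numpy as np
from scipy.spatial.distance import pdist, squareform

# Sanity check for the kernel-centering step used in rbf_kernel_pca:
# after centering, each row and column of K sums to (numerically) zero.
rng = np.random.RandomState(0)
X = rng.randn(20, 2)                      # small illustrative sample
K = np.exp(-15 * squareform(pdist(X, 'sqeuclidean')))
N = K.shape[0]
one_n = np.ones((N, N)) / N
K_centered = K - one_n.dot(K) - K.dot(one_n) + one_n.dot(K).dot(one_n)
print(np.allclose(K_centered.sum(axis=0), 0))  # True
print(np.allclose(K_centered.sum(axis=1), 0))  # True
```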
#### File: jeremyn/python-machine-learning-book/chapter_7.py
```python
from itertools import product
import math
import os
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.misc import comb
from sklearn import datasets
from sklearn.base import (
BaseEstimator,
ClassifierMixin,
clone,
)
from sklearn.cross_validation import (
cross_val_score,
train_test_split,
)
from sklearn.ensemble import (
AdaBoostClassifier,
BaggingClassifier,
)
from sklearn.externals import six
from sklearn.grid_search import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import (
accuracy_score,
auc,
roc_curve,
)
from sklearn.neighbors import KNeighborsClassifier
from sklearn.pipeline import (
_name_estimators,
Pipeline,
)
from sklearn.preprocessing import (
LabelEncoder,
StandardScaler,
)
from sklearn.tree import DecisionTreeClassifier
def ensemble_error(n_classifier, error):
k_start = math.ceil(n_classifier / 2.0)
probs = [
comb(n_classifier, k) * error**k * (1-error)**(n_classifier-k)
for k in range(k_start, n_classifier+1)
]
return sum(probs)
def plot_ensemble_error():
error_range = np.arange(0.0, 1.01, 0.01)
ensemble_errors = [
ensemble_error(n_classifier=11, error=error) for error in error_range
]
plt.plot(
error_range,
ensemble_errors,
label='Ensemble error',
linewidth=2,
)
plt.plot(
error_range,
error_range,
linestyle='--',
label='Base error',
linewidth=2,
)
plt.xlabel('Base error')
plt.ylabel('Base/Ensemble error')
plt.legend(loc='upper left')
plt.grid()
plt.show()
def use_adaboost_classifier():
tree = DecisionTreeClassifier(
criterion='entropy',
max_depth=1,
random_state=0,
)
ada = AdaBoostClassifier(
base_estimator=tree,
n_estimators=500,
learning_rate=0.1,
random_state=0,
)
return use_ensemble_classifier(tree, 'Decision tree', ada, 'AdaBoost')
def use_bagging_classifier():
tree = DecisionTreeClassifier(
criterion='entropy',
max_depth=None,
random_state=3,
)
bag = BaggingClassifier(
base_estimator=tree,
n_estimators=500,
max_samples=1.0,
max_features=1.0,
bootstrap=True,
bootstrap_features=False,
random_state=1
)
return use_ensemble_classifier(tree, 'Decision tree', bag, 'Bagging')
def use_ensemble_classifier(clf1, label1, clf2, label2):
df = pd.read_csv(os.path.join('datasets', 'wine.data'), header=None)
df.columns = [
'Class label', 'Alcohol', 'Malic acid', 'Ash', 'Alcalinity of ash',
'Magnesium', 'Total phenols', 'Flavanoids', 'Nonflavanoid phenols',
'Proanthocyanins', 'Color intensity', 'Hue',
'OD280/OD315 of diluted wines', 'Proline',
]
df = df[df['Class label'] != 1]
X = df[['Alcohol', 'Hue']].values
y = df['Class label'].values
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=0.4,
random_state=1,
)
clfs = [clf1, clf2]
labels = [label1, label2]
for clf, label in zip(clfs, labels):
clf = clf.fit(X_train, y_train)
y_train_pred = clf.predict(X_train)
y_test_pred = clf.predict(X_test)
clf_train = accuracy_score(y_train, y_train_pred)
clf_test = accuracy_score(y_test, y_test_pred)
print(
"%s train/test accuracies %.3f/%.3f" %
(label, clf_train, clf_test)
)
x_min = X_train[:, 0].min() - 1
x_max = X_train[:, 0].max() + 1
y_min = X_train[:, 1].min() - 1
y_max = X_train[:, 1].max() + 1
xx, yy = np.meshgrid(
np.arange(x_min, x_max, 0.1),
np.arange(y_min, y_max, 0.1),
)
f, axarr = plt.subplots(
nrows=1,
ncols=2,
sharex='col',
sharey='row',
figsize=(8, 3),
)
for index, clf, tt in zip([0, 1], clfs, labels):
clf.fit(X_train, y_train)
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
axarr[index].contourf(xx, yy, Z, alpha=0.3)
axarr[index].scatter(
X_train[y_train == 0, 0],
X_train[y_train == 0, 1],
c='blue',
marker='^',
)
axarr[index].scatter(
X_train[y_train == 1, 0],
X_train[y_train == 1, 1],
c='red',
marker='o',
)
axarr[index].set_title(tt)
axarr[0].set_ylabel('Alcohol', fontsize=12)
plt.text(9.8, -1, s='Hue', ha='center', va='center', fontsize=12)
plt.show()
def use_majority_vote_classifier():
iris = datasets.load_iris()
X = iris.data[50:, [1, 2]]
y = iris.target[50:]
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(
X,
y,
test_size=0.5,
random_state=1,
)
clf1 = LogisticRegression(penalty='l2', C=0.001, random_state=0)
clf2 = DecisionTreeClassifier(
max_depth=1,
criterion='entropy',
random_state=0,
)
clf3 = KNeighborsClassifier(n_neighbors=1, p=2, metric='minkowski')
pipe1 = Pipeline([['sc', StandardScaler()], ['clf', clf1]])
pipe3 = Pipeline([['sc', StandardScaler()], ['clf', clf3]])
mv_clf = MajorityVoteClassifier(classifiers=[pipe1, clf2, pipe3])
all_clf = [pipe1, clf2, pipe3, mv_clf]
clf_labels = [
'Logistic Regression',
'Decision Tree',
'KNN',
'Majority Voting',
]
print('10-fold cross-validation:\n')
for clf, label in zip(all_clf, clf_labels):
scores = cross_val_score(
estimator=clf,
X=X_train,
y=y_train,
cv=10,
scoring='roc_auc',
)
print(
"ROC AUC: %0.2f (+/- %0.2f) [%s]" %
(scores.mean(), scores.std(), label)
)
print()
colors = ['black', 'orange', 'blue', 'green']
linestyles = [':', '--', '-.', '-']
for clf, label, clr, ls in zip(all_clf, clf_labels, colors, linestyles):
y_pred = clf.fit(X_train, y_train).predict_proba(X_test)[:, 1]
fpr, tpr, thresholds = roc_curve(y_true=y_test, y_score=y_pred)
roc_auc = auc(x=fpr, y=tpr)
plt.plot(
fpr,
tpr,
color=clr,
linestyle=ls,
label="%s (auc = %0.2f)" % (label, roc_auc)
)
plt.legend(loc='lower right')
plt.plot([0, 1], [0, 1], linestyle='--', color='gray', linewidth=2)
plt.xlim([-0.1, 1.1])
plt.ylim([-0.1, 1.1])
plt.grid()
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.show()
sc = StandardScaler()
X_train_std = sc.fit_transform(X_train)
x_min = X_train_std[:, 0].min() - 1
x_max = X_train_std[:, 0].max() + 1
y_min = X_train_std[:, 1].min() - 1
y_max = X_train_std[:, 1].max() + 1
xx, yy = np.meshgrid(
np.arange(x_min, x_max, 0.1),
np.arange(y_min, y_max, 0.1),
)
f, axarr = plt.subplots(
nrows=2,
ncols=2,
sharex='col',
sharey='row',
figsize=(7, 5),
)
for index, clf, tt in zip(product([0, 1], [0, 1]), all_clf, clf_labels):
clf.fit(X_train_std, y_train)
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
axarr[index[0], index[1]].contourf(xx, yy, Z, alpha=0.3)
axarr[index[0], index[1]].scatter(
X_train_std[y_train == 0, 0],
X_train_std[y_train == 0, 1],
c='blue',
marker='^',
s=50,
)
axarr[index[0], index[1]].scatter(
X_train_std[y_train == 1, 0],
X_train_std[y_train == 1, 1],
c='red',
marker='o',
s=50,
)
axarr[index[0], index[1]].set_title(tt)
plt.text(
-3.5,
-4.5,
s='Sepal width [standardized]',
ha='center',
va='center',
fontsize=12,
)
plt.text(
-11.75,
4.5,
s='Petal length [standardized]',
ha='center',
va='center',
fontsize=12,
rotation=90,
)
plt.show()
# print(mv_clf.get_params())
param_grid = {
'decisiontreeclassifier__max_depth': [1, 2],
'pipeline-1__clf__C': [0.001, 0.1, 100.0],
}
gs = GridSearchCV(
estimator=mv_clf,
param_grid=param_grid,
cv=10,
scoring='roc_auc',
)
gs.fit(X_train, y_train)
for params, mean_score, scores in gs.grid_scores_:
print("%0.3f +/- %0.2f %r" % (mean_score, scores.std() / 2, params))
print("\nBest parameters: %s" % gs.best_params_)
print("Accuracy: %.2f" % gs.best_score_)
class MajorityVoteClassifier(BaseEstimator, ClassifierMixin):
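    """Ensemble classifier that combines several classifiers by voting.

    With vote='classlabel' the prediction is the (optionally weighted)
    majority of the individual class-label predictions; with
    vote='probability' it is the argmax of the averaged class probabilities.
    """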
def __init__(self, classifiers, vote='classlabel', weights=None):
if vote not in ('classlabel', 'probability'):
raise ValueError(
"vote must be 'classlabel' or 'probability'; got (vote=%r)" %
vote
)
else:
self.vote = vote
if (weights is not None) and (len(weights) != len(classifiers)):
raise ValueError(
"Number of classifiers and weights must be equal; got %d "
"weights, %d classifiers" % (len(weights), len(classifiers))
)
else:
self.weights = weights
self.classifiers = classifiers
self.named_classifiers = {
k: v for k, v in _name_estimators(classifiers)
}
def fit(self, X, y):
self.label_encoder_ = LabelEncoder()
self.label_encoder_.fit(y)
self.classes_ = self.label_encoder_.classes_
self.classifiers_ = []
for clf in self.classifiers:
fitted_clf = clone(clf).fit(
X,
self.label_encoder_.transform(y)
)
self.classifiers_.append(fitted_clf)
return self
def predict(self, X):
if self.vote == 'classlabel':
predictions = np.asarray(
[clf.predict(X) for clf in self.classifiers_]
).T
maj_vote = np.apply_along_axis(
lambda x: np.argmax(np.bincount(x, weights=self.weights)),
axis=1,
arr=predictions,
)
elif self.vote == 'probability':
maj_vote = np.argmax(self.predict_proba(X), axis=1)
maj_vote = self.label_encoder_.inverse_transform(maj_vote)
return maj_vote
def predict_proba(self, X):
probas = np.asarray(
[clf.predict_proba(X) for clf in self.classifiers_]
)
avg_proba = np.average(probas, axis=0, weights=self.weights)
return avg_proba
def get_params(self, deep=True):
if not deep:
return super(MajorityVoteClassifier, self).get_params(deep=False)
else:
out = self.named_classifiers.copy()
for name, step in six.iteritems(self.named_classifiers):
for k, v in six.iteritems(step.get_params(deep=True)):
out["%s__%s" % (name, k)] = v
return out
if __name__ == '__main__':
# plot_ensemble_error()
# use_majority_vote_classifier()
# use_bagging_classifier()
use_adaboost_classifier()
```
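`ensemble_error` above is the probability that a majority of `n_classifier` independent base classifiers, each wrong with probability `error`, are wrong at the same time. For example, 11 classifiers with a base error of 0.25 give roughly 0.034, well below the base error. A standalone check using the standard library's binomial coefficient (assumes Python 3.8+ for `math.comb`; the file above uses the older `scipy.misc.comb`):

```python
import math

# Majority-vote error: probability that 6 or more of 11 independent
# classifiers (each wrong with probability 0.25) are wrong together.
def ensemble_error(n_classifier, error):
    k_start = math.ceil(n_classifier / 2.0)
    return sum(
        math.comb(n_classifier, k) * error**k * (1 - error)**(n_classifier - k)
        for k in range(k_start, n_classifier + 1)
    )

print(round(ensemble_error(11, 0.25), 3))  # ~0.034
```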
{
"source": "Jeremy-NZ/reddit_wallpaper",
"score": 3
}
#### File: Jeremy-NZ/reddit_wallpaper/downloader.py
```python
import os
from urllib import request
import ctypes
import imghdr
import random
import praw
import time
from urllib.error import HTTPError
def background_changer(sub):
USER_AGENT = 'wallpaper changer for windows by /u/Jeremy11'
REDDIT_ID = "Jeremy11"
REDDIT_PASS = ""
reddit = praw.Reddit(USER_AGENT)
reddit.login(REDDIT_ID, REDDIT_PASS)
images = reddit.get_subreddit(sub)
for sub in images.get_hot(limit=5):
image_link = sub.url
print(image_link)
file_name = "temp"
request.urlretrieve(image_link, file_name)
file_exts = ('png', 'bmp', 'gif', 'jpeg', 'jpg')
if imghdr.what(file_name) in file_exts or image_link.endswith(file_exts):
change_background(file_name)
return True
return False
def change_background(image_file):
SPI_SETDESKWALLPAPER = 20
SPIF_UPDATEINIFILE = 1
SPIF_SENDCHANGE = 2
image_path = os.path.abspath(image_file)
ctypes.windll.user32.SystemParametersInfoW(SPI_SETDESKWALLPAPER,
0, image_path, SPIF_UPDATEINIFILE | SPIF_SENDCHANGE)
def get_subs():
return ["HighRes", "earthporn", "pics", "hdpics", "topwalls", "OldSchoolCool", "QuotesPorn",
"spaceporn", "pictureswithpatrick"]
def main():
wallpaper_set = False
while not wallpaper_set:
try:
subs = ["QuotesPorn"]
wallpaper_set = background_changer(random.choice(subs))
except HTTPError:
time.sleep(15)
continue
main()
```
|
{
"source": "jeremyong/simdjson",
"score": 3
}
|
#### File: simdjson/scripts/detect_nonascii_sourcefiles.py
```python
import sys
def verifyContent(f,filename):
linenumber=-999
line=''
try:
for linenumber, line in enumerate(f):
try:
ascii=line.encode('ascii')
except UnicodeEncodeError as e:
#print(f"a: found problem {e} at line {linenumber+1} in {filename}:")
print(f"Found problem at line {linenumber+1} in {filename}:")
print(line.rstrip())
for col, char in enumerate(line.encode('utf-8')):
if char>=127:
offender=char
offendingcol=col
break
print(" "*offendingcol + "^")
print(f"Column {offendingcol+1} contains 0x{offender:02X}")
sys.exit(1)
except UnicodeDecodeError as e:
print(f"Could not open {filename} as utf-8, it can't be ascii.")
sys.exit(1)
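# Illustrative invocation (not part of the original script); the file names
# below are placeholders:
#   python detect_nonascii_sourcefiles.py src/simdjson.cpp include/simdjson.h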
for filename in sys.argv[1:]:
with open(filename,encoding='utf-8') as f:
#print(f"file {filename} was possible to open as utf-8")
verifyContent(f,filename)
print("all files were found to be ascii.")
```
|
{
"source": "jeremyorme/pyfathom",
"score": 3
}
|
#### File: pyfathom/pyfathom/classifier.py
```python
from .rule import *
from .tokenisers import *
from .classifications import *
class classifier:
def __init__(self, knowledge, tokeniser=default_tokeniser()):
self.rules = []
for line in knowledge.splitlines():
l = line.strip()
if len(l) > 0:
self.rules.append(rule(l))
self.tokeniser = tokeniser
def classify(self, in_str):
classification_list = []
tokens = self.tokeniser.tokenise(in_str)
for rule in self.rules:
rule.match(tokens, classification_list)
return classifications(tokens, classification_list)
```
#### File: pyfathom/pyfathom/tokenisers.py
```python
import re
class default_tokeniser:
def tokenise(self, in_str):
return [t for t in re.split('([a-zA-Z][a-zA-Z\\-]*|[½⅓⅔¼¾⅕⅖⅗⅘⅙⅚⅛⅜⅝⅞\\d]+|[^\\w ])', in_str) if t.strip() != '']
```
|
{
"source": "jeremyosborne/examples-python",
"score": 4
}
|
#### File: decorators_lab/solution/decorators.py
```python
import json
from decimal import *
account = {
"username": "oscar",
"password": "<PASSWORD>",
"account": 1234,
"balance": Decimal("0.00"),
}
def data_parser(f):
def parse(j):
trans = json.loads(j)
return f(trans)
return parse
def validate(f):
def validate(trans):
if not "account" in trans:
return 'No Account Number Provided'
elif trans["password"] != account["password"]:
return 'Invalid Password'
elif trans["account"] != account["account"]:
return 'Invalid Account'
else:
return f(trans)
return validate
@data_parser
@validate
def deposit(transaction):
global account
account["balance"] += Decimal(str(transaction["amount"]))
return 'OK'
@data_parser
@validate
def withdraw(transaction):
global total
account["balance"] -= Decimal(str(transaction["amount"]))
return 'OK'
@data_parser
@validate
def balance(transaction):
return str(account["balance"])
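# Illustrative sketch (not part of the original lab solution): exercising the
# decorated functions with a JSON payload. The amount below is an assumption.
def _example_session():
    payload = json.dumps({
        "account": account["account"],
        "password": account["password"],
        "amount": 10.50,
    })
    print(deposit(payload))   # 'OK' once the payload passes validation
    print(balance(payload))   # the running balance as a string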
if __name__ == '__main__':
import doctest
doctest.testmod(verbose=True)
```
#### File: server/stats/jsonencoderdelegator.py
```python
import json
class JSONEncoderDelegator(json.JSONEncoder):
"""Wrapper for the JSON encoder that attempts to delegate the encoding
of JSON objects to the object itself prior to attempting to serialize
the object.
"""
def default(self, o):
"""Override the default function.
Method called when the JSONEncoder can't decide what to do with
the object. That object will be passed to this function, and we have
the option of dealing with it, or throwing an error.
"""
if hasattr(o, "tojsonobject"):
return o.tojsonobject()
else:
# attempt to process iterable object that might contain
# jsonserialize-able objects.
try:
items = []
for item in o:
                    # Just assume, but look for attribute error.
                    # ASSUMPTION: if we get a dictionary like object, we
                    # assume we'll fail as keys shouldn't have a
                    # .tojsonobject method.
items.append(item.tojsonobject())
return items
except (TypeError, AttributeError) as err:
# Fall through, let the native JSONEncoder raise an error.
pass
return json.JSONEncoder.default(self, o)
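# Illustrative sketch (not part of the original module): any object exposing a
# tojsonobject() method is serialized through that hook. _Point is an
# assumption made up for this example.
class _Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def tojsonobject(self):
        return {"x": self.x, "y": self.y}

# JSONEncoderDelegator().encode(_Point(1, 2)) -> '{"x": 1, "y": 2}' (key order may vary)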
```
#### File: server/stats/tests.py
```python
import json
from datetime import datetime
from django.utils import unittest
from django.db import models
from jsonserializermixin import JSONSerializerMixin
from jsonencoderdelegator import JSONEncoderDelegator
class TestModel(models.Model, JSONSerializerMixin):
"""A sample test model.
"""
count = models.IntegerField()
class TestRelatedModel(models.Model, JSONSerializerMixin):
"""A sample model related to the test model.
"""
owner = models.ForeignKey(TestModel)
description = models.TextField()
class TestDescribedModel(models.Model, JSONSerializerMixin):
"""A sample model related to the test model, but doesn't describe the
relation.
"""
owner = models.ForeignKey(TestModel)
description = models.TextField()
def describe(self):
"""Testing out the whitelist for only the description.
"""
return {
"description": "string",
}
class JSONSerializerMixinTest(unittest.TestCase):
"""Test the json serializer mixin, ensure that it returns a JSON
friendly object.
"""
def setUp(self):
self.t = TestModel.objects.create(count=42)
self.d = TestDescribedModel.objects.create(owner=self.t, description=24)
def tearDown(self):
self.d.delete()
self.t.delete()
def test_sanity(self):
self.assertEqual(self.t.tojsonobject(),
{"count": 42, "id": 1},
"Serialized model matches JSON friendly object.")
self.assertEqual(json.dumps(self.t.tojsonobject(), sort_keys=True),
'{"count": 42, "id": 1}',
"Serialized model behaves correctly in json.dumps.")
def test_describe(self):
self.assertEqual(self.d.tojsonobject(),
{"description": "24"},
"White list correctly ignores the owner attribute.")
class JSONEncoderDelegatorTest(unittest.TestCase):
def setUp(self):
self.testlist = [TestModel.objects.create(count=42),
TestModel.objects.create(count=42),]
self.relatedTestModel = TestRelatedModel.objects.create(
owner=self.testlist[0],
description="42"
)
def tearDown(self):
# Remove models with relations, first.
self.relatedTestModel.delete()
del self.relatedTestModel
# Remove non related models.
for t in self.testlist:
t.delete()
del self.testlist
def test_sanity(self):
# Expected iterators work as expected.
testobject = [42, 42]
json = JSONEncoderDelegator()
output = json.encode(testobject)
self.assertEqual(output,
"[42, 42]",
"Standard items serialized correctly.")
def test_list(self):
json = JSONEncoderDelegator()
output = json.encode(self.testlist)
self.assertEqual(output,
'[{"count": 42, "id": 1}, {"count": 42, "id": 2}]',
"jsonserializer in a list works as expected.")
def test_related(self):
self.assertEqual(self.relatedTestModel.tojsonobject(),
{'owner': {'fk': 1}, 'id': 1, 'description': u'42'},
"Models return a simple object with related fk.")
```
#### File: server/stats/views.py
```python
from django.db import models
from django.shortcuts import render_to_response
import stats.models
from stats.models import Eggs
from django.core.exceptions import ValidationError
#from django.contrib.auth import authenticate
from jsonencoderdelegator import JSONEncoderDelegator
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
def json_response_from(context):
"""Wrapper used to convert the context object to JSON and send back as
an HTTP response.
"""
json = JSONEncoderDelegator()
return HttpResponse(json.encode(context), mimetype='application/json')
def get(request):
"""Handle retreival request for the egg statistics via an HTTP GET.
On a get request, an array will be returned containing
all of the egg days that have been added to the database.
Required GET arguments (for eggs):
-> statName {string} A valid stat name.
"""
context = {
# Error by default.
"error": "_Could not retrieve data.",
# Will always be a list for a general get request,
# or null if there is an auth error.
"stats": None,
}
user = request.user
statName = request.GET.get("statName", "")
try:
if user.is_authenticated() and user.is_active and getattr(stats.models, statName).is_ajax_accessible:
context["stats"] = getattr(stats.models, statName).objects.all().order_by('-date').filter(user=user)
# Turn off the error.
context["error"] = None
except AttributeError:
# We fail by default.
pass
return json_response_from(context)
# TODO: Checkout ensure_csrf_cookie() when we update to Django 1.4.
# TODO: Stop ignoring the csrf cookie.
# Since this handles an ajax request, we ignore the csrf tag (for now).
@csrf_exempt
def save(request):
"""Handle the save statistics request via an HTTP POST.
This request does double duty for create and update, there is no external
distinction between the two.
The request requires previous authentication, and validates authentication
via cookie.
A JSON response will be returned for interpretation by the client.
Required POST arguments must match the fields of the model being updated.
"""
context = {
# If there is an error, this will not be null.
"error": None,
# What was the model that was modified (created or updated)?
"stat": None,
}
statModel = None
statFields = None
if request.method == "POST":
user = request.user
statName = request.POST.get("statName", "")
# Due to the nature of query params, where multiple values can be
# sent in during a request, the values of each key will be in a list.
statData = dict(request.POST)
try:
if getattr(stats.models, statName).is_ajax_accessible:
statModel = getattr(stats.models, statName)
# Remove the statName from the input.
del statData["statName"]
except (AttributeError, KeyError):
context["error"] = "_Incorrect statName %s" % statName
if statModel and statData and user.is_authenticated() and user.is_active:
try:
stat = statModel.objects.filter(date=statData["date"][0]).filter(user=user)
if len(stat) > 0:
# This is an update to an existing record.
stat = stat[0]
else:
# This is a new record.
stat = statModel()
stat.user = user
# copy the fields into the array and pray for the best.
for k, v in statData.items():
setattr(stat, k, v[0])
# Make sure validators are called (not sure if this is needed).
stat.clean_fields()
stat.save()
context["stat"] = stat
except (ValidationError, KeyError) as err:
context["error"] = "".join(err.__str__()[0])
# Deal with non-content and non-error state
if context["error"] == None and context["stat"] == None:
context["error"] = "_Could not save the data. Please check your input."
else:
context["error"] = "_Service only accepts HTTP POST requests."
return json_response_from(context)
# TODO: Checkout ensure_csrf_cookie() when we update to Django 1.4.
# TODO: Stop ignoring the csrf cookie.
# Since this handles an ajax request, we ignore the csrf tag (for now).
@csrf_exempt
def delete(request):
"""Handle the delete statistics request via an HTTP POST.
If a record exists, it is deleted.
The request requires previous authentication, and validates authentication
via cookie.
A JSON response will be returned for interpretation by the client.
Required DELETE arguments:
-> date (as a YYYY-MM-DD string, must be unique)
"""
context = {
# If there is an error, this will not be null.
"error": None,
# What was the model that was deleted?
"stat": None,
}
statModel = None
statFields = None
if request.method == "POST":
user = request.user
# We only need the date, a unique field, to delete
date = request.POST.get("date", None)
statName = request.POST.get("statName", "")
try:
if getattr(stats.models, statName).is_ajax_accessible:
statModel = getattr(stats.models, statName)
except AttributeError:
context["error"] = "_Incorrect statName %s" % statName
if statModel and date and user.is_authenticated() and user.is_active:
# Validate and attempt to find all at once.
try:
stat = statModel.objects.filter(date=date).filter(user=user)
# Only continue if we have a stat to delete
if len(stat):
# Get ourselves the stat from the returned list
stat = stat[0]
# Delete the stat
stat.delete()
# Object still exists in memory and we can serialize it.
context["stat"] = stat
except ValidationError as err:
# Validation Errors return a list of error strings.
context["error"] = "".join(err.__str__())
# Deal with non-content and non-error state
if context["error"] == None and context["stat"] == None:
context["error"] = "_No record to delete."
else:
context["error"] = "_Service only accepts HTTP POST requests."
return json_response_from(context)
def describe(request):
"""Return a JSON friendly description of a model available to the user
via an HTTP GET.
The request requires previous authentication, and validates authentication
via cookie.
A JSON response will be returned for interpretation by the client.
The names of the models returned will be suitable for use in web requests,
although may not be human readable.
Required arguments:
-> model (string)
"""
# Register the available models below (for now).
available_models = [
"Eggs",
"Weight",
]
context = {
# If there is an error, this will not be null.
"error": None,
# Will always be an object.
"description": {},
}
user = request.user
if user.is_authenticated() and user.is_active:
for available_model in available_models:
for m in models.get_models():
if available_model == m.__name__:
                    # Instance method, need an instance to call describe.
context["description"][available_model] = m().describe()
# Match the first only and move on.
break
# Deal with errors of some kind.
if context["error"] == None and context["description"] == {}:
context["error"] = "_No stat description available."
return json_response_from(context)
```
#### File: general/stringer/stringer.py
```python
__version_info__ = (0, 0, 1)
__version__ = '.'.join((str(info) for info in __version_info__))
import ConfigParser
DEBUG = False
"""If true, output debugging statements to sys.stderr. Otherwise, no debugging
statements will be output.
@type: bool
"""
DICTIONARY_KEY = "en"
"""The default, and current, dictionary we are localizing against. The name
of each dictionary is not enforced against any form of language codes.
@type: str
"""
DICTIONARIES = {}
"""Cache of localization dictionaries, keyed by their dictionary key, and
referenced in localization by the currently set DICTIONARY_KEY.
@type: dict
"""
def debug(message):
"""Write to standard out any debug messages, but only if the module
is set to debug mode.
@type message: str
@param message: Debug message to display.
"""
if DEBUG == True:
print "stringer debug message: {0}".format(message)
def setkey(key):
"""Sets the DICTIONARY_KEY, which is the suggested interface
for switching dictionary keys (includes debugging statements).
@type key: str
@param key: New dictionary key we will work with.
"""
global DICTIONARY_KEY
if key not in DICTIONARIES:
debug("{0} is not a key within DICTIONARIES".format(key))
DICTIONARY_KEY = key
def deldict(key):
"""Attempts to delete a specific dictionary.
@type key: str
@param key: Dictionary key to delete.
"""
try:
global DICTIONARIES
del DICTIONARIES[key]
debug("Deleted the {0} dictionary.".format(key))
except KeyError:
debug("Warning: Attempting to delete non-existant dictionary: {0}".format(key))
def deldicts():
"""Frees (deletes) all loaded dictionaries.
"""
global DICTIONARIES
DICTIONARIES = {}
debug("All stringer dictionaries dropped.")
def loaddictionary(path, dictionary_key=None):
"""Read a dictionary file located at path and load into memory.
By default, the dictionary key for the dictionary is defined in the
dictionary file.
Will throw an IOError if path does not lead to an existing file.
Will throw a TypeError if the file is invalid or doesn't exist.
@type path: string
@param path: The full path to the localization dictionary file.
@type dictionary_key: string
@param dictionary_key: An optional parameter that, if included, will
override the default dictionary key located within the file.
"""
c = ConfigParser.ConfigParser()
# Make things case sensitive
c.optionxform = str
debug("Attempting to open dictionary file at: {0}".format(path))
# This will throw an IOError all by itself.
f = open(path, "r")
try:
c.readfp(f)
# Done with the file.
f.close()
# Only read the first section
section = c.sections()[0]
if DEBUG == True:
debug("Retrieving dictionary for section: {0}".format(section))
s = c.sections()
if len(s) > 1:
debug("Warning, dictionary file contains more than one dictionary.")
debug("Ignoring the following sections:")
s.pop(0)
for ignoredSection in s:
debug(ignoredSection)
# Section itself becomes a dictionary in our DICTIONARIES.
DICTIONARIES[section] = {}
for phrase, translation in c.items(section):
DICTIONARIES[section][phrase] = translation
except (KeyError, ConfigParser.MissingSectionHeaderError):
raise TypeError("Error reading stringer dictionary file:" + path)
def lookup(key):
"""Lookup a particular key in the current dictionary.
@type key: str
@param key: The key phrase to lookup.
@rtype: str
@return: The value of the lookup key. The phrase itself will be returned
if there is no lookup key in the corresponding dictionary.
"""
try:
return DICTIONARIES[DICTIONARY_KEY][key]
except KeyError:
debug("Warning: no translation for {0} lookup key in {1} DICTIONARY_KEY".format(
key, DICTIONARY_KEY))
return key
class Stringer(object):
"""Used for marking strings for localization.
Recommend usage as (using the recommended importing):
# For strings with no context objects (assuming it is used in a context
# in which __str__ will be called).
_("_hello world")
# Forcing the localization without a context object, for situations
# where we can't trust the operator overrides to happen, or when
# we can't trust the __str__ method to be called.
_("_hello world").f()
# Example string with one context object, that being a list.
_("_hello world").f(["hi there"])
# Example string with 2 number of context objects, one a string, and
# one a labeled dictionary.
_("_hello world").f("blah", chicken={"dog":"cat"})
"""
lookupString = None
"""The string to be used to lookup the translation string in the
localization dictionary. The lookup string does not need have any templating
in it, as it is not used for templating itself.
@type: str
"""
def __init__(self, lookupString):
"""Initializer.
@type lookupString: str
@param lookupString: The lookup key to our string localization
dictionary.
"""
self.lookupString = lookupString
def f(self, *context, **keyedcontext):
"""Attempt to (f)ormat and translate this string according to the
current locale dictionary. This function need not be called if the
class is going to be coerced into a string format, such as when
in a print statement, AND if string will not be accepting any
context arguments.
@type context: mixed
@param context: An optional set of context arguments that will be
dereferenced and passed to the format method on localization.
@type keyedcontext: mixed
@param keyedcontext: An optional set of labeled arguments that will
be dereferenced and passed into the format function.
"""
if len(context) and not len(keyedcontext):
return lookup(self.lookupString).format(*context)
elif len(keyedcontext) and not len(context):
return lookup(self.lookupString).format(**keyedcontext)
elif len(context) and len(keyedcontext):
return lookup(self.lookupString).format(*context, **keyedcontext)
else:
# No context, assume no formatting.
return lookup(self.lookupString)
def __str__(self):
"""Stringers are serialized via a lookup."""
return lookup(self.lookupString)
def __add__(self, other):
"""Attempt concatenation as string."""
return self.__str__() + other
    def __radd__(self, other):
        """Attempt right-side concatenation as string."""
        return other + self.__str__()
def __eq__(self, other):
"""Attempt to compare equality as string."""
return self.__str__() == other
    def __ne__(self, other):
        """Attempt to compare inequality as string."""
        return self.__str__() != other
```
#### File: wtfplanets/lib/events.py
```python
class EventObject(dict):
"""A bare object for passing event data to event listeners.
The following properties are provided:
name {str} The name of the event.
source {object} The object that is marked as the cause of the event
being published.
data {dict} Event specific data to be passed to the listener.
"""
def __init__(self, name="unnamed", source=None, data=None):
self.name = name
self.source = source
self.data = data or {}
class EventPublisher(object):
"""Implements a simple pub/sub interface.
Suitable as an instance (simple publisher) or a class mixin.
Requires call to __init__ from inheritor to initialize if used as a
mixin.
"""
_listener_id = 0L
def __init__(self, *args, **kwargs):
"""Initializer.
Accepts all arguments only as a convenience to subclasses. All
arguments are ignored.
"""
# Make it mixin friendly.
super(EventPublisher, self).__init__(*args, **kwargs)
# Listeners get organized as a hash of hashes.
self._event_listeners = {}
def countsubs(self):
"""Total number of event subscribers.
"""
return sum(len(event_list) for event, event_list in self._event_listeners.items())
def _next_listener_id(self):
"""Return the next key available with which to id a listener.
"""
EventPublisher._listener_id += 1
return EventPublisher._listener_id
def sub(self, event, callback):
"""Subscribe to a named event.
event {str} Name of the event to listen to.
callback {function} Callback function that will be passed one argument:
the EventObject.
return {tuple} A key that can be used to unsubscribe this listener
from this event.
"""
listener_id = self._next_listener_id()
if event not in self._event_listeners:
self._event_listeners[event] = {}
self._event_listeners[event][listener_id] = callback
# Should be considered opaque outside of the pub/sub world.
return event, listener_id
def pub(self, event, **kwargs):
"""Publish an event by name to any listeners listening.
event {str} Name of the event to publish.
target {mixed} Reference to object that should act as the target of
the event.
data {dict} Dictionary of data to be passed on to the listener.
"""
if event in self._event_listeners:
listeners = self._event_listeners[event]
for listener_id in listeners:
listeners[listener_id](EventObject(name=event, source=self, data=kwargs.copy()))
def clear_one(self, event_key):
"""Remove a specific event listener by key.
event_key {tuple} An opaque key for removing events.
"""
try:
del self._event_listeners.get(event_key[0])[event_key[1]]
except Exception:
# If there is no event to remove, don't explode.
pass
def clear_many(self, event=None):
"""Remove all event listeners, or a particular group of event listeners
by name.
event {str} The name of the group of event listeners to remove.
If not passed all event listeners are removed.
"""
if not event:
# nominate all listeners for garbage collection
self._event_listeners = {}
else:
try:
del self._event_listeners[event]
except KeyError:
# allow silent fail for unsubscribed names.
pass
def audit(self):
"""Debug and diagnostic of events currently subscribed.
returns {dict} with a "count" item listing current total listeners,
and a shallow copied "listeners" dictionary of all listeners
currently subscribed.
"""
return {
"count": self.countsubs(),
"listeners": self._event_listeners.copy()
}
class EventSubscriber(object):
"""Convenience class that aids in the removal of event listeners that
this object has subscribed to.
For use with objects that only intend to be listeners, not publishers,
although will not conflict with EventPublisher.
Requires call to __init__ from inheritor to initialize.
"""
def __init__(self, *args, **kwargs):
"""Initializer.
Accepts all arguments only as a convenience to subclasses. All
arguments are ignored.
"""
# Make it mixin friendly.
super(EventSubscriber, self).__init__(*args, **kwargs)
# A hash of lists of event keys to specific event subscriptions.
self._event_subs = {}
def subto(self, source, event, callback):
"""Subscribe to a particular event and allow for easy removal of
the listener at a later date.
source {EventPublisher} Instance that publishes events.
event {str} Name of the event to listen to.
callback {function} Callback function that will be passed one argument:
the EventObject.
Raises TypeError if source does not subclass EventPublisher.
"""
if isinstance(source, EventPublisher):
if event not in self._event_subs:
self._event_subs[event] = []
subscription_key = source.sub(event, callback)
self._event_subs[event].append({
"subscription_key": subscription_key,
"source": source
})
else:
raise TypeError("source must subclass EventPublisher")
def unsubfrom(self, event=None):
"""Unsubscribe from a list of events.
event {str} The name of the group of event listeners to remove.
If not passed all event listeners are removed that we have tracked.
"""
# Allow for one type of iteration loop.
if event == None:
# All events.
event_listeners = ((evname, subs) for evname, subs in self._event_subs.items())
elif event in self._event_subs:
# All events of a particular name.
event_listeners = ((event, self._event_subs[event]),)
else:
# No event, quit out.
return
for evname, subs in event_listeners:
while subs:
# Unsubscribe each event.
subscriber_dict = subs.pop()
subscriber_dict["source"].clear_one(subscriber_dict["subscription_key"])
# Get rid of the event list now that it is empty.
del self._event_subs[evname]
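# Illustrative sketch (not part of the original module): wiring a publisher
# and a subscriber together. The event name and payload are assumptions.
def _events_example():
    publisher = EventPublisher()
    subscriber = EventSubscriber()
    # The callback receives a single EventObject argument.
    subscriber.subto(publisher, "tick", lambda ev: None)
    publisher.pub("tick", count=1)
    subscriber.unsubfrom("tick")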
```
#### File: examples-python/multiproc/poolasync.py
```python
from multiprocessing import Pool
import time
def finite(myid, iters=0):
results = ["(id %s): start time is: %s" % (myid, time.time())]
for i in xrange(iters):
pass
results.append("(id %s): end time is: %s" % (myid, time.time()))
    # What gets returned gets caught by apply_async and passed to
# callback.
return results
def done(results):
print "We got the following results:"
print "\n".join(results)
if __name__ == '__main__':
pool = Pool(processes=1)
result = pool.apply_async(finite,
args=(100,),
kwds={"iters":10000000},
callback=done)
print "Will see before the results..."
result.wait()
# Could also do the following instead of the callback.
#print result.get()
```
#### File: examples-python/multiproc/thread.py
```python
import threading
import time
import random
class NamedObject(object):
"""Object identifiable by a name.
"""
def __init__(self, name):
self.name = name
def speak(self):
# arbitrary minor pause.
time.sleep(random.random())
print 'My name is {0}!'.format(self.name)
def worker(obj):
obj.speak()
if __name__ == '__main__':
name = "<NAME> %s"
threads = []
for i in range(10):
# Fill the qs
t = threading.Thread(target=worker, args=(NamedObject(name % i),))
t.start()
threads.append(t)
# Optional, should we wish to block for results.
# for t in threads:
# t.join()
```
#### File: examples-python/oo_lab/tests.py
```python
import unittest
import die
import dice
class TestDieClass(unittest.TestCase):
def setUp(self):
"""All methods beginning with substring 'test' will be executed.
"""
self.klass = die.Die
self.d = die.Die()
def tearDown(self):
self.klass = None
self.d = None
def test_init(self):
self.assertRaises(ValueError, self.klass, 0)
self.assertRaises(TypeError, self.klass, "hello")
def test_attrs(self):
self.assertEqual(self.d.faces, 6, "Expected default value.")
d20 = self.klass(20)
self.assertEqual(d20.faces, 20, "Expected default value.")
def test_roll(self):
self.assertEqual(type(self.d.roll()), int, "roll produces an int.")
def test_eq(self):
self.assertRaises(TypeError, self.d.__add__, "dogs")
# Some functional tests.
try:
self.d + 10
except BaseException as err:
self.fail(err)
try:
self.d + self.d
except BaseException as err:
self.fail(err)
class TestD6Class(TestDieClass):
def setUp(self):
"""All methods beginning with substring 'test' will be executed.
"""
self.klass = dice.D6
self.d = dice.D6()
def test_init(self):
# This will always raise a TypeError since we don't allow args.
self.assertRaises(TypeError, self.klass, 0)
self.assertRaises(TypeError, self.klass, "hello")
def test_attrs(self):
self.assertEqual(self.d.faces, 6, "Expected default value.")
class TestD20Class(TestDieClass):
def setUp(self):
"""All methods beginning with substring 'test' will be executed.
"""
self.klass = dice.D20
self.d = dice.D20()
def test_init(self):
# This will always raise a TypeError since we don't allow args.
self.assertRaises(TypeError, self.klass, 0)
self.assertRaises(TypeError, self.klass, "hello")
def test_attrs(self):
self.assertEqual(self.d.faces, 20, "Expected default value.")
if __name__ == '__main__':
# Tests are kicked off using the main static method.
unittest.main(verbosity=2)
# Watch the output, should get an "OK" result.
```
#### File: bulletin/notes/views.py
```python
from django.shortcuts import render
from notes.models import Note
def index(request):
context = {
# Get all of the notes in the database, in descending order.
# Instead of SQL, this is how Django provides a cross-database
# query engine.
'notes': Note.objects.all().order_by('-date')
}
# Map our context object that we just built, with all of our models,
# into our template. Whatever the results will be passed back as
# the response to this particular HTTP request.
return render(request, 'notes/index.html', context)
# CHANGEME: Class based instead of functional based view.
from django import forms
# A form that excludes fields we want to populate ourself via session data.
class NoteForm(forms.ModelForm):
class Meta:
model = Note
exclude = ('author','date',)
# We only allow creation of notes.
from datetime import datetime
from django.views.generic.edit import CreateView
class NoteCreate(CreateView):
form_class = NoteForm
model = Note
template_name = 'notes/add.html'
success_url = '/'
def form_valid(self, form):
# Populate the information ourselves that was excluded.
form.instance.author = self.request.user
form.instance.date = datetime.now()
# Delegate validation to parent class.
return super(NoteCreate, self).form_valid(form)
```
#### File: basics/examples/fabfile_0.py
```python
from fabric.api import run
def host_type():
run('uname -s')
def diskspace():
run('df')
```
#### File: pygame/06_petri/petri.py
```python
import pygame
import pygame.locals
from pygame.locals import *
from engine import GameWorld
# TEST our gameobject
from gameobject import GameObject
def petrimousedown(gameworld, event):
if event.button == 1:
# MODIFIED CODE
# TEST AGAIN
GameObject(gameworld=gameworld, pos=event.pos)
elif event.button == 3:
# MODIFIED CODE
# TEST AGAIN
GameObject(gameworld=gameworld, pos=event.pos)
def petrikeydown(gameworld, event):
if event.key == K_ESCAPE:
print "Erasing the board."
if __name__ == "__main__":
pygame.init()
pygame.display.set_caption('Petri Dish')
petri = GameWorld()
petri.addcallback("mousedown", petrimousedown)
petri.addcallback("keydown", petrikeydown)
petri.run()
pygame.quit()
```
#### File: pygame/08_petri/blob.py
```python
import pygame
import pygame.locals
from pygame.locals import *
from random import random
from gameobject import GameObject
class Blob(GameObject):
backgroundColor = (55, 151, 144)
classification = "blob"
# The life of a blob is determined by its health. The blob loses one
# health every frame (essentially every update) and gains health when
# it eats food.
# Some blobs are faster than others, but every blob starts out with the
# same health.
health = 1000
def __init__(self, gameworld, pos):
super(Blob, self).__init__(gameworld, pos)
# The update function will be called each frame to allow our
# objects to update themselves.
def update(self, gameworld):
# The first thing we check is whether or not the blob is dead.
if self.checkhealth(gameworld) == False:
# Quick exit, we're dead.
return
# Checks the health of our blob and, if we are dead (health <= 0)
# we remove ourselves from the world object.
# We return a boolean, True if still alive, False if dead.
def checkhealth(self, gameworld):
# Update our health every frame.
self.health -= 1
if self.health <= 0:
# Poor dead blobby.
print "Sad news blob #{0} has passed on.".format(self.uniqueid)
gameworld.removeobj(self)
return False
else:
# We're still alive!
return True
```
#### File: examples-python/tkinter/scrolltext.py
```python
from Tkinter import *
sbarPackConf = {"side":RIGHT, "fill":Y}
textPackConf = {"side":LEFT, "expand":YES, "fill":BOTH}
framePackConf = {"expand":YES, "fill":BOTH}
class ScrollText(Frame):
def __init__(self, parent=None):
Frame.__init__(self, parent)
self.pack(**framePackConf)
self.makewidgets()
self.isMinimal = False
def makewidgets(self):
sbar = Scrollbar(self)
text = Text(self)
sbar.config(command=text.yview)
sbar.pack(**sbarPackConf)
self.sbar = sbar
text.config(yscrollcommand=sbar.set, font=('courier', 20, 'normal'), height=20)
text.pack(**textPackConf)
self.text = text
def settext(self, text=''):
self.text.delete('1.0', END)
self.text.insert('1.0', text)
self.text.mark_set(INSERT, '1.0')
self.text.focus()
def gettext(self):
return self.text.get('1.0', END+'-1c')
def findtext(self, target=''):
t = self.text
where = t.search(target, INSERT+"-1c", END)
if where:
pastit = where + ('+%dc' % len(target))
t.tag_remove(SEL, '1.0', END)
t.tag_add(SEL, where, pastit)
t.mark_set(INSERT, pastit)
t.see(INSERT)
t.focus()
def toggleminimal(self):
if self.isMinimal == False:
self.isMinimal = True
self.sbar.pack_forget()
else:
self.isMinimal = False
self.pack_forget()
self.text.pack_forget()
self.pack(**framePackConf)
self.sbar.pack(**sbarPackConf)
self.text.pack(**textPackConf)
if __name__ == "__main__":
s = ScrollText()
s.settext("Hello\nworld!")
print s.gettext()
s.master.bind('<Escape>', lambda a: s.toggleminimal())
s.master.bind('<Control-f>', lambda a: s.findtext('Hello'))
s.mainloop()
```
|
{
"source": "jeremy-palmer/sdctools",
"score": 2
}
|
#### File: sdctools/sdctools/untar.py
```python
import tarfile
import tempfile
import csv
import boto3
import datetime
import random
import botocore.exceptions
import logging
# entry point - Lambda should call this method
def unbundle_pon(src_bucket_name, pon_key, dest_bucket_name, dest_prefix):
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s',
filename='/Users/jeremypalmer/desktop/xampleeeeeeeeee.log',
filemode='a')
logging.info('Start processing %s from %s' % (pon_key, src_bucket_name))
# determine compression type as this impacts level of nesting in file
file_ext = pon_key.split('.')[-1]
logging.info('file extension is %s' % file_ext)
s3 = boto3.resource('s3')
bucket = s3.Bucket(src_bucket_name)
# single .TAR file can just be downloaded and processed
if file_ext.upper() == 'TAR':
temp_s3 = tempfile.SpooledTemporaryFile()
bucket.download_fileobj(pon_key, temp_s3)
temp_s3.seek(0)
temp_tar = tarfile.open(name=None, mode='r', fileobj=temp_s3)
__untar_pon(temp_tar, dest_bucket_name, dest_prefix)
elif file_ext.upper() == 'GZ':
__extract_tars(bucket, pon_key, dest_bucket_name, dest_prefix)
else:
raise ValueError('Invalid PON File Type')
logging.info('Finished processing %s from %s' % (pon_key, src_bucket_name))
return True
# returns a filename for extracted PON data that does not already exist in S3
# file_type is the PON file type
# e.g. 'ponOltUtilTxOntHistory'
# bucket_name is the bucket extracted PON data is sent to
# prefix (optional) is the S3 folder in bucket_name (include the last / here)
# e.g. 'sdc_pom_extracted/'
def get_filename(file_type, bucket_name, prefix=''):
# build string and check if key already exists
s3 = boto3.resource('s3')
while True:
new_name = file_type \
+ '_' \
+ datetime.datetime.now().strftime('%Y%m%d-%H%M') \
+ '_' \
+ str(random.randint(1, 999999)) \
+ '.csv'
try:
s3.Object(bucket_name, prefix + new_name).load()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404":
# The object does not exist.
break
else:
# Something else has gone wrong.
raise
        else:
            # Key already exists in the bucket; loop again for a new name.
            continue
return prefix + new_name
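# Illustrative call (not part of the original module); bucket and prefix are
# placeholders, and the timestamp/random suffix will differ on each run:
#   get_filename('ponOltUtilHistory', 'my-extract-bucket', 'sdc_pon_extracted/')
#   -> 'sdc_pon_extracted/ponOltUtilHistory_20200101-1200_123456.csv'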
# basic wrapper to upload to S3 and capture common errors
def upload_to_s3(bucket_name, key_name, file_contents):
s3 = boto3.resource('s3')
try:
s3 = boto3.resource('s3')
r = s3.Bucket(bucket_name).put_object(Key=key_name, Body=file_contents)
return r
except botocore.exceptions.InvalidMaxRetryAttemptsError:
print('Max Retry fail')
raise
# extracts multiple TAR files from a high level TAR.GZ and calls __untar_pon for each
# TODO: complete the extraction logic & test
def __extract_tars(bucket, pon_key, dest_bucket_name, dest_prefix):
parent_temp = tempfile.SpooledTemporaryFile()
bucket.download_fileobj(pon_key, parent_temp)
parent_tar = tarfile.open(name=None, mode='r', fileobj=parent_temp)
for tarinfo in parent_tar:
if tarinfo.isreg():
            print(tarinfo.name)
# takes a single .tar file and returns new fileobj(s) with normalised CSV data
def __untar_pon(pon_tarfile, dest_bucket_name, dest_prefix):
# make sure the prefix ends in forward slash
if not dest_prefix.endswith('/'):
logging.info("Adding '/' to end of destination bucket prefix")
dest_prefix = dest_prefix + '/'
for info in pon_tarfile:
if info.isreg() and info.name.split('.')[-1].upper() == 'CSV':
file_type = None
timestamp = None
object_type = ''
ne_name = ''
ne_type = ''
headers = []
raw_data = []
data_row = []
data_rows = []
logging.info('Processing %s' % info.name)
if info.name == 'iSAM_ponOltUtilTxOntHistoryData.csv':
file_type = 'ponOltUtilTxOntHistory'
elif info.name == 'iSAM_ponOltUtilRxOntHistoryData.csv':
file_type = 'ponOltUtilRxOntHistory'
elif info.name == 'iSAM_ponOltUtilHistoryData.csv':
file_type = 'ponOltUtilHistory'
elif info.name == 'iSAM_ontOltUtilBulkHistoryData.csv':
file_type = 'ontOltUtilBulkHistory'
elif info.name == 'iSAM_ng2ChannelPairOltUtilTxOntHistoryData.csv':
file_type = 'ng2CpOltTxOntHistory'
elif info.name == 'iSAM_ng2ChannelPairOltUtilRxOntHistoryData.csv':
file_type = 'ng2CpOltRxOntHistory'
elif info.name == 'iSAM_ng2ChannelPairOltUtilHistoryData.csv':
file_type = 'ng2CpOltUtilHistory'
elif info.name == 'iSAM_ng2OntOltUtilBulkHistoryData.csv':
file_type = 'ng2OntOltUtilBulkHistory'
if file_type is not None:
for line in pon_tarfile.extractfile(info.name):
csv_line = csv.reader([line.decode('utf-8')], quotechar='"')
# get the raw values
for r in csv_line:
try:
# get the file level variables and headers
if r[0] == 'Time stamp':
timestamp = r[1]
elif r[0] == 'Object Type':
object_type = r[1]
elif r[0] == 'NE Name':
ne_name = r[1]
elif r[0] == 'NE Type/Release':
ne_type = r[1]
elif r[0] == 'Object ID':
headers = ['file_type', 'time_stamp', 'objecttype', 'nename', 'ne_type'
, r[0]
, r[1] + '_1', r[1] + '_2', r[1] + '_3'
, r[2] + '_1', r[2] + '_2', r[2] + '_3']
else: # assuming these are data rows
raw_data = [(file_type, timestamp, object_type, ne_name, ne_type, r[0], r[1], r[2])]
split_1 = raw_data[0][6].strip('{}').split()
split_2 = raw_data[0][7].strip('{}').split()
# data_row = [(file_type, timestamp, object_type, ne_name, ne_type, r[0]
# , split_1[0].strip(','), split_1[1].strip(','), split_1[2].strip(',')
# , split_2[0].strip(','), split_2[1].strip(','), split_2[2].strip(','))]
# data_rows.append(data_row)
# normalise the arrays by splitting each row into three rows
for i in range (0,3):
data_row = [(file_type, timestamp, object_type, ne_name, ne_type, r[0]
, split_1[i].strip(','), split_2[i].strip(','))]
data_rows.append(data_row)
except IndexError:
pass
with tempfile.SpooledTemporaryFile(mode='wb+') as temp_outfile:
# no headers, if required add: temp.write((','.join(map(str, headers)) + '\n').encode('utf-8'))
for row in data_rows:
# build a comma separated string & remove {} from array fields
delimited_row = (','.join(map(str, row)).strip('()') + '\n').encode('utf-8')
# replace single quotes from list obj with double for CSV output
a = delimited_row.decode().replace(" '",'"').replace("'",'"').encode('utf-8')
temp_outfile.write(a)
temp_outfile.seek(0)
outfile_key = get_filename(file_type=file_type
, bucket_name=dest_bucket_name
, prefix=dest_prefix)
logging.info('New S3 file name is %s' % outfile_key)
upload_to_s3(bucket_name=dest_bucket_name
, key_name=outfile_key
, file_contents=temp_outfile)
return True
```
|
{
"source": "jeremyparadie/CASPER",
"score": 3
}
|
#### File: source/robots/base_robot_commands.py
```python
def command(time, command):
"""
    desc: this is the base function used to send commands to a robot. The first
    argument is the number of seconds after the trial starts that the command
    should be run. The second argument is the command that should be sent;
    surround the command in quotes.
"""
global _robot_commands
_robot_commands.append((time, command))
def phase(time, name):
"""
    desc: this is the base function used to send the current phase. The first
    argument is the number of seconds after the trial starts that
    this phase should start. The second argument is the name of the phase,
    in quotes.
"""
_phase_commands.append((time, name))
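# Illustrative usage (not part of the original file); the command text and
# timings below are assumptions:
#   command(0, "LED ON")      # send "LED ON" as soon as the trial starts
#   phase(10, "exploration")  # mark the start of an "exploration" phase at t=10s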
```
#### File: source/robots/example_squirrel.py
```python
def skee(time, angle):
    command(time, str(angle))  # str(angle) converts the angle to a string so it can be sent as a command, whether or not it was already a string
```
|
{
"source": "jeremypedersen/terraformExamples",
"score": 3
}
|
#### File: abc/alibaba-slack-integration/index_http.py
```python
import logging
import requests
import json
import urllib.parse
# Pretty-print and properly escape the JSON
# text passed to us by CloudMonitor, so that
# we can display it in Slack
def pprint_json(leading_char, parsed_json):
output_text = '\n'
for key in parsed_json:
item = parsed_json[key]
if isinstance(item, dict): # We need to go deeper!
output_text += key + ':'
output_text += pprint_json(leading_char + '\t', item)
else:
output_text += "{}{}: {}\n".format(leading_char, key, item)
return output_text
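# Illustrative behaviour (not part of the original function); the CloudMonitor
# payload below is an assumption:
#   pprint_json('', {"alertName": "cpu_high", "dimensions": {"instanceId": "i-abc"}})
#   returns a newline-separated "key: value" listing, with nested dictionaries
#   expanded recursively under their key.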
# Function body: takes JSON from CloudMonitor callbacks and sends it on to
# Slack as properly formatted displayable text
def handler(environ, start_response):
logger = logging.getLogger()
context = environ['fc.context']
request_uri = environ['fc.request_uri']
# This is left in as an example of how you would process
# request parameters. We don't use it as we are only
# interested in the body of the POST received from
# CloudMonitor
for k, v in environ.items():
if k.startswith('HTTP_'):
# process custom request headers
pass
# Parse JSON and then POST to Slack Webhook
try:
request_body_size = int(environ.get('CONTENT_LENGTH', 0))
except (ValueError):
request_body_size = 0
request_body = environ['wsgi.input'].read(request_body_size)
# Decode the URL-encoded parameters passed by CloudMonitor
try:
request_body_string = urllib.parse.unquote(request_body.decode())
except:
output = "Uh oh! Unable to decode and unquote the URL-formatted request body...check your Function Compute logs."
try:
request_body_json = urllib.parse.parse_qs(request_body_string)
except:
output = "Uh oh! Unable to parse the URL query string parameters passed by CloudMonitor...check your Function Compute logs."
try:
output = pprint_json('', request_body_json)
except:
output = "Uh oh! Couldn't pretty-print the JSON passed to us by CloudMonitor...check your Function Compute logs."
# Log the request that we received, for debugging purposes
logger.info(request_body)
# URL of the Slack webhook
end_url = '<KEY>'
headers = {'Content-type': 'application/json'}
# Send message to slack
payload = {'text': output}
r = requests.post(end_url,headers=headers, data=json.dumps(payload))
# Send response (to indicate success or failure in posting to slack)
# FIXME: Use status from variable 'r' here to indicate success or failure communicating
# with slack
status = '200 OK'
response_headers = [('Content-type', 'text/plain')]
start_response(status, response_headers)
# Output formatted text for debugging purposes
return [output.encode()]
```
|
{
"source": "jeremyplichtafc/cloud-custodian",
"score": 2
}
|
#### File: c7n/resources/appelb.py
```python
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import logging
import six
from collections import defaultdict
from c7n.actions import ActionRegistry, BaseAction
from c7n.exceptions import PolicyValidationError
from c7n.filters import (
Filter, FilterRegistry, DefaultVpcBase, MetricsFilter, ValueFilter)
import c7n.filters.vpc as net_filters
from c7n import tags
from c7n.manager import resources
from c7n.query import QueryResourceManager, DescribeSource, ConfigSource
from c7n.utils import (
local_session, chunks, type_schema, get_retry, set_annotation)
from c7n.resources.shield import IsShieldProtected, SetShieldProtection
log = logging.getLogger('custodian.app-elb')
@resources.register('app-elb')
class AppELB(QueryResourceManager):
"""Resource manager for v2 ELBs (AKA ALBs).
"""
class resource_type(object):
service = 'elbv2'
type = 'loadbalancer/app'
enum_spec = ('describe_load_balancers', 'LoadBalancers', None)
name = 'LoadBalancerName'
id = 'LoadBalancerArn'
filter_name = "Names"
filter_type = "list"
dimension = "LoadBalancer"
date = 'CreatedTime'
config_type = 'AWS::ElasticLoadBalancingV2::LoadBalancer'
retry = staticmethod(get_retry(('Throttling',)))
@classmethod
def get_permissions(cls):
# override as the service is not the iam prefix
return ("elasticloadbalancing:DescribeLoadBalancers",
"elasticloadbalancing:DescribeLoadBalancerAttributes",
"elasticloadbalancing:DescribeTags")
def get_arn(self, r):
return r[self.resource_type.id]
def get_source(self, source_type):
if source_type == 'describe':
return DescribeAppElb(self)
elif source_type == 'config':
return ConfigAppElb(self)
raise ValueError("Unsupported source: %s for %s" % (
source_type, self.resource_type.config_type))
class DescribeAppElb(DescribeSource):
def get_resources(self, ids, cache=True):
"""Support server side filtering on arns or names
"""
if ids[0].startswith('arn:'):
params = {'LoadBalancerArns': ids}
else:
params = {'Names': ids}
return self.query.filter(self.manager, **params)
def augment(self, albs):
_describe_appelb_tags(
albs,
self.manager.session_factory,
self.manager.executor_factory,
self.manager.retry)
return albs
class ConfigAppElb(ConfigSource):
def load_resource(self, item):
resource = super(ConfigAppElb, self).load_resource(item)
item_tags = item['supplementaryConfiguration']['Tags']
# Config originally stored supplementaryconfig on elbv2 as json
# strings. Support that format for historical queries.
if isinstance(item_tags, six.string_types):
item_tags = json.loads(item_tags)
resource['Tags'] = [
{'Key': t['key'], 'Value': t['value']} for t in item_tags]
item_attrs = item['supplementaryConfiguration'][
'LoadBalancerAttributes']
if isinstance(item_attrs, six.string_types):
item_attrs = json.loads(item_attrs)
# Matches annotation of AppELBAttributeFilterBase filter
resource['Attributes'] = {
attr['key']: parse_attribute_value(attr['value']) for
attr in item_attrs}
return resource
def _describe_appelb_tags(albs, session_factory, executor_factory, retry):
client = local_session(session_factory).client('elbv2')
def _process_tags(alb_set):
alb_map = {alb['LoadBalancerArn']: alb for alb in alb_set}
results = retry(client.describe_tags, ResourceArns=list(alb_map.keys()))
for tag_desc in results['TagDescriptions']:
if ('ResourceArn' in tag_desc and
tag_desc['ResourceArn'] in alb_map):
alb_map[tag_desc['ResourceArn']]['Tags'] = tag_desc['Tags']
with executor_factory(max_workers=2) as w:
list(w.map(_process_tags, chunks(albs, 20)))
AppELB.filter_registry.register('tag-count', tags.TagCountFilter)
AppELB.filter_registry.register('marked-for-op', tags.TagActionFilter)
AppELB.filter_registry.register('shield-enabled', IsShieldProtected)
AppELB.filter_registry.register('network-location', net_filters.NetworkLocation)
AppELB.action_registry.register('set-shield', SetShieldProtection)
@AppELB.filter_registry.register('metrics')
class AppElbMetrics(MetricsFilter):
"""Filter app load balancer by metric values.
See available metrics here
https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-cloudwatch-metrics.html
Custodian defaults to specifying dimensions for the app elb only.
Target Group dimension not supported atm.
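    :example:
    An illustrative policy (metric name and threshold are placeholders):
    .. code-block:: yaml
            policies:
              - name: appelb-low-request-count
                resource: app-elb
                filters:
                  - type: metrics
                    name: RequestCount
                    statistics: Sum
                    days: 7
                    value: 100
                    op: less-than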
"""
def get_dimensions(self, resource):
return [{
'Name': self.model.dimension,
'Value': 'app/%s/%s' % (
resource[self.model.name],
resource[self.model.id].rsplit('/')[-1])}]
@AppELB.filter_registry.register('security-group')
class SecurityGroupFilter(net_filters.SecurityGroupFilter):
RelatedIdsExpression = "SecurityGroups[]"
@AppELB.filter_registry.register('subnet')
class SubnetFilter(net_filters.SubnetFilter):
RelatedIdsExpression = "AvailabilityZones[].SubnetId"
@AppELB.filter_registry.register('vpc')
class VpcFilter(net_filters.VpcFilter):
RelatedIdsExpression = "VpcId"
@AppELB.filter_registry.register('waf-enabled')
class WafEnabled(Filter):
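    """Filter Application Load Balancers by their association with a
    waf-regional WebACL.
    Matches resources whose association state (optionally checked against a
    specific `web-acl`, given by name or id) equals the boolean `state`
    (default False).
    """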
schema = type_schema(
'waf-enabled', **{
'web-acl': {'type': 'string'},
'state': {'type': 'boolean'}})
permissions = ('waf-regional:ListResourcesForWebACL', 'waf-regional:ListWebACLs')
# TODO verify name uniqueness within region/account
# TODO consider associated resource fetch in augment
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client(
'waf-regional')
target_acl = self.data.get('web-acl')
state = self.data.get('state', False)
name_id_map = {}
resource_map = {}
wafs = self.manager.get_resource_manager('waf-regional').resources()
for w in wafs:
if 'c7n:AssociatedResources' not in w:
arns = client.list_resources_for_web_acl(
WebACLId=w['WebACLId']).get('ResourceArns', [])
w['c7n:AssociatedResources'] = arns
name_id_map[w['Name']] = w['WebACLId']
for r in w['c7n:AssociatedResources']:
resource_map[r] = w['WebACLId']
target_acl_id = name_id_map.get(target_acl, target_acl)
# generally frown on runtime validation errors, but also frown on
# api calls during validation.
if target_acl_id not in name_id_map.values():
raise ValueError("Invalid target acl:%s, acl not found" % target_acl)
arn_key = self.manager.resource_type.id
state_map = {}
for r in resources:
arn = r[arn_key]
if arn in resource_map:
r['c7n_webacl'] = resource_map[arn]
if not target_acl:
state_map[arn] = True
continue
r_acl = resource_map[arn]
if r_acl == target_acl_id:
state_map[arn] = True
continue
state_map[arn] = False
else:
state_map[arn] = False
return [r for r in resources if state_map[r[arn_key]] == state]
@AppELB.action_registry.register('set-waf')
class SetWaf(BaseAction):
"""Enable/Disable waf protection on applicable resource.
"""
permissions = ('waf-regional:AssociateWebACL', 'waf-regional:ListWebACLs')
schema = type_schema(
'set-waf', required=['web-acl'], **{
'web-acl': {'type': 'string'},
# 'force': {'type': 'boolean'},
'state': {'type': 'boolean'}})
def validate(self):
found = False
for f in self.manager.iter_filters():
if isinstance(f, WafEnabled):
found = True
break
if not found:
# try to ensure idempotent usage
raise PolicyValidationError(
"set-waf should be used in conjunction with waf-enabled filter on %s" % (
self.manager.data,))
return self
def process(self, resources):
wafs = self.manager.get_resource_manager('waf-regional').resources()
name_id_map = {w['Name']: w['WebACLId'] for w in wafs}
target_acl = self.data.get('web-acl')
target_acl_id = name_id_map.get(target_acl, target_acl)
state = self.data.get('state', True)
if state and target_acl_id not in name_id_map.values():
raise ValueError("invalid web acl: %s" % (target_acl_id))
client = local_session(
self.manager.session_factory).client('waf-regional')
arn_key = self.manager.resource_type.id
# TODO implement force to reassociate.
# TODO investigate limits on waf association.
for r in resources:
if state:
client.associate_web_acl(
WebACLId=target_acl_id, ResourceArn=r[arn_key])
else:
client.disassociate_web_acl(
WebACLId=target_acl_id, ResourceArn=r[arn_key])
@AppELB.action_registry.register('set-s3-logging')
class SetS3Logging(BaseAction):
"""Action to enable/disable S3 logging for an application loadbalancer.
:example:
.. code-block:: yaml
policies:
- name: elbv2-test
resource: app-elb
filters:
- type: value
key: Attributes."access_logs.s3.enabled"
value: False
actions:
                  - type: set-s3-logging
bucket: elbv2logtest
prefix: dahlogs
"""
schema = type_schema(
'set-s3-logging',
state={'enum': ['enabled', 'disabled']},
bucket={'type': 'string'},
prefix={'type': 'string'},
required=('state',))
permissions = ("elasticloadbalancing:ModifyLoadBalancerAttributes",)
def validate(self):
if self.data.get('state') == 'enabled':
if 'bucket' not in self.data or 'prefix' not in self.data:
raise PolicyValidationError((
"alb logging enablement requires `bucket` "
"and `prefix` specification on %s" % (self.manager.data,)))
return self
def process(self, resources):
client = local_session(self.manager.session_factory).client('elbv2')
for elb in resources:
elb_arn = elb['LoadBalancerArn']
attributes = [{
'Key': 'access_logs.s3.enabled',
'Value': (
                    self.data.get('state') == 'enabled' and 'true' or 'false')}]
if self.data.get('state') == 'enabled':
attributes.append({
'Key': 'access_logs.s3.bucket',
'Value': self.data['bucket']})
prefix_template = self.data['prefix']
info = {t['Key']: t['Value'] for t in elb.get('Tags', ())}
info['DNSName'] = elb.get('DNSName', '')
info['AccountId'] = elb['LoadBalancerArn'].split(':')[4]
info['LoadBalancerName'] = elb['LoadBalancerName']
attributes.append({
'Key': 'access_logs.s3.prefix',
'Value': prefix_template.format(**info)})
self.manager.retry(
client.modify_load_balancer_attributes,
LoadBalancerArn=elb_arn, Attributes=attributes)
@AppELB.action_registry.register('mark-for-op')
class AppELBMarkForOpAction(tags.TagDelayedAction):
"""Action to create a delayed action on an ELB to start at a later date
:example:
.. code-block:: yaml
policies:
- name: appelb-failed-mark-for-op
resource: app-elb
filters:
- "tag:custodian_elb_cleanup": absent
- State: failed
actions:
- type: mark-for-op
tag: custodian_elb_cleanup
msg: "AppElb failed: {op}@{action_date}"
op: delete
days: 1
"""
batch_size = 1
@AppELB.action_registry.register('tag')
class AppELBTagAction(tags.Tag):
"""Action to create tag/tags on an ELB
:example:
.. code-block:: yaml
policies:
- name: appelb-create-required-tag
resource: app-elb
filters:
- "tag:RequiredTag": absent
actions:
- type: tag
key: RequiredTag
value: RequiredValue
"""
batch_size = 1
permissions = ("elasticloadbalancing:AddTags",)
def process_resource_set(self, client, resource_set, ts):
client.add_tags(
ResourceArns=[alb['LoadBalancerArn'] for alb in resource_set],
Tags=ts)
@AppELB.action_registry.register('remove-tag')
class AppELBRemoveTagAction(tags.RemoveTag):
"""Action to remove tag/tags from an ELB
:example:
.. code-block:: yaml
policies:
- name: appelb-delete-expired-tag
resource: app-elb
filters:
- "tag:ExpiredTag": present
actions:
- type: remove-tag
tags: ["ExpiredTag"]
"""
batch_size = 1
permissions = ("elasticloadbalancing:RemoveTags",)
def process_resource_set(self, client, resource_set, tag_keys):
client.remove_tags(
ResourceArns=[alb['LoadBalancerArn'] for alb in resource_set],
TagKeys=tag_keys)
@AppELB.action_registry.register('delete')
class AppELBDeleteAction(BaseAction):
"""Action to delete an ELB
To avoid unwanted deletions of ELB, it is recommended to apply a filter
to the rule
:example:
.. code-block:: yaml
policies:
- name: appelb-delete-failed-elb
resource: app-elb
filters:
- State: failed
actions:
- delete
"""
schema = type_schema('delete', force={'type': 'boolean'})
permissions = (
"elasticloadbalancing:DeleteLoadBalancer",
"elasticloadbalancing:ModifyLoadBalancerAttributes",)
def process(self, load_balancers):
client = local_session(self.manager.session_factory).client('elbv2')
for lb in load_balancers:
self.process_alb(client, lb)
def process_alb(self, client, alb):
try:
if self.data.get('force'):
client.modify_load_balancer_attributes(
LoadBalancerArn=alb['LoadBalancerArn'],
Attributes=[{
'Key': 'deletion_protection.enabled',
'Value': 'false',
}])
self.manager.retry(
client.delete_load_balancer, LoadBalancerArn=alb['LoadBalancerArn'])
except client.exceptions.LoadBalancerNotFoundException:
pass
except client.exceptions.OperationNotPermittedException as e:
self.log.warning(
"Exception trying to delete ALB: %s error: %s",
alb['LoadBalancerArn'], e)
class AppELBListenerFilterBase(object):
""" Mixin base class for filters that query LB listeners.
"""
permissions = ("elasticloadbalancing:DescribeListeners",)
def initialize(self, albs):
client = local_session(self.manager.session_factory).client('elbv2')
self.listener_map = defaultdict(list)
for alb in albs:
results = client.describe_listeners(
LoadBalancerArn=alb['LoadBalancerArn'])
self.listener_map[alb['LoadBalancerArn']] = results['Listeners']
def parse_attribute_value(v):
if v.isdigit():
v = int(v)
elif v == 'true':
v = True
elif v == 'false':
v = False
return v
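# Examples of the coercion performed above:
#   parse_attribute_value('60')    -> 60
#   parse_attribute_value('true')  -> True
#   parse_attribute_value('false') -> False
#   parse_attribute_value('my-log-bucket') -> 'my-log-bucket' (left as a string)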
class AppELBAttributeFilterBase(object):
""" Mixin base class for filters that query LB attributes.
"""
def initialize(self, albs):
client = local_session(self.manager.session_factory).client('elbv2')
def _process_attributes(alb):
if 'Attributes' not in alb:
alb['Attributes'] = {}
results = client.describe_load_balancer_attributes(
LoadBalancerArn=alb['LoadBalancerArn'])
# flatten out the list of dicts and cast
for pair in results['Attributes']:
k = pair['Key']
v = parse_attribute_value(pair['Value'])
alb['Attributes'][k] = v
with self.manager.executor_factory(max_workers=2) as w:
list(w.map(_process_attributes, albs))
@AppELB.filter_registry.register('is-logging')
class IsLoggingFilter(Filter, AppELBAttributeFilterBase):
""" Matches AppELBs that are logging to S3.
    Bucket and prefix are optional.
:example:
.. code-block:: yaml
policies:
- name: alb-is-logging-test
resource: app-elb
filters:
- type: is-logging
- name: alb-is-logging-bucket-and-prefix-test
resource: app-elb
filters:
- type: is-logging
bucket: prodlogs
prefix: alblogs
"""
permissions = ("elasticloadbalancing:DescribeLoadBalancerAttributes",)
schema = type_schema('is-logging',
bucket={'type': 'string'},
prefix={'type': 'string'}
)
def process(self, resources, event=None):
self.initialize(resources)
bucket_name = self.data.get('bucket', None)
bucket_prefix = self.data.get('prefix', None)
return [alb for alb in resources
if alb['Attributes']['access_logs.s3.enabled'] and
(not bucket_name or bucket_name == alb['Attributes'].get(
'access_logs.s3.bucket', None)) and
(not bucket_prefix or bucket_prefix == alb['Attributes'].get(
'access_logs.s3.prefix', None))
]
@AppELB.filter_registry.register('is-not-logging')
class IsNotLoggingFilter(Filter, AppELBAttributeFilterBase):
""" Matches AppELBs that are NOT logging to S3.
or do not match the optional bucket and/or prefix.
:example:
.. code-block:: yaml
policies:
- name: alb-is-not-logging-test
resource: app-elb
filters:
- type: is-not-logging
- name: alb-is-not-logging-bucket-and-prefix-test
resource: app-elb
filters:
- type: is-not-logging
bucket: prodlogs
prefix: alblogs
"""
permissions = ("elasticloadbalancing:DescribeLoadBalancerAttributes",)
schema = type_schema('is-not-logging',
bucket={'type': 'string'},
prefix={'type': 'string'}
)
def process(self, resources, event=None):
self.initialize(resources)
bucket_name = self.data.get('bucket', None)
bucket_prefix = self.data.get('prefix', None)
return [alb for alb in resources
if alb['Type'] == 'application' and (
not alb['Attributes']['access_logs.s3.enabled'] or (
bucket_name and bucket_name != alb['Attributes'].get(
'access_logs.s3.bucket', None)) or (
bucket_prefix and bucket_prefix != alb['Attributes'].get(
'access_logs.s3.prefix', None)))]
class AppELBTargetGroupFilterBase(object):
""" Mixin base class for filters that query LB target groups.
"""
def initialize(self, albs):
self.target_group_map = defaultdict(list)
target_groups = self.manager.get_resource_manager(
'app-elb-target-group').resources()
for target_group in target_groups:
for load_balancer_arn in target_group['LoadBalancerArns']:
self.target_group_map[load_balancer_arn].append(target_group)
@AppELB.filter_registry.register('listener')
class AppELBListenerFilter(ValueFilter, AppELBListenerFilterBase):
"""Filter ALB based on matching listener attributes
Adding the `matched` flag will filter on previously matched listeners
:example:
.. code-block:: yaml
policies:
- name: app-elb-invalid-ciphers
resource: app-elb
filters:
- type: listener
key: Protocol
value: HTTPS
- type: listener
key: SslPolicy
value: ['ELBSecurityPolicy-TLS-1-1-2017-01','ELBSecurityPolicy-TLS-1-2-2017-01']
op: ni
matched: true
actions:
- type: modify-listener
sslpolicy: "ELBSecurityPolicy-TLS-1-2-2017-01"
"""
schema = type_schema(
'listener', rinherit=ValueFilter.schema, matched={'type': 'boolean'})
permissions = ("elasticloadbalancing:DescribeLoadBalancerAttributes",)
def validate(self):
if not self.data.get('matched'):
return
listeners = list(self.manager.iter_filters())
found = False
for f in listeners[:listeners.index(self)]:
if not f.data.get('matched', False):
found = True
break
if not found:
raise PolicyValidationError(
"matched listener filter, requires preceding listener filter on %s " % (
self.manager.data,))
return self
def process(self, albs, event=None):
self.initialize(albs)
return super(AppELBListenerFilter, self).process(albs, event)
def __call__(self, alb):
listeners = self.listener_map[alb['LoadBalancerArn']]
if self.data.get('matched', False):
listeners = alb.pop('c7n:MatchedListeners', [])
found_listeners = False
for listener in listeners:
if self.match(listener):
set_annotation(alb, 'c7n:MatchedListeners', listener)
found_listeners = True
return found_listeners
@AppELB.action_registry.register('modify-listener')
class AppELBModifyListenerPolicy(BaseAction):
"""Action to modify the policy for an App ELB
:example:
.. code-block:: yaml
policies:
- name: appelb-modify-listener
resource: app-elb
filters:
- type: listener
key: Protocol
value: HTTP
actions:
- type: modify-listener
protocol: HTTPS
sslpolicy: "ELBSecurityPolicy-TLS-1-2-2017-01"
certificate: "arn:aws:acm:region:123456789012:certificate/12345678-\
1234-1234-1234-123456789012"
"""
schema = type_schema(
'modify-listener',
port={'type': 'integer'},
protocol={'enum': ['HTTP', 'HTTPS']},
sslpolicy={'type': 'string'},
certificate={'type': 'string'}
)
permissions = ("elasticloadbalancing:ModifyListener",)
def validate(self):
for f in self.manager.iter_filters():
if f.type == 'listener':
return self
raise PolicyValidationError(
"modify-listener action requires the listener filter %s" % (
self.manager.data,))
def process(self, load_balancers):
args = {}
if 'port' in self.data:
args['Port'] = self.data.get('port')
if 'protocol' in self.data:
args['Protocol'] = self.data.get('protocol')
if 'sslpolicy' in self.data:
args['SslPolicy'] = self.data.get('sslpolicy')
if 'certificate' in self.data:
args['Certificates'] = [{'CertificateArn': self.data.get('certificate')}]
client = local_session(self.manager.session_factory).client('elbv2')
for alb in load_balancers:
for matched_listener in alb.get('c7n:MatchedListeners', ()):
client.modify_listener(
ListenerArn=matched_listener['ListenerArn'],
**args)
@AppELB.filter_registry.register('healthcheck-protocol-mismatch')
class AppELBHealthCheckProtocolMismatchFilter(Filter,
AppELBTargetGroupFilterBase):
"""Filter AppELBs with mismatched health check protocols
A mismatched health check protocol is where the protocol on the target group
does not match the load balancer health check protocol
:example:
.. code-block:: yaml
policies:
- name: appelb-healthcheck-mismatch
resource: app-elb
filters:
- healthcheck-protocol-mismatch
"""
schema = type_schema('healthcheck-protocol-mismatch')
permissions = ("elasticloadbalancing:DescribeTargetGroups",)
def process(self, albs, event=None):
def _healthcheck_protocol_mismatch(alb):
for target_group in self.target_group_map[alb['LoadBalancerArn']]:
if (target_group['Protocol'] !=
target_group['HealthCheckProtocol']):
return True
return False
self.initialize(albs)
return [alb for alb in albs if _healthcheck_protocol_mismatch(alb)]
@AppELB.filter_registry.register('target-group')
class AppELBTargetGroupFilter(ValueFilter, AppELBTargetGroupFilterBase):
"""Filter ALB based on matching target group value"""
schema = type_schema('target-group', rinherit=ValueFilter.schema)
permissions = ("elasticloadbalancing:DescribeTargetGroups",)
def process(self, albs, event=None):
self.initialize(albs)
return super(AppELBTargetGroupFilter, self).process(albs, event)
def __call__(self, alb):
target_groups = self.target_group_map[alb['LoadBalancerArn']]
return self.match(target_groups)
@AppELB.filter_registry.register('default-vpc')
class AppELBDefaultVpcFilter(DefaultVpcBase):
"""Filter all ELB that exist within the default vpc
:example:
.. code-block:: yaml
policies:
- name: appelb-in-default-vpc
resource: app-elb
filters:
- default-vpc
"""
schema = type_schema('default-vpc')
def __call__(self, alb):
return alb.get('VpcId') and self.match(alb.get('VpcId')) or False
@resources.register('app-elb-target-group')
class AppELBTargetGroup(QueryResourceManager):
"""Resource manager for v2 ELB target groups.
"""
class resource_type(object):
service = 'elbv2'
type = 'app-elb-target-group'
enum_spec = ('describe_target_groups', 'TargetGroups', None)
name = 'TargetGroupName'
id = 'TargetGroupArn'
filter_name = None
filter_type = None
dimension = None
date = None
filter_registry = FilterRegistry('app-elb-target-group.filters')
action_registry = ActionRegistry('app-elb-target-group.actions')
retry = staticmethod(get_retry(('Throttling',)))
filter_registry.register('tag-count', tags.TagCountFilter)
filter_registry.register('marked-for-op', tags.TagActionFilter)
@classmethod
def get_permissions(cls):
# override as the service is not the iam prefix
return ("elasticloadbalancing:DescribeTargetGroups",
"elasticloadbalancing:DescribeTags")
def augment(self, target_groups):
client = local_session(self.session_factory).client('elbv2')
def _describe_target_group_health(target_group):
result = self.retry(client.describe_target_health,
TargetGroupArn=target_group['TargetGroupArn'])
target_group['TargetHealthDescriptions'] = result[
'TargetHealthDescriptions']
with self.executor_factory(max_workers=2) as w:
list(w.map(_describe_target_group_health, target_groups))
_describe_target_group_tags(
target_groups, self.session_factory,
self.executor_factory, self.retry)
return target_groups
def _describe_target_group_tags(target_groups, session_factory,
executor_factory, retry):
client = local_session(session_factory).client('elbv2')
def _process_tags(target_group_set):
target_group_map = {
target_group['TargetGroupArn']:
target_group for target_group in target_group_set
}
results = retry(
client.describe_tags,
ResourceArns=list(target_group_map.keys()))
for tag_desc in results['TagDescriptions']:
if ('ResourceArn' in tag_desc and
tag_desc['ResourceArn'] in target_group_map):
target_group_map[
tag_desc['ResourceArn']
]['Tags'] = tag_desc['Tags']
with executor_factory(max_workers=2) as w:
list(w.map(_process_tags, chunks(target_groups, 20)))
@AppELBTargetGroup.action_registry.register('mark-for-op')
class AppELBTargetGroupMarkForOpAction(tags.TagDelayedAction):
"""Action to specify a delayed action on an ELB target group"""
@AppELBTargetGroup.action_registry.register('tag')
class AppELBTargetGroupTagAction(tags.Tag):
"""Action to create tag/tags on an ELB target group
:example:
.. code-block:: yaml
policies:
- name: appelb-targetgroup-add-required-tag
resource: app-elb-target-group
filters:
- "tag:RequiredTag": absent
actions:
- type: tag
key: RequiredTag
value: RequiredValue
"""
batch_size = 1
permissions = ("elasticloadbalancing:AddTags",)
def process_resource_set(self, client, resource_set, ts):
client.add_tags(
ResourceArns=[tgroup['TargetGroupArn'] for tgroup in resource_set],
Tags=ts)
@AppELBTargetGroup.action_registry.register('remove-tag')
class AppELBTargetGroupRemoveTagAction(tags.RemoveTag):
"""Action to remove tag/tags from ELB target group
:example:
.. code-block:: yaml
policies:
- name: appelb-targetgroup-remove-expired-tag
resource: app-elb-target-group
filters:
- "tag:ExpiredTag": present
actions:
- type: remove-tag
tags: ["ExpiredTag"]
"""
batch_size = 1
permissions = ("elasticloadbalancing:RemoveTags",)
def process_resource_set(self, client, resource_set, tag_keys):
client.remove_tags(
ResourceArns=[tgroup['TargetGroupArn'] for tgroup in resource_set],
TagKeys=tag_keys)
@AppELBTargetGroup.filter_registry.register('default-vpc')
class AppELBTargetGroupDefaultVpcFilter(DefaultVpcBase):
"""Filter all application elb target groups within the default vpc
:example:
.. code-block:: yaml
policies:
- name: appelb-targetgroups-default-vpc
resource: app-elb-target-group
filters:
- default-vpc
"""
schema = type_schema('default-vpc')
def __call__(self, target_group):
return (target_group.get('VpcId') and
self.match(target_group.get('VpcId')) or False)
@AppELBTargetGroup.action_registry.register('delete')
class AppELBTargetGroupDeleteAction(BaseAction):
"""Action to delete ELB target group
It is recommended to apply a filter to the delete policy to avoid unwanted
deletion of any app elb target groups.
:example:
.. code-block:: yaml
policies:
- name: appelb-targetgroups-delete-unused
resource: app-elb-target-group
filters:
- "tag:SomeTag": absent
actions:
- delete
"""
schema = type_schema('delete')
permissions = ('elasticloadbalancing:DeleteTargetGroup',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('elbv2')
for tg in resources:
self.process_target_group(client, tg)
def process_target_group(self, client, target_group):
self.manager.retry(
client.delete_target_group,
TargetGroupArn=target_group['TargetGroupArn'])
```
|
{
"source": "jeremyplichta/pi-halloween",
"score": 2
}
|
#### File: jeremyplichta/pi-halloween/speaker.py
```python
import threading
import logging
from os import listdir
from os.path import isfile, join
import random
import time
import pexpect
import sys
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-10s) %(asctime)s %(message)s',
)
class Speaker(threading.Thread):
def __init__(self, group=None, target=None, name=None,
args=(), kwargs=None, verbose=None):
threading.Thread.__init__(self, group=group, target=target, name=name,
verbose=verbose)
self.args = args
self.sounddir = args[0]
self.doneplaying = args[1]
self.shouldstop = threading.Event()
self.shouldplay = threading.Event()
self.kwargs = kwargs
return
def stop(self):
self.shouldstop.set()
self.shouldplay.set()
def play(self):
self.shouldplay.set()
def cancel(self):
self.aplay.terminate()
def getfiletoplay(self, mypath):
onlyfiles = [ join(mypath,f) for f in listdir(mypath) if isfile(join(mypath,f)) and not '.DS_Store' in f ]
return random.choice(onlyfiles)
def playfile(self, playfile):
playfile = playfile.replace(' ', '\ ')
logging.debug('Playing file: {}'.format(playfile))
self.aplay = pexpect.spawn(command='aplay -D bluetooth {}'.format(playfile), logfile=sys.stdout)
self.aplay.logfile = sys.stdout
status = self.aplay.wait()
logging.debug('Done playing file: {} ({})'.format(playfile, status))
self.shouldplay.clear()
self.doneplaying.set()
def run(self):
logging.debug('running with %s and %s', self.args, self.kwargs)
while True:
self.shouldplay.wait()
if self.shouldstop.isSet():
logging.debug('exiting play thread')
return
playfile = self.getfiletoplay(self.sounddir)
self.playfile(playfile)
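# Minimal usage sketch (assumes a 'sounds/' directory of wav files, an ALSA device
# named 'bluetooth', and a threading.Event shared with the caller):
#   doneplaying = threading.Event()
#   speaker = Speaker(args=('sounds/', doneplaying))
#   speaker.start()
#   speaker.play()        # plays a random file from sounds/
#   doneplaying.wait()    # block until aplay finishes
#   speaker.stop()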
```
|
{
"source": "JeremyPorquez/CRASHyperspectralAnalysis",
"score": 2
}
|
#### File: CRASHyperspectralAnalysis/HSA/__init__.py
```python
from .HSAGUI.main import Ui_MainWindow
from . import pyqtgraph as pg
from .pyqtgraph import QtCore
from .pyqtgraph import QtGui as QtWidgets
from . import tiff
from . import svd
from . import ramancsv
from . import mplcanvas
from . import CARS
from . import rgb
from scipy.interpolate import interp1d
from multiprocessing.pool import ThreadPool
import os
import numpy as np
import pandas as pd
class HSA(object):
class Signal(QtCore.QObject):
image_loaded = QtCore.pyqtSignal()
applying_ansc_transform = QtCore.pyqtSignal()
applied_ansc_transform = QtCore.pyqtSignal()
setting_ansc_transform = QtCore.pyqtSignal()
set_ansc_transform = QtCore.pyqtSignal()
update_svd_image = QtCore.pyqtSignal()
def __init__(self):
self.signal = self.Signal()
self.createUi()
self._reinit()
def _reinit(self):
self.data = None
self.raman_index = None
self.cars_index = None
self.cars_intensity = None
self.bg_index = None
self.bg_intensity = None
self.retrieved_index = None
self.retrieved_intensity = None
self.new_image_loaded = False
def createUi(self):
self.mainWindow = QtWidgets.QMainWindow()
self.ui = Ui_MainWindow()
self.ui.setupUi(self.mainWindow)
self.setupSignals()
self.createPgItems()
self.createMplItems()
self.setupButtons()
self.ui.tabWidget.setCurrentIndex(0)
self.mainWindow.show()
def setupSignals(self):
def info(message, timeout=0):
self.ui.statusbar.showMessage(message, timeout)
self.signal.applying_ansc_transform.connect(lambda: info('Applying Anscombe-SVD filter'))
self.signal.setting_ansc_transform.connect(lambda: info('Setting Anscombe-SVD filter value'))
self.signal.set_ansc_transform.connect(lambda: info('Anscombe-SVD filter value set'))
self.signal.applied_ansc_transform.connect(lambda: info('Anscombe-SVD filter applied'))
self.signal.image_loaded.connect(lambda: info('Image Loaded'))
self.signal.update_svd_image.connect(self.update_SVDPgImage)
self.ui.rgb_comboBox.currentIndexChanged.connect(self.update_rgb_images)
info('Hyperspectral Image c/o JGPorquez')
def createPgItems(self):
self.image_tiff = pg.ImageView()
self.image_svd = pg.ImageView()
self.ui.pglayout.addWidget(self.image_tiff)
self.ui.svdLayout.addWidget(self.image_svd)
self.image_tiff.timeLine.sigPositionChanged.connect(
lambda: self.update_pgimage_position(self.image_tiff,
self.ui.tiff_position_doubleSpinBox))
self.image_svd.timeLine.sigPositionChanged.connect(
lambda: self.update_pgimage_position(self.image_svd,
self.ui.svd_position_doubleSpinBox))
## create widgets for rgb tab
self.rgb_image = rgb.ContrastImage(calculation_mode=self.ui.rgb_comboBox.currentText())
self.ui.rgb_pglayout1_1.addWidget(self.rgb_image.r[0])
self.ui.rgb_pglayout1_2.addWidget(self.rgb_image.r[1])
self.ui.rgb_pglayout1_3.addWidget(self.rgb_image.r[2])
self.ui.rgb_pglayout2_1.addWidget(self.rgb_image.g[0])
self.ui.rgb_pglayout2_2.addWidget(self.rgb_image.g[1])
self.ui.rgb_pglayout2_3.addWidget(self.rgb_image.g[2])
self.ui.rgb_pglayout3_1.addWidget(self.rgb_image.b[0])
self.ui.rgb_pglayout3_2.addWidget(self.rgb_image.b[1])
self.ui.rgb_pglayout3_3.addWidget(self.rgb_image.b[2])
self.ui.rgb_pglayout_rgb.addWidget(self.rgb_image.rgb)
def createMplItems(self):
self.mplPlot = mplcanvas.MplCanvas(self.mainWindow)
self.mplPlot.createPlot()
self.ui.ramanRetrievalLayout.addWidget(self.mplPlot)
self.navi_toolbar = mplcanvas.NavigationToolbar(self.mplPlot, self.mainWindow)
self.ui.ramanRetrievalLayout.addWidget(self.navi_toolbar)
def setupButtons(self):
self.ui.openTiff.clicked.connect(self.open_tiff)
self.ui.saveTiffROI.clicked.connect(lambda: self.save_roi(self.image_tiff))
self.ui.setTiffROItoCARS.clicked.connect(lambda: self.set_roi_as_cars(self.image_tiff))
self.ui.setTiffROItoBG.clicked.connect(lambda: self.set_roi_as_background(self.image_tiff))
self.ui.openWN.clicked.connect(lambda: self.open_wn(None))
self.ui.applySVD.clicked.connect(self.apply_svd)
self.ui.saveSVD.clicked.connect(self.save_svd)
self.ui.saveSVD_all.clicked.connect(self.save_svd_all)
self.ui.saveSVDROI.clicked.connect(lambda: self.save_roi(self.image_svd))
self.ui.setSVDValue.clicked.connect(lambda: self.set_svd_value())
self.ui.setSVDROItoCARS.clicked.connect(lambda: self.set_roi_as_cars(self.image_svd))
self.ui.setSVDROItoBG.clicked.connect(lambda: self.set_roi_as_background(self.image_svd))
self.ui.openBackground.clicked.connect(lambda: self.open_background(None))
self.ui.openCARSIntensity.clicked.connect(lambda: self.open_cars(None))
self.ui.applyRetrieval.clicked.connect(self.apply_retrieval)
self.ui.saveRetrieved.clicked.connect(self.save_retrieved)
self.ui.tiff_position_doubleSpinBox.valueChanged.connect(
lambda: self.set_pgimage_position(self.image_tiff,
self.ui.tiff_position_doubleSpinBox))
self.ui.svd_position_doubleSpinBox.valueChanged.connect(
lambda: self.set_pgimage_position(self.image_svd,
self.ui.svd_position_doubleSpinBox))
self.ui.load_rgb_raw.clicked.connect(self.open_rgb_raw)
self.ui.load_rgb_svd.clicked.connect(self.open_rgb_svd)
self.ui.save_rgb_1.clicked.connect(lambda: self.save_rgb(1))
self.ui.save_rgb_2.clicked.connect(lambda: self.save_rgb(2))
self.ui.save_rgb_3.clicked.connect(lambda: self.save_rgb(3))
self.ui.save_rgb_rgb.clicked.connect(lambda: self.save_rgb('rgb'))
def loadFiles(self):
idx = 0
for file in self.filenames:
fname, ext = os.path.splitext(file)
if any(x in ext for x in ('tiff','tif')):
self.filename_tiff = file
data = tiff.imread(file)
self.data = svd.Image(data)
idx += 1
            if 'csv' in ext:
self.open_wn(file)
return self.data
def open_tiff(self):
fileDialog = QtWidgets.QFileDialog()
fileDialog.setFileMode(QtWidgets.QFileDialog.ExistingFiles)
filter = "TIFF (*.tiff);;TIF (*.tif)"
defaultDirectory = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop')
files, filter = fileDialog.getOpenFileNames(QtWidgets.QWidget(), "Open files")
self.filenames = files
if len(self.filenames) > 0:
# self._reinit()
self.loadFiles()
self.update_pgimage(self.image_tiff,self.data.raw_image)
z,y,x = self.data.shape
bitsize = self.data.dtype.name
image_info_text = "{} {}x{}x{}".format(bitsize,z,x,y)
self.ui.image_info_label.setText(image_info_text)
def open_wn(self, file=None):
if file is None:
fileDialog = QtWidgets.QFileDialog()
fileDialog.setFileMode(QtWidgets.QFileDialog.ExistingFile)
# defaultDirectory = os.path.join(os.path.join(os.path.expanduser('~')), 'Desktop')
file, filter = fileDialog.getOpenFileName(QtWidgets.QWidget(), "Open file")
if file == '':
return None
wn_dataframe = pd.read_csv(file)
self.raman_index = ramancsv.getRamanIndex(wn_dataframe)
if self.data is not None:
self.update_pgimage(self.image_tiff,self.data.raw_image)
self.update_pgimage(self.image_svd,self.data.svd_image)
def open_background(self,file=None,col=1):
if file is None:
fileDialog = QtWidgets.QFileDialog()
fileDialog.setFileMode(QtWidgets.QFileDialog.ExistingFile)
file, filter = fileDialog.getOpenFileName(QtWidgets.QWidget(),"Open file")
fname, ext = os.path.splitext(file)
if fname == '':
return None
if 'csv' in ext:
background = pd.read_csv(file)
if any(x in ext for x in ('xls', 'xlsx')):
background = pd.read_excel(file)
if 'Y' in background.columns:
bg_intensity = background.Y
else:
bg_intensity = background[background.columns[col]].values
if 'Raman' in background.columns:
index = background.Raman.values
elif 'X' in background.columns:
index = background.X.values
else:
index = background.index
self.bg_index = index
self.bg_intensity = bg_intensity
self.plot_background()
def open_cars(self,file=None,col=1):
if file is None:
fileDialog = QtWidgets.QFileDialog()
fileDialog.setFileMode(QtWidgets.QFileDialog.ExistingFile)
file, filter = fileDialog.getOpenFileName(QtWidgets.QWidget(),"Open file")
fname, ext = os.path.splitext(file)
if fname == '':
return None
if 'csv' in ext:
cars = pd.read_csv(file)
if any(x in ext for x in ('xls','xlsx')):
cars = pd.read_excel(file)
if 'Y' in cars.columns:
cars_intensity= cars.Y.values
else:
cars_intensity = cars[cars.columns[col]].values
if 'Raman' in cars.columns:
index = cars.Raman.values
elif 'X' in cars.columns:
index = cars.X.values
else:
index = cars.index
self.cars_index = index
self.cars_intensity = cars_intensity
self.plot_cars()
def open_rgb_raw(self):
self.rgb_image.set_image(self.data.raw_image)
def open_rgb_svd(self):
self.rgb_image.set_image(self.data.svd_image)
def plot_background(self):
self.mplPlot.plots[0].set_data(self.bg_index, self.bg_intensity)
self.mplPlot.axes[0].set_xlim(self.bg_index.min(), self.bg_index.max())
self.mplPlot.axes[0].set_ylim(self.bg_intensity.min(), self.bg_intensity.max())
self.mplPlot.draw()
def plot_cars(self):
self.mplPlot.plots[1].set_data(self.cars_index, self.cars_intensity)
self.mplPlot.axes[1].set_xlim(self.cars_index.min(), self.cars_index.max())
self.mplPlot.axes[1].set_ylim(self.cars_intensity.min(), self.cars_intensity.max())
self.mplPlot.draw()
def plot_retrieved(self):
self.mplPlot.plots[2].set_data(self.retrieved_index, self.retrieved_intensity)
self.mplPlot.axes[2].set_xlim(self.retrieved_index.min(), self.retrieved_index.max())
self.mplPlot.axes[2].set_ylim(self.retrieved_intensity.min(), self.retrieved_intensity.max())
self.mplPlot.draw()
def apply_svd(self):
if self.data.raw_image is not None:
self.signal.applying_ansc_transform.emit()
singular_values = self.ui.singularValues_spinBox.value()
self.data.calc_svd(singular_values=singular_values,signal=self.signal.update_svd_image)
# self.update_SVDPgImage()
def apply_retrieval(self):
        if (self.bg_intensity is None) or (self.cars_intensity is None):
            return None
        densify = False
        if len(self.bg_intensity) != len(self.cars_intensity):
            densify = True
            print('NRB and CARS have different shapes, applying interpolation')
        #todo : need to detect whether bg_index and cars_index are similar
if densify:
x_min = np.min([self.bg_index.min(), self.cars_index.min()])
x_max = np.max([self.bg_index.max(), self.cars_index.max()])
numberOfPoints = self.ui.densify_spinbox.value()
x_values = np.linspace(x_min,x_max, numberOfPoints)
f_cars = interp1d(self.cars_index,self.cars_intensity, fill_value='extrapolate')
f_bg = interp1d(self.bg_index, self.bg_intensity, fill_value='extrapolate')
cars_intensity = f_cars(x_values)
bg_intensity = f_bg(x_values)
        else:
            cars_intensity = self.cars_intensity
            bg_intensity = self.bg_intensity
            # no interpolation needed; keep the original CARS axis for the retrieved spectrum
            x_values = self.cars_index
smoothness_exponent = self.ui.smoothness_spinbox.value()
smoothness = 10**smoothness_exponent
asymmetry_exponent = self.ui.asymmetry_spinbox.value()
asymmetry = 10**asymmetry_exponent
savgol_window = self.ui.savgol_window_retr_spinbox.value()
try :
self.retrieved_intensity = CARS.getCorrectedCARSPhase(I_CARS=cars_intensity,
I_REF=bg_intensity,
SMOOTHNESS_PARAM=smoothness,
ASYM_PARAM=asymmetry,
SAVGOL_WINDOW=savgol_window)
self.retrieved_index = x_values
self.plot_retrieved()
except Exception as e:
print(e)
def apply_img_retrieval(self):
        if (self.bg_intensity is None) or (self.data.raw_image is None):
return None
smoothness_exponent = self.ui.smoothness_spinbox.value()
smoothness = 10**smoothness_exponent
asymmetry_exponent = self.ui.asymmetry_spinbox.value()
asymmetry = 10**asymmetry_exponent
savgol_window = self.ui.savgol_window_retr_spinbox.value()
img = self.data.image
self.retrieved_image = CARS.getCorrectedCARSPhaseImage(img,
I_REF=self.bg_intensity,
SMOOTHNESS_PARAM=smoothness,
ASYM_PARAM=asymmetry,
SAVGOL_WINDOW=savgol_window)
# self.update_pgimage()
def save_roi(self,imageView):
assert isinstance(imageView, pg.ImageView)
if len(imageView.roiCurves) == 0:
return None
fileDialog = QtWidgets.QFileDialog()
filter = "CSV (*.csv)"
file, filt = fileDialog.getSaveFileName(QtWidgets.QWidget(), "Save CSV", filter=filter)
roiCurve = imageView.roiCurves[0]
x,y = roiCurve.xData, roiCurve.yData
try :
df = pd.DataFrame(y,index=x, columns=['Y'])
df.index.name = 'X'
df.to_csv(file)
except Exception as e:
print('Error in saving ROI : {}'.format(e))
def save_svd(self):
if self.data.svd_image is not None:
filter = "TIF (*.tif)"
fileDialog = QtWidgets.QFileDialog()
file, filter = fileDialog.getSaveFileName(QtWidgets.QWidget(), "Save svd tiff", filter=filter)
tiff.imsave(file,self.data.svd_image)
def save_svd_all(self):
if self.data.svd_image is not None:
fileDialog = QtWidgets.QFileDialog()
saveDirectory = fileDialog.getExistingDirectory()
singular_values = self.ui.singularValues_spinBox.value()
def save_sv():
for sv in range(-1, singular_values + 1):
print('Saving singular value : {}'.format(sv))
                    self.data.calc_svd_single(sv)
image = self.data.svd_image_single
if sv == -1:
filename = 'svd_full.tif'
else:
filename = 'svd_{0:0>3}.tif'.format(sv)
filename = os.path.join(saveDirectory,filename)
tiff.imsave(filename,image)
pool = ThreadPool()
pool.apply_async(save_sv)
def save_retrieved(self):
if self.retrieved_intensity is None:
return None
fileDialog = QtWidgets.QFileDialog()
filter = "CSV (*.csv)"
file, filt = fileDialog.getSaveFileName(QtWidgets.QWidget(), "Save CSV", filter=filter)
try :
df = pd.DataFrame(self.retrieved_intensity, index=self.retrieved_index, columns=['Y'])
df.index.name = 'X'
df.to_csv(file)
except Exception as e:
print('Error in saving ROI : {}'.format(e))
def save_rgb(self,content=1):
filter = "TIF (*.tif)"
fileDialog = QtWidgets.QFileDialog()
file, filter = fileDialog.getSaveFileName(QtWidgets.QWidget(), "Save rgb {} tiff".format(content), filter=filter)
if content == 1:
tiff.imsave(file, self.rgb_image.r[2].image.astype(np.uint16))
elif content == 2:
tiff.imsave(file, self.rgb_image.g[2].image.astype(np.uint16))
elif content == 3:
tiff.imsave(file, self.rgb_image.b[2].image.astype(np.uint16))
elif content == 'rgb':
tiff.imsave(file, self.rgb_image.rgb.image.astype(np.uint16))
def set_roi_as_cars(self,imageView):
assert isinstance(imageView, pg.ImageView)
if len(imageView.roiCurves) == 0:
return None
roiCurve = imageView.roiCurves[0]
x,y = roiCurve.xData, roiCurve.yData
self.cars_index = x
self.cars_intensity = y
self.plot_cars()
def set_roi_as_background(self,imageView):
assert isinstance(imageView, pg.ImageView)
if len(imageView.roiCurves) == 0:
return None
roiCurve = imageView.roiCurves[0]
x,y = roiCurve.xData, roiCurve.yData
self.bg_index = x
self.bg_intensity = y
self.plot_background()
def set_svd_value(self, singular_value=None, updateImage=True):
if self.data is None:
return None
if singular_value is None:
singular_value = self.ui.singularValue_spinBox.value()
self.signal.setting_ansc_transform.emit()
self.data.calc_svd_single(singular_value)
if updateImage:
self.update_SVDPgImage(self.data.svd_image_single)
self.signal.set_ansc_transform.emit()
def set_pgimage_position(self, imageView, doubleSpinBox):
if (not isinstance(imageView, pg.ImageView)) & (not isinstance(doubleSpinBox, QtWidgets.QDoubleSpinBox)):
return None
new_value = doubleSpinBox.value()
current_index = imageView.currentIndex
new_index = np.argmin(np.abs(new_value - imageView.tVals))
current_value = np.round(imageView.tVals[current_index], 2)
if current_index == new_index:
if new_value > current_value:
new_index += 1
elif new_value < current_value:
new_index -= 1
try:
imageView.setCurrentIndex(new_index)
except Exception as e:
print(e)
def update_pgimage(self,imageView,data):
assert isinstance(imageView, pg.ImageView)
if data is not None:
assert isinstance(data, np.ndarray)
raman_index = None
if self.raman_index is not None:
if data.shape[0] == len(self.raman_index):
raman_index = self.raman_index
imageView.setImage(np.swapaxes(data,1,2),
xvals=raman_index,
autoLevels=True
)
imageView.autoRange()
self.signal.image_loaded.emit()
def update_pgimage_position(self, imageview, doubleSpinBox):
if (not isinstance(imageview, pg.ImageView)) & (not isinstance(doubleSpinBox, QtWidgets.QDoubleSpinBox)):
return None
value = imageview.timeLine.value()
doubleSpinBox.setValue(value)
def update_rgb_images(self):
self.rgb_image.set_calculation_mode(self.ui.rgb_comboBox.currentText())
def update_SVDPgImage(self,image=None):
if self.data.svd_image is not None:
if image is None:
image = self.data.svd_image
raman_index = None
if self.raman_index is not None:
if image.shape[0] == len(self.raman_index):
raman_index = self.raman_index
self.image_svd.setImage(np.swapaxes(image, 1, 2),
xvals=raman_index,
autoLevels=True
)
self.image_svd.autoRange()
self.signal.applied_ansc_transform.emit()
```
#### File: HSA/svd/__init__.py
```python
import numpy as np
from multiprocessing.pool import ThreadPool
def anscombe(data):
return 2 * np.sqrt(data + 3. / 8)
def inverse_anscombe(data):
return (data / 2.) ** 2 - 3. / 8
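# Note: the Anscombe transform 2*sqrt(x + 3/8) approximately turns Poisson-distributed
# counts into data with constant (unit) variance, so the SVD filter below can treat the
# noise as Gaussian; inverse_anscombe() maps the filtered values back to the original scale.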
def svd_filter(data, full_matrices=False, singular_values=10, full_result=False):
'''
:param data: 3D numpy matrix with shape [spectra, y, x]
:param full_matrices: bool, optional
performs full SVD or reduced SVD
:param singular_values: int, optional
number of first singular values to retain
:param full_result: bool, optional
returns SVD filtered image, U, S, V
else returns SVD filtered image
:return:
SVD filtered image (optional: U, S, V with full_result = True)
'''
if not type(data) == np.ndarray:
data = np.array(data)
assert len(data.shape) == 3, 'data must be in 3-D'
assert type(singular_values) == int
z, y, x = data.shape
svd_data = data.reshape((z,y*x))
U, s, V = np.linalg.svd(svd_data,full_matrices=full_matrices)
s_approx = s.copy()
s_approx[singular_values:] = 0
filtered_image = np.dot(U, np.dot(np.diag(s_approx),V))
filtered_image = filtered_image.reshape((z, y, x))
if full_result:
return filtered_image, (U, s, V)
else:
return filtered_image
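# Minimal usage sketch (shapes and values are illustrative only):
#   stack = np.random.poisson(5.0, size=(100, 64, 64)).astype(float)   # [spectra, y, x]
#   denoised = svd_filter(anscombe(stack), singular_values=8)
#   denoised = inverse_anscombe(denoised)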
def calc_svd(usv,shape=(3,100,100),value=0,singular_values=10):
U, s, V = usv
if value == -1:
s_approx = s.copy()
s_approx[singular_values:] = 0
else:
s_approx = np.zeros(s.shape)
s_approx[value] = s[value]
filtered_image = np.dot(U, np.dot(np.diag(s_approx),V))
filtered_image = filtered_image.reshape(shape)
return filtered_image
class Image(object):
def __init__(self, data):
assert isinstance(data, np.ndarray)
self.raw_image = data
self.shape = data.shape
self.dtype = data.dtype
self.svd_image = None
self.svd_image_single = None
self.usv = None
self.singular_values = None
def calc_svd(self, singular_values=None, anscombe_results=True, signal=None):
if singular_values is None:
if self.singular_values is None:
singular_values = 10
else:
singular_values = self.singular_values
pool = ThreadPool()
svd_image = self.raw_image
def process(svd_image, full_matrices, singular_values, full_result, signal):
if anscombe_results:
svd_image = anscombe(svd_image)
svd_image, self.usv = svd_filter(svd_image, full_matrices, singular_values, full_result)
if anscombe_results:
svd_image = inverse_anscombe(svd_image)
self.svd_image = svd_image
signal.emit()
args = (svd_image, False, singular_values, True,signal)
pool.apply_async(process, args)
def calc_svd_single(self,singular_value=-1):
if self.usv is None:
self.calc_svd()
pool = ThreadPool(processes=1)
args = (self.usv, self.shape, singular_value, self.singular_values)
svd_image_single = pool.apply_async(calc_svd, args)
self.svd_image_single = svd_image_single.get()
min_val = self.svd_image_single.min()
if min_val < 0:
self.svd_image_single -= min_val
return self.svd_image_single
```
|
{
"source": "jeremyprice/PythonForDevs",
"score": 4
}
|
#### File: PythonForDevs/DemoProgs/generatordecimals.py
```python
def frange(start, stop, step):
i = start
while i < stop:
yield i
i = i + step
for x in frange(0.5, 1.0, 0.1):
print x
print
from decimal import Decimal
for x in frange(Decimal('0.5'), Decimal('1.0'), Decimal('0.1')):
print x
```
#### File: PythonForDevs/DemoProgs/lcl_glbl.py
```python
def lcl_tst(a, b):
z = a + b
print dir(), '\n'
x = 'changed'
print 'x =', x, '\n'
return
x = 'string1'
y = 'string2'
lngint = 1234567890123456789
print 'Globals -', dir(), '\n'
lcl_tst(x, y)
print 'x =', x
```
#### File: PythonForDevs/DemoProgs/varykwargs.py
```python
def gets(**kwargs):
print type(kwargs), len(kwargs), '\n', kwargs
for i in kwargs:
print i, kwargs[i], type(kwargs[i])
x = 12.34
y = 'string'
gets(a=1234, b=x+1, c=y+'s', d=12.34)
```
|
{
"source": "jeremy-quicklearner/advent-of-code",
"score": 3
}
|
#### File: 2020/11/11.py
```python
import copy
with open('seats.txt') as fh:
lines = fh.readlines()
grid = [[{'.':None,'L':False}[c] for c in l.strip()] for l in lines]
for row in grid:
row.append(None)
row.insert(0, None)
grid.insert(0, [None for c in grid[0]])
grid.append([None for c in grid[0]])
def iterate1(g):
ng = copy.deepcopy(g)
for row in range(len(g)):
for col in range(len(g[row])):
if g[row][col] is not None:
c = g[row][col]
n = 0
if g[row-1][col-1]:
n += 1
if g[row-1][col+1]:
n += 1
if g[row+1][col-1]:
n += 1
if g[row+1][col+1]:
n += 1
if g[row][col-1]:
n += 1
if g[row][col+1]:
n += 1
if g[row-1][col]:
n += 1
if g[row+1][col]:
n += 1
if c == False and n == 0:
ng[row][col] = True
elif c == True and n >= 4:
ng[row][col] = False
return ng
def traverse(g, startrow, startcol, rowstep, colstep):
row = startrow + rowstep
col = startcol + colstep
while True:
if row < 0 or col < 0 or row >= len(g) or col >= len(g[row]):
return False
if g[row][col] is not None:
return g[row][col]
row = row + rowstep
col = col + colstep
def iterate2(g):
ng = copy.deepcopy(g)
for row in range(len(g)):
for col in range(len(g[row])):
if g[row][col] is not None:
c = g[row][col]
n = 0
if traverse(g,row,col,-1,-1):
n += 1
if traverse(g,row,col,-1, 1):
n += 1
if traverse(g,row,col, 1,-1):
n += 1
if traverse(g,row,col, 1, 1):
n += 1
if traverse(g,row,col, 0,-1):
n += 1
if traverse(g,row,col, 0, 1):
n += 1
if traverse(g,row,col,-1, 0):
n += 1
if traverse(g,row,col, 1, 0):
n += 1
if c == False and n == 0:
ng[row][col] = True
elif c == True and n >= 5:
ng[row][col] = False
return ng
def equals(g, ng):
for row in range(len(g)):
for col in range(len(g[row])):
if g[row][col] != ng[row][col]:
return False
return True
def count(g):
acc = 0
for row in range(len(g)):
for col in range(len(g[row])):
if g[row][col]:
acc += 1
return acc
while(True):
newgrid = iterate2(grid)
if equals(grid, newgrid):
break
grid = newgrid
print(count(grid))
```
#### File: 2020/14/14.py
```python
import copy
with open('instr.txt') as fh:
lines = fh.readlines()
mem = {}
mask = '000000000000000000000000000000000000'
for line in lines:
if line[:4] == 'mask':
mask = line.strip().split()[2]
if line[:3] == 'mem':
addr = int(line.strip().split(' ')[0].split('[')[1][:-1])
val = int(line.strip().split(' ')[2])
mval = ''.join([{'0':'0','1':'1','X':v}[m] for (m,v) in zip(mask,'{:036b}'.format(val))])
mem[addr] = mval
print(sum([int(mem[addr],2) for addr in mem]))
mem = {}
mask = '000000000000000000000000000000000000'
def variants(maddr):
if maddr.count('X') == 0:
return [maddr]
idx = maddr.index('X')
variant0, variant1 = copy.copy(maddr), copy.copy(maddr)
variant0[idx] = '0'
variant1[idx] = '1'
return variants(variant0) + variants(variant1)
for line in lines:
if line[:4] == 'mask':
mask = line.strip().split()[2]
if line[:3] == 'mem':
addr = int(line.strip().split(' ')[0].split('[')[1][:-1])
val = int(line.strip().split(' ')[2])
maddr = [{'0':a,'1':'1','X':'X'}[m] for (m,a) in zip(mask,'{:036b}'.format(addr))]
for variant in variants(maddr):
mem[int(''.join(variant),2)] = val
print(sum([mem[addr] for addr in mem]))
```
#### File: 2020/20/20.py
```python
import copy
import random
with open('tiles.txt') as fh:
lines = fh.readlines()
tilestrs = ''.join(lines).split('\n\n')
tilestrs = {int(t.split('\n')[0][5:9]):'\n'.join(t.strip().split('\n')[1:]) for t in tilestrs}
tiles = {}
for tilekey,tilestr in tilestrs.items():
tile = []
for rowstr in tilestr.split('\n'):
tile.append(rowstr.strip())
tiles[tilekey] = tile
def ptile(tile):
print('\n'.join([' '.join(r) for r in tile]))
def vreflect(tile):
return [t for t in list(reversed(tile))]
def hreflect(tile):
return [list(reversed(t)) for t in tile]
def rotate(tile, degree):
ttile = tile
res = ttile
while degree > 0:
res = [['' for c in range(len(ttile))] for r in range(len(ttile[0]))]
for row in range(len(ttile[0])):
for col in range(len(ttile)):
res[row-1][col] = ttile[col][-row]
ttile = res
degree -= 1
return res
def transform(tile, vref, href, rot):
ttile = tile
if vref:
ttile = vreflect(ttile)
if href:
ttile = hreflect(ttile)
if rot:
ttile = rotate(ttile, rot)
return ttile
def memohash(vref, href, rot):
return (100 if vref else 0) + (10 if href else 0) + rot
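# e.g. memohash(True, False, 3) == 103 and memohash(False, True, 0) == 10, so every
# (vref, href, rot) combination gets a distinct integer key for the memo dict below.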
memo = {}
def memoget(id, vref, href, rot):
if id not in memo:
return None
return memo[id].get(memohash(vref, href, rot), None)
def memoset(id, vref, href, rot, tile):
if id not in memo:
memo[id] = {}
memo[id][memohash(vref, href, rot)] = tile
def variants(id):
vars = []
for vref in [False,True]:
for href in [False,True]:
for rot in range(0,4):
v = memoget(id, vref, href, rot)
if not v:
v = transform(tiles[id], vref, href, rot)
memoset(id, vref, href, rot, v)
vars.append((id,vref,href,rot))
return vars
def fit(tile, othertile, pos):
# Pos = 0 -> other is to the right
# Pos = 1 -> other is above
# Pos = 2 -> other is to the left
# Pos = 3 -> other is below
if pos == 0:
edge = [r[-1] for r in tile]
otheredge = [r[0] for r in othertile]
if pos == 1:
edge = tile[0]
otheredge = othertile[-1]
if pos == 2:
edge = [r[0] for r in tile]
otheredge = [r[-1] for r in othertile]
if pos == 3:
edge = tile[-1]
otheredge = othertile[0]
for (e,o) in zip(edge,otheredge):
if e != o:
return False
return True
def memofithash(memotile, othermemotile, pos):
return str(memotile) + str(othermemotile) + str(pos)
memofitd = {}
def memofit(memotile, othermemotile, pos):
mfh = memofithash(memotile, othermemotile, pos)
if mfh not in memofitd:
memofitd[mfh] = fit(memoget(*memotile),memoget(*othermemotile),pos)
return memofitd[mfh]
# I counted 144 tiles, so it's a 12x12 square. If we use one of the corners as
# the starting point, then we need enough room for the whole puzzle to fill one
# quadrant. So use a 23x23 grid. For algorithmic simplicity, add an extra border
# of slots around the edge (making it 25x25).
grid = [[None for _ in range(25)] for _ in range(25)]
pool = list(tiles.keys())
random.shuffle(pool)
# Arbitrarily select tile 1669 as the starting point, with no transformations
grid[12][12] = (1669,0,0,0)
pool.remove(1669)
variants(1669)
def solve():
for row in range(len(grid)):
for col in range(len(grid)):
print(('[' + str(grid[row][col][0]) + ']' if grid[row][col] else '......'), end='')
print('')
print(pool)
for row in range(1, len(grid) - 1):
for col in range(1, len(grid[row]) - 1):
# If cell is already filled, we can't add a tile to it
if grid[row][col]:
continue
# If no neighbours are filled, don't waste time on this cell.
# This is the part that benefits from the extra border
right = grid[row][col+1]
above = grid[row-1][col]
left = grid[row][col-1]
below = grid[row+1][col]
if not right and not above and not left and not below:
continue
# Try all variants of all tiles from the pool
for id in pool:
for variant in variants(id):
if right and not memofit(variant, right, 0):
continue
if above and not memofit(variant, above, 1):
continue
if left and not memofit(variant, left, 2):
continue
if below and not memofit(variant, below, 3):
continue
# Found a variant that works. Remove from the pool, add to the
# grid, and recurse
idx = pool.index(id)
pool.remove(id)
grid[row][col] = variant
solve()
# If the pool is empty after recursing, we have a solution.
if not pool:
return
# Otherwise the solve failed and we are backtracking. Try
# the next variant.
grid[row][col] = None
pool.insert(idx,id)
solve()
for id,variants in memo.items():
for mh,variant in variants.items():
pruned = copy.deepcopy(variant)
pruned = pruned[1:-1]
pruned = [p[1:-1] for p in pruned]
memo[id][mh] = pruned
minrow = 0
for (idx,row) in enumerate(grid):
filled = 0
for cell in row:
if cell:
filled = 1
break
if filled:
minrow = idx
break
maxrow = 0
for (idx,row) in reversed(list(enumerate(grid))):
filled = 0
for cell in row:
if cell:
filled = 1
break
if filled:
maxrow = idx
break
mincol = 0
for (idx,cell) in enumerate(grid[minrow]):
if cell:
mincol = idx
break
maxcol = 0
for (idx,cell) in reversed(list(enumerate(grid[maxrow]))):
if cell:
maxcol = idx
break
trimmedgrid = grid[minrow:maxrow+1]
for idx,row in enumerate(trimmedgrid):
trimmedgrid[idx] = row[mincol:maxcol+1]
imagetiles = [[memoget(*c) for c in r] for r in trimmedgrid]
image = []
for tilerow in imagetiles:
for subrowidx in range(8):
subrow = []
for tile in tilerow:
subrow += tile[subrowidx]
image.append(subrow)
monsterimg = [list(' # '),
list('# ## ## ###'),
list(' # # # # # # ')]
monstervariants = []
for vref in [False,True]:
for href in [False,True]:
for rot in range(0,4):
monstervariants.append(transform(monsterimg, vref, href, rot))
for mvar in monstervariants:
for mrow in (mvar):
print(''.join(mrow))
print('')
inmonster = [[False for _ in r] for r in image]
def checkmonster(row, col, monster):
if row + len(monster) > len(image):
return False
if col + len(monster[0]) > len(image[row]):
return False
for mrow in range(len(monster)):
for mcol in range(len(monster[mrow])):
if monster[mrow][mcol] == '#' and image[row+mrow][col+mcol] != '#':
return False
return True
for row in range(len(image)):
for col in range(len(image[row])):
for mvar in monstervariants:
if checkmonster(row, col, mvar):
for mrow in range(len(mvar)):
for mcol in range(len(mvar[mrow])):
if mvar[mrow][mcol] == '#':
inmonster[row+mrow][col+mcol] = True
print('\n'.join([' '.join(r) for r in image]))
print('\n'.join(' '.join([{True:'#',False:' '}[c] for c in r]) for r in inmonster))
monstercount = 0
nonmonstercount = 0
for row in range(len(image)):
for col in range(len(image)):
if image[row][col] != '#':
continue
if inmonster[row][col]:
monstercount += 1
else:
nonmonstercount += 1
print(nonmonstercount)
```
#### File: 2020/22/22.py
```python
import copy
with open('cards.txt') as fh:
lines = fh.readlines()
lines = ''.join(lines)
deck1, deck2 = lines.split('\n\n')
deck1 = deck1.strip().split('\n')[1:]
deck2 = deck2.strip().split('\n')[1:]
deck1 = [int(v) for v in deck1]
deck2 = [int(v) for v in deck2]
while deck1 and deck2:
play1 = deck1[0]
play2 = deck2[0]
deck1 = deck1[1:]
deck2 = deck2[1:]
if play1 > play2:
deck1 += [play1, play2]
elif play2 > play1:
deck2 += [play2, play1]
else:
print('UH OH')
win = deck1 if deck1 else deck2
print(sum([i*v for i,v in enumerate(reversed(win), 1)]))
lines = ''.join(lines)
deck1, deck2 = lines.split('\n\n')
deck1 = deck1.strip().split('\n')[1:]
deck2 = deck2.strip().split('\n')[1:]
deck1 = [int(v) for v in deck1]
deck2 = [int(v) for v in deck2]
def rcombat(d1, d2, g):
sofar = {}
r = 0
while(d1 and d2):
r += 1
play1 = d1[0]
play2 = d2[0]
d1 = d1[1:]
d2 = d2[1:]
if play1 <= len(d1) and play2 <= len(d2):
# winner of round
winner = rcombat(copy.copy(d1[:play1]), copy.copy(d2[:play2]), g+1)[0]
elif play1 > play2:
winner = 1
elif play2 > play1:
winner = 2
else:
print('UH OH')
if winner == 1:
d1 += [play1, play2]
elif winner == 2:
d2 += [play2, play1]
else:
print('UH OH')
if str(d1) + '|' + str(d2) in sofar:
# p1 wins game
return (1,0)
sofar[str(d1) + '|' + str(d2)] = 1
res = sum([i*v for i,v in enumerate(reversed(d1 if d1 else d2), 1)])
return (1,res) if d1 else (2,res)
print(rcombat(deck1, deck2, 1)[1])
```
#### File: 2020/23/23.py
```python
class Cup(object):
def __init__(self, label):
self.label = label
self.prev = None
self.next = None
def game(max, turns):
    labels = [int(i) for i in list('186524973')] + list(range(10, max + 1))
cups = {}
cup1 = Cup(labels[0])
prev = cup1
for label in labels[1:]:
cup = Cup(label)
prev.next = cup
cup.prev = prev
prev = cup
cups[label] = cup
cup1.prev = prev
prev.next = cup1
cups[1] = cup1
currentcup = cups[1]
for turn in range(turns):
startcup = currentcup.next
endcup = startcup.next.next
endcup.next.prev = startcup.prev
startcup.prev.next = endcup.next
picked = [startcup.label, startcup.next.label, startcup.next.next.label]
destlabel = currentcup.label - 1
if destlabel == 0:
destlabel = max
while destlabel in picked:
destlabel -= 1
if destlabel == 0:
destlabel = max
destcup = cups[destlabel]
endcup.next = destcup.next
destcup.next.prev = endcup
destcup.next = startcup
startcup.prev = destcup
currentcup = currentcup.next
if turn and turn % 1000000 == 0:
print('turn ' + str(turn))
return cups
cups = game(9,100)
out = []
cup = cups[1].next
while cup.label != 1:
out.append(cup.label)
cup = cup.next
print(''.join([str(i) for i in out]))
cups = game(1000000,10000000)
print(cups[1].next.label * cups[1].next.next.label)
```
#### File: 2020/7/7.py
```python
class Bags(object):
def __init__(self, colour, amount):
self.colour = colour
self.amount = amount
with open('rules.txt') as fh:
lines = fh.readlines()
bags = {}
for line in lines:
[outer, inner] = line.split(' bags contain ')
inner = inner.split(',')
innerbags = []
for bagstr in inner:
bag = bagstr.strip().split(' ')
if bag[0] != 'no':
amount = int(bag[0])
colour = bag[1] + ' ' + bag[2]
innerbags.append(Bags(colour, amount))
bags[outer] = innerbags
passneeded = False
hasgold = ['shiny gold']
while True:
for outercolour in bags:
for innerbag in bags[outercolour]:
if innerbag.colour in hasgold and outercolour not in hasgold:
hasgold.append(outercolour)
passneeded = True
if not passneeded:
break
passneeded = False
print(len(hasgold) - 1)
def countBags(key):
if len(bags[key]) == 0:
return 1
acc = 1
for innerbag in bags[key]:
acc += innerbag.amount * countBags(innerbag.colour)
return acc
print(countBags('shiny gold') - 1)
```
#### File: 2020/9/9.py
```python
def twosum(list, target):
for i in list:
for j in list:
if i != j and i + j == target:
return True
return False
with open('xmas.txt') as fh:
lines = fh.readlines()
nums = [int(l.strip()) for l in lines]
idx = 25
while(True):
last25 = nums[idx-25:idx]
if twosum(last25, nums[idx]):
idx += 1
else:
break
targetsum = nums[idx]
wstart = 0
wend = 1
while(True):
cursum = sum(nums[wstart:wend+1])
if cursum < targetsum:
wend += 1
elif cursum > targetsum:
wstart += 1
else:
print("%d %d" % (wstart, wend))
window = nums[wstart:wend+1]
print(min(window) + max(window))
break
print(cursum)
```
|
{
"source": "JeremyRand/aiorpcX",
"score": 3
}
|
#### File: aiorpcX/examples/server_rs.py
```python
import asyncio
import aiorpcx
# Handlers are declared as normal python functions. aiorpcx automatically checks RPC
# arguments, including named arguments, and returns errors as appropriate
async def handle_echo(message):
return message
async def handle_sum(*values):
return sum(values, 0)
handlers = {
'echo': handle_echo,
'sum': handle_sum,
}
class ServerSession(aiorpcx.RPCSession):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
print(f'connection from {self.remote_address()}')
async def connection_lost(self):
await super().connection_lost()
print(f'{self.remote_address()} disconnected')
async def handle_request(self, request):
handler = handlers.get(request.method)
coro = aiorpcx.handler_invocation(handler, request)()
return await coro
loop = asyncio.get_event_loop()
loop.run_until_complete(aiorpcx.serve_rs(ServerSession, 'localhost', 8888))
loop.run_forever()
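# A matching client sketch (assumes the server above is listening on localhost:8888):
#   async def main():
#       async with aiorpcx.connect_rs('localhost', 8888) as session:
#           print(await session.send_request('echo', ['hello']))
#           print(await session.send_request('sum', [1, 2, 3]))
#   asyncio.get_event_loop().run_until_complete(main())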
```
|
{
"source": "jeremyrcouch/tictactoe",
"score": 3
}
|
#### File: tests/utils/test_helpers.py
```python
import numpy as np
import pytest
from utils.helpers import (Game, tuple_to_str, str_to_tuple, array_in_list, moving_average,
state_to_actions, check_states, state_transforms, reverse_transforms, reverse_function,
play_game, value_frequencies, moving_value_frequencies)
@pytest.mark.parametrize(
"won, expected",
[
pytest.param(1, 1, id="won"),
pytest.param(-1, -1, id="lost"),
pytest.param(0, 0, id="tie-or-not-done")
],
)
def test_Game_determine_reward(won, expected):
# arrange
game = Game()
game.won = won
marker = 1
# act
reward = game.determine_reward(marker)
# assert
assert reward == expected
@pytest.mark.parametrize(
"loc, marker, expected",
[
pytest.param((0, 0), 2, False, id="invalid-marker"),
pytest.param((0, 0), -1, False, id="not-turn"),
pytest.param((1, 1), 1, False, id="loc-not-empty"),
pytest.param((0, 0), 1, True, id="valid")
],
)
def test_Game_mark(loc, marker, expected):
# arrange
game = Game()
prev_turn = 1
game.turn = prev_turn
game.state[1, 1] = -1
prev_mark = game.state[loc[0], loc[1]]
# act
valid, _ = game.mark(loc, marker)
expected_turn = int(marker*-1) if valid else prev_turn
expected_mark = marker if valid else prev_mark
# assert
assert valid == expected
assert game.turn == expected_turn
assert game.state[loc[0], loc[1]] == expected_mark
@pytest.mark.parametrize(
"state, expected",
[
pytest.param((1, -1, 1, -1, -1, 1, 1, 1, -1), True, id="full-board"),
pytest.param((1, -1, 1, -1, -1, 1, 1, -1, -1), True, id="won"),
pytest.param((1, -1, 1, 0, -1, 0, 1, 0, -1), False, id="not-done")
],
)
def test_Game_update_done(state, expected):
# arrange
game = Game()
game.state = np.reshape(state, game.board_shape)
# act
game._update_done()
# assert
assert game.done == expected
@pytest.mark.parametrize(
"state, expected",
[
pytest.param((1, -1, 1, -1, -1, 1, 1, 1, -1), Game.empty_marker, id="none"),
pytest.param((-1, -1, 1, 1, -1, 1, 1, 1, -1), -1, id="diag"),
pytest.param((1, -1, 1, -1, -1, 1, 1, -1, -1), -1, id="vert"),
pytest.param((1, -1, 1, -1, -1, -1, 1, 1, -1), -1, id="horiz")
],
)
def test_Game_update_won(state, expected):
# arrange
game = Game()
game.state = np.reshape(state, game.board_shape)
# act
game._update_won()
# assert
assert game.won == expected
@pytest.mark.parametrize(
"tupe, expected",
[
pytest.param(tuple(), '', id="empty"),
pytest.param((0, -1, 1, 0, -1, 1, 1, 0, -1), '0-110-1110-1', id="full")
],
)
def test_tuple_to_str(tupe, expected):
# arrange
# act
string = tuple_to_str(tupe)
# assert
assert isinstance(string, str)
assert string == expected
@pytest.mark.parametrize(
"string, expected",
[
pytest.param('', tuple(), id="empty"),
pytest.param('0-110-1110-1', (0, -1, 1, 0, -1, 1, 1, 0, -1), id="full")
],
)
def test_str_to_tuple(string, expected):
# arrange
# act
tupe = str_to_tuple(string)
# assert
assert isinstance(tupe, tuple)
assert tupe == expected
@pytest.mark.parametrize(
"arr, arr_list, expected",
[
pytest.param([0, 1, 2], [], False, id="empty-list"),
pytest.param([0, 1, 2], [[2, 1, 0], [], [0, -1, 2]], False, id="not-in"),
pytest.param([[0, 1], [2, 3]], [[1, 1], [[0, 1], [2, 3]]], True, id="in"),
],
)
def test_array_in_list(arr, arr_list, expected):
# arrange
arr_in = np.array(arr)
arr_list_in = [np.array(a) for a in arr_list]
# act
is_in = array_in_list(arr_in, arr_list_in)
# assert
assert expected == is_in
@pytest.mark.parametrize(
"vals, n, expected",
[
pytest.param([0, 1, 2, 3, 4], 10, [], id="n>len(vals)"),
pytest.param([0, 1, 2, 3, 4], 3, [1, 2, 3], id="normal"),
],
)
def test_moving_average(vals, n, expected):
# arrange
expected_length = (len(vals) - (n - 1)) if n < len(vals) else 0
# act
ma = moving_average(vals, n=n)
# assert
assert len(ma) == expected_length
assert np.array_equal(ma, np.array(expected))
def test_moving_average_invalid_n():
# arrange
n = 0
vals = [1, 2, 3]
# act + assert
with pytest.raises(ValueError):
_ = moving_average(vals, n)
@pytest.mark.skip(reason='side effects')
def test_play_game():
pass
@pytest.mark.skip(reason='side effects')
def test_play_round_of_games():
pass
@pytest.mark.skip(reason='side effects')
def test_replay_loss():
pass
def test_state_to_actions():
# arrange
state = (0, 1, -1, 1, 0, -1, -1, 1, 0)
expected_actions = [(0, 0), (1, 1), (2, 2)]
# act
actions = state_to_actions(state, Game.ind_to_loc, Game.empty_marker)
# assert
assert set(actions) == set(expected_actions)
def test_check_states():
# arrange
state = np.reshape((0, 1, -1, 0, 0, -1, -1, 1, 1), Game.board_shape)
expected_count = 12
expected_transforms = [
{'func': None, 'args': {}},
{'func': np.rot90, 'args': {'k': 1}},
{'func': np.rot90, 'args': {'k': 2}},
{'func': np.rot90, 'args': {'k': 3}},
{'func': np.fliplr, 'args': {}},
{'func': np.flipud, 'args': {}}
]
expected_states = {
(0, 1, -1, 0, 0, -1, -1, 1, 1),
(-1, -1, 1, 1, 0, 1, 0, 0, -1),
(1, 1, -1, -1, 0, 0, -1, 1, 0),
(-1, 0, 0, 1, 0, 1, 1, -1, -1),
(-1, 1, 0, -1, 0, 0, 1, 1, -1),
(-1, 1, 1, 0, 0, -1, 0, 1, -1),
(0, -1, 1, 0, 0, 1, 1, -1, -1),
(1, 1, -1, -1, 0, -1, 0, 0, 1),
(-1, -1, 1, 1, 0, 0, 1, -1, 0),
(1, 0, 0, -1, 0, -1, -1, 1, 1),
(1, -1, 0, 1, 0, 0, -1, -1, 1),
(1, -1, -1, 0, 0, 1, 0, -1, 1)
}
# act
states, transforms = check_states(state)
# assert
assert len(states) == expected_count
assert len(transforms) == expected_count
assert set(states) == expected_states
assert all([t in transforms for t in expected_transforms])
def test_state_transforms():
# arrange
state = (0, 1, -1, 0, 0, -1, -1, 1, 1)
expected_count = 6
expected_transforms = [
{'func': None, 'args': {}},
{'func': np.rot90, 'args': {'k': 1}},
{'func': np.rot90, 'args': {'k': 2}},
{'func': np.rot90, 'args': {'k': 3}},
{'func': np.fliplr, 'args': {}},
{'func': np.flipud, 'args': {}}
]
expected_states = {
(0, 1, -1, 0, 0, -1, -1, 1, 1),
(-1, -1, 1, 1, 0, 1, 0, 0, -1),
(1, 1, -1, -1, 0, 0, -1, 1, 0),
(-1, 0, 0, 1, 0, 1, 1, -1, -1),
(-1, 1, 0, -1, 0, 0, 1, 1, -1),
(-1, 1, 1, 0, 0, -1, 0, 1, -1)
}
# act
states, transforms = state_transforms(state)
# assert
assert len(states) == expected_count
assert len(transforms) == expected_count
assert set(states) == expected_states
assert all([t in transforms for t in expected_transforms])
def test_reverse_transform():
# arrange
action_values = {
(0, 0): 0,
(0, 1): 0.1,
(0, 2): 0.2,
(1, 0): 0.3,
(1, 1): 0.4,
        (1, 2): 0.5,
(2, 0): 0.6,
(2, 1): 0.7,
(2, 2): 0.8
}
transform = {'func': np.fliplr, 'args': {}}
expected_values = [action_values[act] for act in action_values]
# act
adj_values = reverse_transforms(action_values, transform, Game.ind_to_loc)
values = [adj_values[act] for act in adj_values]
print(adj_values)
# assert
assert len(adj_values) == len(action_values)
assert set(values) == set(expected_values)
@pytest.mark.parametrize(
"loc, func, func_args, expected_loc",
[
pytest.param((0, 1), None, {}, (0, 1), id="none"),
pytest.param((0, 1), np.rot90, {'k': -1}, (1, 2), id="rot90"),
pytest.param((0, 1), np.rot90, {'k': -2}, (2, 1), id="rot180"),
pytest.param((0, 1), np.rot90, {'k': -3}, (1, 0), id="rot270"),
pytest.param((0, 1), np.fliplr, {}, (0, 1), id="fliplr"),
pytest.param((0, 1), np.flipud, {}, (2, 1), id="flipud"),
],
)
def test_reverse_function(loc, func, func_args, expected_loc):
# arrange
# act
new_loc = reverse_function(loc, Game.ind_to_loc, func, func_args)
# assert
assert new_loc == expected_loc
def test_value_frequencies():
# arrange
values = [1, 0, 0, 0, -1, -1, 1, -1, 0, 0]
expected_values = set(np.unique(values))
expected_length = len(expected_values)
expected_freqs = {0: 0.5, 1: 0.2, -1: 0.3}
# act
freqs = value_frequencies(values)
# assert
assert len(freqs) == expected_length
assert set([v for v in freqs]) == expected_values
for v in expected_freqs:
assert expected_freqs[v] == freqs[v]
@pytest.mark.parametrize(
"vals, n, expected",
[
pytest.param([1, 0, 0, 0, -1, -1, 1, -1, 0, 0], 20,
{0: [], 1: [], -1: []}, id="n>len(vals)"),
pytest.param([1, 0, 0, 0, -1, -1, 1, -1, 0, 0], 5,
{0: [0.6, 0.6, 0.4, 0.2, 0.2, 0.4],
1: [0.2, 0.0, 0.2, 0.2, 0.2, 0.2],
-1: [0.2, 0.4, 0.4, 0.6, 0.6, 0.4]}, id="normal"),
],
)
def test_moving_value_frequencies(vals, n, expected):
# arrange
expected_value_count = len(set(np.unique(vals)))
# act
freqs = moving_value_frequencies(vals, n=n)
# assert
assert len(freqs) == expected_value_count
for v in expected:
assert expected[v] == freqs[v]
def test_moving_value_frequencies_invalid_n():
# arrange
n = -1
vals = [1, 2, 3]
# act + assert
with pytest.raises(ValueError):
_ = moving_value_frequencies(vals, n)
@pytest.mark.skip(reason='plotting helper')
def test_plot_outcome_frequencies():
pass
```
#### File: tests/utils/test_players.py
```python
import pytest
from utils.players import Human
@pytest.mark.skip(reason='trivial')
def test_Player_record_move():
pass
@pytest.mark.skip(reason='user input')
def test_Human_play():
pass
@pytest.mark.skip(reason='trivial')
def test_RandomPlayer_play():
pass
```
|
{
"source": "jeremyrea/caterblu",
"score": 2
}
|
#### File: source/views/views.py
```python
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET['title']
country = request.GET['country']
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET['title']
country = request.GET['country']
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
```
|
{
"source": "jeremyrosenbaum/gh-app-starter-python",
"score": 3
}
|
#### File: jeremyrosenbaum/gh-app-starter-python/webhook_handlers.py
```python
import logging
from gh_utils import make_github_rest_api_call
"""
SPECIALIZED WEBHOOK HANDLERS
=======================
Because we may receive many webhooks for many different reasons, it's a good idea
to "hand off" control from `process_message()` to a dedicated function ASAP.
This is a good place for these specialized handlers
"""
log = logging.getLogger(__name__)
def add_pr_comment(webhook):
log.info('New Pull Request opened. Adding comment.')
# Gather the required information from the payload to send a successful request to the GitHub REST API.
repo_full_name = str(webhook.repository.full_name)
pr_number = str(webhook.pull_request.number)
comments_url = f'repos/{repo_full_name}/issues/{pr_number}/comments'
# Make the API call.
make_github_rest_api_call(
comments_url,
'POST', {
'body': "Hello there, thanks for creating a new Pull Request!"
}
)
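# Illustrative sketch: a minimal dispatcher along the lines described in the module docstring
# above, handing control off to a dedicated handler as soon as possible. The `event_type`
# argument and the webhook payload's `action` attribute are assumptions for illustration,
# not part of the starter's confirmed API.
def process_message(event_type, webhook):
    """Route an incoming webhook event to its specialized handler."""
    if event_type == 'pull_request' and getattr(webhook, 'action', None) == 'opened':
        add_pr_comment(webhook)
    else:
        log.info('No specialized handler registered for event type: %s', event_type)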
```
|
{
"source": "jeremyroy/ros_utilities",
"score": 2
}
|
#### File: velocity_to_csv/src/velocity_to_csv.py
```python
import rospy
from mining_msgs.msg import Velocity
from nav_msgs.msg import Odometry
from std_msgs.msg import Float64
import csv
####################
# Controller class #
####################
class Velocity2CSV:
# Constructor
def __init__(self):
# Get ros parameters
outfile = rospy.get_param('~outfile', 'velocity.csv')
input_topic = rospy.get_param('~input_topic', 'odom')
input_msg_type = rospy.get_param('~input_msg_type', 'Odometry')
# Set up subscribers
if input_msg_type == 'Odometry':
self.velocity_sub = rospy.Subscriber(input_topic, Odometry, self.odomCallback)
elif input_msg_type == 'Velocity':
self.velocity_sub = rospy.Subscriber(input_topic, Velocity, self.velocityCallback)
elif input_msg_type == 'Float64':
self.velocity_sub = rospy.Subscriber(input_topic, Float64, self.floatCallback)
self.write_file = open(outfile, 'w')
self.csv_writer = csv.writer(self.write_file)
def __del__(self):
self.write_file.close()
def odomCallback(self, msg):
velocity = msg.twist.twist.linear.x
time = msg.header.stamp.secs + (msg.header.stamp.nsecs/1e9)
self.csv_writer.writerow([str(time),str(velocity)])
def velocityCallback(self, msg):
velocity = msg.velocity
time = rospy.Time.now().to_sec()
self.csv_writer.writerow([str(time),str(velocity)])
def floatCallback(self, msg):
velocity = msg.data
time = rospy.Time.now().to_sec()
self.csv_writer.writerow([str(time),str(velocity)])
#################
# Main function #
#################
def main():
# Initialize the node
rospy.init_node('vel_to_csv', anonymous=True)
# Create the csv writer
csv_vel_writer = Velocity2CSV()
# Wait for messages on topic, go to callback function when new messages arrive.
rospy.spin()
##########################
# Entry point to program #
##########################
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
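# Example usage (sketch): the constructor reads the private params ~outfile, ~input_topic and
# ~input_msg_type, so the node can be pointed at a topic from the command line, e.g.
# (assuming the package is named velocity_to_csv):
#   rosrun velocity_to_csv velocity_to_csv.py _input_topic:=/odom _input_msg_type:=Odometry _outfile:=/tmp/vel.csv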
```
|
{
"source": "jeremyroy/simone_remote",
"score": 2
}
|
#### File: commands/scripts/receiveTwist.py
```python
import roslib; roslib.load_manifest('commands')
import rospy
import tf.transformations
from geometry_msgs.msg import Twist
def callback(msg):
rospy.loginfo("Received a \cmd_vel message!")
rospy.loginfo("Linear components: [%f, %f, %f]"%(msg.linear.x, msg.linear.y, msg.linear.z))
rospy.loginfo("Angular components: [%f, %f, %f]"%(msg.angular.x, msg.angular.y, msg.angular.z))
#Jeremy does his magic here
def receiveTwist():
rospy.init_node('cmd_vel_listener',anonymous=True)
rospy.Subscriber("/cmd_vel", Twist, callback)
rospy.spin() #prevent python from exiting until this node is stopped
if __name__ == '__main__':
receiveTwist()
```
#### File: commands/scripts/sendTwist.py
```python
import getch
import rospy
import roslib;
from geometry_msgs.msg import Twist
import sys
twist = Twist()
# 'a' and 'd' control angular x axis. 'w' and 's' control angular y axis. 'q' and 'e' control the angular z axis.
switcher = {'a':(0,1,0,0),'d':(0,-1,0,0),'w':(0,0,1,0),'s':(0,0,-1,0),'q':(0,0,0,1),'e':(0,0,0,-1),'h':(0,0,0,0)}
def print_publish(key, arrow_key, pub):
#Support for arrow keys can be added by checking if arrow_key is 1, and then checking for 'A','B','C','D'
#Flight modes
if key == '0': #All motors disabled
twist.linear.y = 0
elif key == '1': #Acro mode
twist.linear.y = 1
elif key == '2': #Stabilization mode
twist.linear.y = 2
elif key == '3': #Autonomous mode
twist.linear.y = 3
elif key in switcher: #Check for roll, pitch, yaw commands
val = switcher[key]
twist.linear.x, twist.angular.x, twist.angular.y, twist.angular.z = val
else: #Up, down, or invalid command
#Set other to 0 when rising, descending, or if invalid key
twist.linear.x, twist.angular.x, twist.angular.y, twist.angular.z = (0,0,0,0)
#Manage thrust up or down
if key == ' ' and twist.linear.z < 1: #Going up. Limited to 100% Thrust
twist.linear.z += 0.05
elif key == '.' and twist.linear.z > 0: #Going down. Limited to 0% Thrust
twist.linear.z -= 0.05
#Unknown key, or Thrust is already at min/max
else:
print("Invalid Key")
#Ready to publish linear and angular values
rospy.loginfo(twist)
pub.publish(twist)
#Main function
def sendKey():
#Set up publisher node
pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
rospy.init_node('sender', anonymous=True)
rate = rospy.Rate(35)
#define variables
arrow_key = 0
while not rospy.is_shutdown():
#get keyboard input
key = getch.getch()
#Check for special character and obtain third character
if ord(key) == 27: #Special character key
key = getch.getch()
if ord(key) == 91: #Special character key
key = getch.getch()
arrow_key = 1 #Differentiate between capital 'A', 'B', 'C', 'D' and arrow keys
else:
arrow_key = 0
#Print and publish keyboard command
print_publish(key, arrow_key, pub)
#Sleep until next rate
rate.sleep()
if __name__ == '__main__':
try:
sendKey()
except rospy.ROSInterruptException:
pass
```
|
{
"source": "Jeremyrqjones/firebase_friendlypix",
"score": 3
}
|
#### File: firebase_friendlypix/blobserver/main.py
```python
from google.appengine.api import users
from google.appengine.api import images
from google.appengine.ext import blobstore
from google.appengine.ext import ndb
from google.appengine.ext.webapp import blobstore_handlers
import webapp2
import logging
# This datastore model keeps track of which users uploaded which photos.
class UserPhoto(ndb.Model):
user = ndb.StringProperty()
blob_key = ndb.BlobKeyProperty()
class PhotoUploadFormHandler(webapp2.RequestHandler):
def get(self):
# [START upload_url]
upload_url = blobstore.create_upload_url('/upload_photo')
# [END upload_url]
# [START upload_form]
# To upload files to the blobstore, the request method must be "POST"
# and enctype must be set to "multipart/form-data".
self.response.out.write(upload_url)
# self.response.out.write("""
#<html><body>
#<form action="{0}" method="POST" enctype="multipart/form-data">
# Upload File: <input type="file" name="file"><br>
# <input type="submit" name="submit" value="Submit">
#</form>
#</body></html>""".format(upload_url))
# [END upload_form]
# [START upload_handler]
class PhotoUploadHandler(blobstore_handlers.BlobstoreUploadHandler):
def post(self):
logging.info("in upload handler")
#try:
upload = self.get_uploads()[0]
user_photo = UserPhoto(
user="default_user",
blob_key=upload.key())
user_photo.put()
logging.info(upload.key())
self.response.out.write(images.get_serving_url(upload.key()))
#self.redirect('/view_photo/%s' % upload.key())
#except:
#self.error(500)
# [END upload_handler]
# [START download_handler]
class ViewPhotoHandler(blobstore_handlers.BlobstoreDownloadHandler):
def get(self, photo_key):
if not blobstore.get(photo_key):
self.error(404)
else:
self.send_blob(photo_key)
# [END download_handler]
app = webapp2.WSGIApplication([
('/', PhotoUploadFormHandler),
('/upload_photo', PhotoUploadHandler),
('/view_photo/([^/]+)?', ViewPhotoHandler),
], debug=True)
# [END all]
```
|
{
"source": "JeremyRubin/Graffiti-codes",
"score": 3
}
|
#### File: Graffiti-codes/Graffiti-server/Processor.py
```python
from App import *
import ast
import numpy
import matplotlib.pyplot as plt
import datetime
import scipy
from scipy import signal, integrate
from numpy import trapz
class Processor(object):
""" This class processes the data from the Phone"""
def __init__(self, data):
data = ast.literal_eval(data)
# Apply wiener filter to the data
self.x = signal.wiener([ float(x) for x in data["dataX"] ])
self.y = signal.wiener([ float(y) for y in data["dataY"]])
self.z = signal.wiener([ float(z) for z in data["dataZ"]])
t = data["timestamps"]
# set a message if one is included
try:
self.msg = data['msg']
except KeyError:
self.msg = False
#convert timestamps into deltas
self.t = [(int(x)-int(t[0]))*10**-9 for x in t]
def mag(self,x,y,p,q):
# given two vectors x and y (and constant adjustments p and q),
# compute the squared magnitude at each time step
mag = []
for ind, el in enumerate(x):
mag.append((float(el)-p)**2+(float(y[ind])-q)**2)
return mag
def smooth(self, x, length):
# for the first `length` values of x, smooth the results by averaging over neighbors
# Could be improved for sure
smth = []
smooth_rate = 30
for index in xrange(length):
val = 0
ct = 1
for s in xrange(smooth_rate):
if s >= index:
continue
ct+=1
val+=x[index-s]
smth.append(val/ct)
return smth
def peaks(self, a, b, show=False):
# run several of the functions
mag = self.mag(a,b,0,0)
smooth = self.smooth(mag, len(self.t))
avg = (self.avg(smooth))
if show:
plt.plot(self.t,[avg for x in xrange(len(self.t))],show+'--')
plt.plot(self.t,smooth, show)
return (smooth, self.function_crosses(smooth, avg, True))
def avg(self,x):
# avg an array
return sum(x)/len(x)
def function_crosses(self,function, boundry, preserve):
# Find the indices of all upward crossings over the boundary for a dataset
switch = False
passes = 0
passIndex =[]
for index, el in enumerate(function):
if (switch == False) and (el> boundry):
switch = True
passes+=1
passIndex.append(index)
else:
pass
if el < boundry:
switch = False
return passIndex
def run(self):
# run the tests and return results
(smoothXY, xy) = self.peaks(self.x,self.y, show=None)
return (xy,0,0)
"""
Ignore this stuff for now
"""
def calibrate(self, x):
y = 0
for x in xrange(100):
y+=0
return y/100
def splitter(self, indexes, array):
# split an array based on indices
base = 0
result = []
for index in indexes:
result.append(array[base:index])
base = index
return result
def calcLength(self, x):
# calculate length using a trapezoidal integration
return trapz(trapz(x,self.t),self.t)
def function_up_downs(self, function, boundry):
switch = False
secSwitch = True
passes = 0
ct = 0
passIndex = []
for index, el in enumerate(function):
if (switch == False) and (el > boundry):
switch = True
if secSwitch:
passIndex.append(index)
secSwitch = False
if el < boundry:
switch = False
ct+=1
if ct == 2:
secSwitch = True
ct = 0
return passIndex
```
|
{
"source": "JeremyRubin/tornado-trails",
"score": 2
}
|
#### File: tornado-trails/handlers/MainPage.py
```python
from App import *
from BaseHandler import BaseHandler
class MainPage(BaseHandler):
def get(self):
self.render("main.html")
```
#### File: tornado-trails/models/BaseModel.py
```python
import datetime
import tornado.web
from bson.objectid import ObjectId  # assumed source of the ObjectId type used below (pymongo/bson)
class BaseModel(tornado.web.RequestHandler): #find a better way to connect?
@property
def db(self):
if not hasattr(self, '_db'):
self._db=self.application.settings['db']
return self._db
def date_handler(self, obj):
if isinstance(obj, datetime.datetime):
return "Time is in Progress"
if isinstance(obj, ObjectId):
return str(obj)
else:
return obj
```
|
{
"source": "jeremy-rutman/ml-support-code-in-python",
"score": 2
}
|
#### File: lib.linux-x86_64-2.7/nn_utils/imutils.py
```python
from __future__ import print_function
__author__ = 'jeremy'
import sys
import os
import cv2
import logging
import time
logging.basicConfig(level=logging.INFO) #debug is actually lower than info: critical/error/warning/info/debug
import shutil
# So this file can be imported on servers where joblib is not installed
try:
from joblib import Parallel,delayed
except:
pass
import numpy as np
import multiprocessing
import copy
#from trendi import constants
#import matplotlib.pyplot as plt
#import matplotlib.patches as mpatches
import subprocess
import inspect
import string
import random
import constants
import requests  # used below in get_cv2_img_array but missing from the original imports
from requests.exceptions import ConnectionError
USER_AGENT = 'Mozilla/5.0'  # placeholder user-agent string; the original constant is not defined in this file
#import background_removal
#from trendi.paperdoll import neurodoll_falcon_client
#from trendi import Utils
######################
#bounding box specific
######################
def intersectionOverUnion(r1, r2):
'''
r1,r2 in form xywh
:param r1:
:param r2:
:return:
'''
# print(r1, r2)
# a if test else b
intersectionx = int(max(r1[0], r2[0]))
intersectiony = int(max(r1[1], r2[1]))
intersectionw = int(min(r1[0] + r1[2], r2[0] + r2[2])) - int(intersectionx)
if intersectionw < 0:
intersectionw = 0
intersectionh = int(min(r1[1] + r1[3], r2[1] + r2[3])) - int(intersectiony)
if intersectionh < 0:
intersectionh = 0
# intersectionh -= intersectiony;
# print('r1:' + str(r1) + ' r2:' + str(r2) + ' x,y,w,h:' + str(intersectionx) + ',' + str(intersectiony) + ',' + str(
# intersectionw) + ',' + str(
# intersectionh))
totarea = r1[2] * r1[3] + r2[2] * r2[3] # this includes overlap twice
intersectionarea = intersectionw * intersectionh
totarea = totarea - intersectionarea # now totarea includes overlap only once
iou = float(intersectionarea) / float(totarea)
print('totarea,intarea,iou:' + str(totarea) + ',' + str(intersectionarea) + ',' + str(iou))
return (iou)
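# Worked example (illustrative numbers): for two 10x10 boxes offset by 5 pixels in x and y,
#   intersectionOverUnion([0, 0, 10, 10], [5, 5, 10, 10])
# the intersection is 5*5 = 25 and the union is 100 + 100 - 25 = 175, giving IoU = 25/175 ~ 0.14.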
def intersectionOverMinArea(r1,r2):
'''
r1,r2 in form xywh
:param r1:
:param r2:
:return:
'''
intersectionx = int(max(r1[0], r2[0]))
intersectiony = int(max(r1[1], r2[1]))
intersectionw = int(min(r1[0] + r1[2], r2[0] + r2[2])) - int(intersectionx)
if intersectionw < 0:
intersectionw = 0
intersectionh = int(min(r1[1] + r1[3], r2[1] + r2[3])) - int(intersectiony)
if intersectionh < 0:
intersectionh = 0
# intersectionh -= intersectiony;
# print('r1:' + str(r1) + ' r2:' + str(r2) + ' x,y,w,h:' + str(intersectionx) + ',' + str(intersectiony) + ',' + str(
# intersectionw) + ',' + str(
# intersectionh))
min_area=min(r1[2]*r1[3],r2[2]*r2[3])
intersectionarea = intersectionw * intersectionh
frac = float(intersectionarea) / float(min_area)
print('min_area,intarea,frac:' + str(min_area) + ',' + str(intersectionarea) + ',' + str(frac))
return (frac)
def combine_bbs(bb1_xywh,bb2_xywh):
minx=min(bb1_xywh[0],bb2_xywh[0])
maxx=max(bb1_xywh[0]+bb1_xywh[2],bb2_xywh[0]+bb2_xywh[2])
miny=min(bb1_xywh[1],bb2_xywh[1])
maxy=max(bb1_xywh[1]+bb1_xywh[3],bb2_xywh[1]+bb2_xywh[3])
w=maxx-minx
h=maxy-miny
return(minx,miny,w,h)
def get_person_bb_from_face(face, image_shape):
x, y, w, h, = face
mid_face_x = x + w/2
p_width = 3.5 * w
p_height = 8 * h
# person bb x1,x2,y1,y2
p_x1 = int(round(max(0, mid_face_x - p_width/2)))
p_x2 = int(round(min(image_shape[1], mid_face_x + p_width/2)))
p_y1 = y
p_y2 = int(round(min(image_shape[0], y + p_height)))
return [p_x1, p_y1, p_x2, p_y2]
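# Worked example (illustrative numbers): for a face bb of (x=100, y=50, w=40, h=40) in a
# 480x640 image (image_shape = (480, 640)), the person box is 3.5 face-widths wide and
# 8 face-heights tall, clipped to the image:
#   get_person_bb_from_face([100, 50, 40, 40], (480, 640)) -> [50, 50, 190, 370]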
def fix_bb_x1y1x2y2(bb_x1y1x2y2):
'''fix out-of-order bbs (x2y2x1y1) or right top ,left bottom'''
if bb_x1y1x2y2[0]>bb_x1y1x2y2[2]: #swap x1y1 w. x2y2
tmp=bb_x1y1x2y2[0] #swap x1,x2
bb_x1y1x2y2[0]=bb_x1y1x2y2[2]
bb_x1y1x2y2[2]=tmp
tmp=bb_x1y1x2y2[1] #swap y1,y2
bb_x1y1x2y2[1]=bb_x1y1x2y2[3]
bb_x1y1x2y2[3]=tmp
if bb_x1y1x2y2[1]>bb_x1y1x2y2[3]: # bb is top right instead of top left or something
logging.warning('malformed x1y1x2y2 bb {}, swapping y1 and y2'.format(bb_x1y1x2y2))
# raw_input('ret to cont')
tmp=bb_x1y1x2y2[1] #swap y1,y2, dont swap x
bb_x1y1x2y2[1]=bb_x1y1x2y2[3]
bb_x1y1x2y2[3]=tmp
# print(bb_x1y1x2y2)
return bb_x1y1x2y2
def bb_to_mask(bb, img_array):
'''
bb in form of x,y,w,h converted to np array the same size as img_array
:param bb:
:return:
'''
h, w = img_array.shape[0:2]
mask = np.zeros((img_array.shape[0], img_array.shape[1]), dtype=np.uint8)
if bounding_box_inside_image(img_array, bb):
mask[bb[1]:(bb[1] + bb[3]), bb[0]:(bb[0] + bb[2])] = 1  # numpy rows are y, columns are x
elif bb[0] + bb[2] <= w and bb[1] + bb[3] <= h: # left and top edges are ok
mask[bb[1]:min(bb[1] + bb[3], h), bb[0]:min(bb[0] + bb[2], w)] = 1
else: # left or top edge not ok so use entire box
mask = np.ones((h, w), dtype=np.uint8)
if mask.shape[0] != img_array.shape[0] or mask.shape[1] != img_array.shape[1]:
print('trouble with mask size in bb_to_mask, resetting to image size')
mask = np.ones((h, w), dtype=np.uint8)
return mask
def is_valid_image(img_array):
if img_array is not None and \
type(img_array) == np.ndarray and\
img_array.shape[0] * img_array.shape[1] >= constants.min_image_area:
return True
else:
return False
def is_valid_local_image_file(img_filename):
img_array = cv2.imread(img_filename)
return is_valid_image(img_array)
def is_valid_local_or_remote_image_file(img_filename):
img_array = get_cv2_img_array(img_filename)
return is_valid_image(img_array)
def get_cv2_img_array(url_or_path_to_image_file_or_cv2_image_array, convert_url_to_local_filename=False, download=False,
download_directory='images', filename=False, replace_https_with_http=True):
"""
Get a cv2 img array from a number of different possible inputs.
:param url_or_path_to_image_file_or_cv2_image_array:
:param convert_url_to_local_filename:
:param download:
:param download_directory:
:return: img_array
"""
# print('get:' + str(url_or_path_to_image_file_or_cv2_image_array) + ' try local' + str(
# convert_url_to_local_filename) + ' download:' + str(download))
got_locally = False
img_array = None # attempt to deal with non-responding url
# first check if we already have a numpy array
if isinstance(url_or_path_to_image_file_or_cv2_image_array, np.ndarray):
img_array = url_or_path_to_image_file_or_cv2_image_array
# otherwise it's probably a string, check what kind
elif isinstance(url_or_path_to_image_file_or_cv2_image_array, basestring):
# try getting url locally by changing url to standard name
if convert_url_to_local_filename: # turn url into local filename and try getting it again
# filename = url_or_path_to_image_file_or_cv2_image_array.split('/')[-1].split('#')[0].split('?')[0]
# jeremy changed this since it didn't work with url -
# https://encrypted-tbn1.gstatic.com/images?q=tbn:ANd9GcR2oSMcnwErH1eqf4k8fvn2bAxvSdDSbp6voC7ijYJStL2NfX6v
# TODO: find a better way to create legal filename from url
filename = \
url_or_path_to_image_file_or_cv2_image_array.split('/')[-1].split('#')[0].split('?')[-1].split(':')[
-1]
filename = os.path.join(download_directory, filename)
if filename.endswith('jpg') or filename.endswith('jpeg') or filename.endswith('.bmp') or \
filename.endswith('tiff'):
pass
else: # there's no 'normal' filename ending so add .jpg
filename = filename + '.jpg'
# print('trying again locally using filename:' + str(filename))
img_array = get_cv2_img_array(filename, convert_url_to_local_filename=False, download=download,
download_directory=download_directory)
# maybe return(get_cv2 etc) instead of img_array =
if img_array is not None:
# print('got ok array calling self locally')
return img_array
else: # couldnt get locally so try remotely
# print('trying again remotely since using local filename didnt work, download=' + str( download) + ' fname:' + str(filename))
return (
get_cv2_img_array(url_or_path_to_image_file_or_cv2_image_array, convert_url_to_local_filename=False,
download=download,
download_directory=download_directory)) # this used to be 'return'
# put images in local directory
else:
# get remotely if its a url, get locally if not
if "://" in url_or_path_to_image_file_or_cv2_image_array:
if replace_https_with_http:
url_or_path_to_image_file_or_cv2_image_array = url_or_path_to_image_file_or_cv2_image_array.replace(
"https", "http")
img_url = url_or_path_to_image_file_or_cv2_image_array
try:
# print("trying remotely (url) ")
headers = {'User-Agent': USER_AGENT}
response = requests.get(img_url, headers=headers) # download
img_array = cv2.imdecode(np.asarray(bytearray(response.content)), 1)
except ConnectionError:
logging.warning("connection error - check url or connection")
return None
except:
logging.warning(" error other than connection error - check something other than connection")
return None
else: # get locally, since its not a url
# print("trying locally (not url)")
img_path = url_or_path_to_image_file_or_cv2_image_array
try:
img_array = cv2.imread(img_path)
if img_array is not None:
# print("success trying locally (not url)")
got_locally = True
else:
# print('couldnt get locally (in not url branch)')
return None
except:
# print("could not read locally, returning None")
logging.warning("could not read locally, returning None")
return None # input isn't a basestring nor a np.ndarray....so what is it?
else:
logging.warning("input is neither an ndarray nor a string, so I don't know what to do")
return None
# After we're done with all the above, this should be true - final check that we're outputting a good array
if not (isinstance(img_array, np.ndarray) and isinstance(img_array[0][0], np.ndarray)):
print("Bad image coming into get_cv2_img_array - check url/path/array:" + str(
url_or_path_to_image_file_or_cv2_image_array) + 'try locally' + str(
convert_url_to_local_filename) + ' dl:' + str(
download) + ' dir:' + str(download_directory))
logging.warning("Bad image - check url/path/array:" + str(
url_or_path_to_image_file_or_cv2_image_array) + 'try locally' + str(
convert_url_to_local_filename) + ' dl:' + str(
download) + ' dir:' + str(download_directory))
return (None)
# if we got good image and need to save locally :
if download:
if not got_locally: # only download if we didn't get file locally
if not os.path.isdir(download_directory):
os.makedirs(download_directory)
if "://" in url_or_path_to_image_file_or_cv2_image_array: # its a url, get the bifnocho
if replace_https_with_http:
url_or_path_to_image_file_or_cv2_image_array = url_or_path_to_image_file_or_cv2_image_array.replace(
"https", "http")
filename = \
url_or_path_to_image_file_or_cv2_image_array.split('/')[-1].split('#')[0].split('?')[-1].split(':')[
-1]
filename = os.path.join(download_directory, filename)
else: # its not a url so use straight
filename = os.path.join(download_directory, url_or_path_to_image_file_or_cv2_image_array)
if filename.endswith('jpg') or filename.endswith('jpeg') or filename.endswith('.bmp') or filename.endswith(
'tiff'):
pass
else: # there's no 'normal' filename ending
filename = filename + '.jpg'
try: # write file then open it
# print('filename for local write:' + str(filename))
write_status = cv2.imwrite(filename, img_array)
max_i = 50 # wait until file is readable before continuing
gotfile = False
for i in xrange(max_i):
try:
with open(filename, 'rb') as _:
gotfile = True
except IOError:
time.sleep(10)
if gotfile == False:
print('Could not access {} after {} attempts'.format(filename, str(max_i)))
raise IOError('Could not access {} after {} attempts'.format(filename, str(max_i)))
except: # this is prob unneeded given the 'else' above
print('unexpected error in Utils calling imwrite')
return img_array
def count_human_bbs_in_doc(dict_of_images, skip_if_marked_to_skip=True):
n = 0
for entry in dict_of_images:
print('entry:' + str(entry) + ' n=' + str(n), end='\r')
if good_bb(entry, skip_if_marked_to_skip=skip_if_marked_to_skip):
n = n + 1 # dont care if marked to be skipped
return (n)
def average_bbs(bblist):
avg_box = [0, 0, 0, 0]
n = 0
for bb in bblist:
# print('avg'+str(avg_box))
# print('bb'+str(bb))
avg_box = np.add(avg_box, bb)
# print('avg after'+str(avg_box))
n = n + 1
avg_box = [int(x) for x in np.divide(avg_box, n)]  # np.int() cannot convert a whole array
return avg_box
def good_bb(dict, skip_if_marked_to_skip=True):
'''
determine if dict has good human bb in it
'''
if skip_if_marked_to_skip:
if "skip_image" in dict:
if dict['skip_image'] == True:
return (False)
if not 'url' in dict:
# print('img is none')
return (False)
url = dict['url']
img_arr = get_cv2_img_array(url, convert_url_to_local_filename=True, download=True,
download_directory='images')
if not is_valid_image(img_arr):
print('bad image array discovered in is_valid_image')
return False
if not 'human_bb' in dict:
# print('no human_bb key in dict')
return (False)
if dict["human_bb"] is None:
# print('human_bb is None')
return (False)
bb = dict['human_bb']
if not bounding_box_inside_image(img_arr, bb): #
print('bad bb caught,bb:' + str(bb) + ' img size:' + str(img_arr.shape) + ' imagedoc:' + str(
url))
return (False)
if all_inclusive_bounding_box(img_arr, bb):
    dict['human_bb'] = reduce_bounding_box(bb) # attempting to avoid bbsize=imgsize
return (True)  # bb is good whether or not it had to be shrunk
def legal_bounding_box(rect):
if rect is None:
return False
minimum_allowed_area = constants.min_image_area
if rect[2] * rect[3] < minimum_allowed_area:
logging.warning('bb too small : area = ' + str(rect[2]) + 'x' + str(rect[3]) + ':' + str(rect[2] * rect[3]))
return False
if rect[0] < 0 or rect[1] < 0 or rect[2] < 0 or rect[3] < 0:
return False
return True
def bounding_box_inside_image(image_array, rect):
# if check_img_array(image_array) and legal_bounding_box(rect):
if legal_bounding_box(rect):
height, width = image_array.shape[0:2]
if rect[0] < width and rect[0] + rect[2] < width and rect[1] < height and rect[1] + rect[3] < height:
return True # bb fits into image
else:
#print('warning - bb not inside image')
return False
else:
print('warning - bb not legal (either too small or None)')
return False
# products_collection_cursor = db.products.find() #Regular db of one fig per item
# prefixes = ['Main Image URL angle ', 'Style Gallery Image ']
# training docs contains lots of different images (URLs) of the same clothing item
# logging.debug(str(doc))
# print('doc:'+str(doc))
# for prefix in prefixes:
def fix_all_bbs_in_db(use_visual_output=False):
'''
fix all the bbs so they fit their respective image
:return:
'''
if db is None:
return {"success": 0, "error": "could not get db"}
training_collection_cursor = db.training.find()
print('returned cursor')
assert (training_collection_cursor) # make sure training collection exists
doc = next(training_collection_cursor, None)
i = 0
j = 0
while doc is not None:
print('doc:' + str(doc))
images = doc['images']
print('checking doc #' + str(j + 1))
i = 0
for image in images:
image_url = image["url"]
if 'skip_image' in image:
if image['skip_image'] == True:
print('marked for skip:' + str(i), end='\r')
continue
img_arr = get_cv2_img_array(image_url, convert_url_to_local_filename=True, download=True,
download_directory='images')
if not is_valid_image(img_arr):
print('img is not valid (None or too small)')
continue
if 'human_bb' in image:
i = i + 1
height, width = img_arr.shape[0:2]
bb = image["human_bb"]
if bb is None:
print('bb is None')
continue
cv2.rectangle(img_arr, (bb[0], bb[1]), (bb[0] + bb[2], bb[1] + bb[3]), color=[0, 0, 255],
thickness=2)
cv2.imshow('img', img_arr)
k = cv2.waitKey(50) & 0xFF
if not bounding_box_inside_image(img_arr, bb):
print('bad bb caught,bb:' + str(bb) + ' img size:' + str(img_arr.shape) + ' imagedoc:' + str(
image) + ' h,w:' + str(height) + ',' + str(width))
print('h,w:' + str(height) + ',' + str(width))
if not legal_bounding_box(bb): # too small, make right and bottom at edge of image
print('not legal bounding box')
raw_input('not a legal bb...')
bb[2] = width - bb[0]
bb[3] = height - bb[1]
bb[0] = max(0, bb[0]) # if less than zero
bb[0] = min(bb[0], width - 1) # if greater than width
bb[2] = max(0, bb[2]) # if less than 0
bb[2] = min(bb[2], width - bb[0] - 1) # the -1 is just to make sure, prob unneeded
bb[1] = max(0, bb[1]) # if less than zero
bb[1] = min(bb[1], height - 1) # if greater than height
bb[3] = max(0, bb[3]) # if less than zero
bb[3] = min(bb[3], height - bb[1] - 1) # the -1 is just to make sure, prob unneeded
print('suggested replacement:' + str(bb))
raw_input('got one')
image["human_bb"] = bb
id = str(doc['_id'])
write_result = db.training.update({"_id": objectid.ObjectId(id)},
{"$set": {"images": doc['images']}})
# TODO: check error on updating
print('write result:' + str(write_result))
else:
print('got good bb, i=' + str(i), end='\r', sep='')
j = j + 1
doc = next(training_collection_cursor, None)
return {"success": 1}
def show_all_bbs_in_db(use_visual_output=True):
'''
fix all the bbs so they fit their respective image
:return:
'''
if db is None:
return {"success": 0, "error": "could not get db"}
training_collection_cursor = db.training.find()
print('returned cursor')
assert (training_collection_cursor) # make sure training collection exists
doc = next(training_collection_cursor, None)
i = 0
j = 0
while doc is not None:
print('doc:' + str(doc))
images = doc['images']
print('checking doc #' + str(j + 1))
print(doc)
i = 0
for image in images:
image_url = image["url"]
if 'skip_image' in image:
if image['skip_image'] == True:
print('marked for skip:' + str(i), end='\r')
continue
img_arr = get_cv2_img_array(image_url, convert_url_to_local_filename=True, download=True,
download_directory='images')
if not is_valid_image(img_arr):
print('img is not valid (None or too small)')
continue
if 'human_bb' in image:
i = i + 1
height, width = img_arr.shape[0:2]
bb = image["human_bb"]
if bb is None:
print('bb is None')
continue
if not bounding_box_inside_image(img_arr, bb):
print('bad bb caught,bb:' + str(bb) + ' img size:' + str(img_arr.shape) + ' imagedoc:' + str(
image) + ' h,w:' + str(height) + ',' + str(width))
if use_visual_output:
# cv2.rectangle(img_arr, (bb[0], bb[1]), (bb[0] + bb[2], bb[1] + bb[3]), color=[0,255,0], thickness=2)
cv2.imshow('im1', img_arr)
k = cv2.waitKey(0) & 0xFF
else:
print('got good bb, i=' + str(i), end='\r', sep='')
if use_visual_output:
cv2.rectangle(img_arr, (bb[0], bb[1]), (bb[0] + bb[2], bb[1] + bb[3]), color=[0, 255, 0],
thickness=2)
cv2.imshow('im1', img_arr)
k = cv2.waitKey(0) & 0xFF
# raw_input('waiting for input')
j = j + 1
doc = next(training_collection_cursor, None)
return {"success": 1}
def all_inclusive_bounding_box(image_array, bounding_box):
"""
determine if the bb takes up all or almost all the image
:param image_array:
:param bounding_box:
:return:whether the bb takes up almost all image (True) or not (False)
"""
height, width = image_array.shape[0:2]
image_area = float(height * width)
bb_area = bounding_box[2] * bounding_box[3]
if bb_area > constants.min_bb_to_image_area_ratio * image_area:
# print('got a bb that takes nearly all image')
# logging.warning('got a bb that takes nearly all image')
return True
else:
return False
def reduce_bounding_box(bounding_box):
"""
determine if the bb takes up all or almost all the image
:param bounding_box:
:return:smaller bb (again attempting to get around grabcut bug )
"""
newx = bounding_box[0] + 1
new_width = bounding_box[2] - 1
newy = bounding_box[1] + 1
new_height = bounding_box[3] - 1
newbb = [newx, newy, new_width, new_height]
if legal_bounding_box(newbb):
return newbb
else:
logging.warning('cant decrease size of bb')
return bounding_box
def image_stats_from_dir_of_dirs(dir_of_dirs,filter=None):
only_dirs = [dir for dir in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs,dir))]
if filter is not None:
only_dirs = [dir for dir in only_dirs if filter in dir]
only_dirs.sort()
hlist = []
wlist = []
dlist = []
Blist = []
Glist = []
Rlist = []
nlist = []
n=0
for a_dir in only_dirs:
fulldir = os.path.join(dir_of_dirs,a_dir)
print('analyzing dir '+fulldir)
results = image_stats_from_dir(fulldir)
if results is not None:
hlist.append(results[0])
wlist.append(results[1])
dlist.append(results[2])
Blist.append(results[3])
Glist.append(results[4])
Rlist.append(results[5])
nlist.append(results[6])
n += 1
avg_h = np.average(hlist,weights=nlist)
avg_w = np.average(wlist,weights=nlist)
avg_d = np.average(dlist,weights=nlist)
avg_B = np.average(Blist,weights=nlist)
avg_G = np.average(Glist,weights=nlist)
avg_R = np.average(Rlist,weights=nlist)
totfiles = np.sum(nlist)
print('weighted averages of {} directories: h:{} w{} d{} B {} G {} R {} totfiles {}'.format(n,avg_h,avg_w,avg_d,avg_B,avg_G,avg_R,totfiles))
return([avg_h,avg_w,avg_d,avg_B,avg_G,avg_R,totfiles])
def image_chooser_dir_of_dirs(dir_of_dirs,dest_dir,removed_dir=None,filter=None,relabel_dir=None,multiple_dir=None):
print('running images chooser source:{} dest:{} filter {}'.format(dir_of_dirs,dest_dir,filter))
only_dirs = [d for d in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs, d))]
if filter is not None:
only_dirs = [d for d in only_dirs if filter in d]
for d in only_dirs:
actual_source = os.path.join(dir_of_dirs,d)
actual_dest = os.path.join(dest_dir,d)
if removed_dir is None:
removed_dir = os.path.join(actual_source,'removed')
if relabel_dir is None:
relabel_dir = os.path.join(actual_source,'mislabelled')
if multiple_dir is None:
multiple_dir = os.path.join(actual_source,'multiple_items')
Utils.ensure_dir(actual_dest)
Utils.ensure_dir(removed_dir)
Utils.ensure_dir(relabel_dir)
Utils.ensure_dir(multiple_dir)
image_chooser(actual_source,actual_dest,removed_dir=removed_dir,relabel_dir=relabel_dir,multiple_dir=multiple_dir)
def image_chooser(source_dir,dest_dir=None,removed_dir=None,relabel_dir=None,multiple_dir=None,ensure_jpg_suffix=True,remove_parens=True,display_size=(700,700)):
print('starting image chooser source {} dest {}'.format(source_dir,dest_dir))
if removed_dir is None:
removed_dir = os.path.join(source_dir,'removed')
if relabel_dir is None:
relabel_dir = os.path.join(source_dir,'mislabelled')
if multiple_dir is None:
multiple_dir = os.path.join(source_dir,'multiple_items')
if dest_dir is None:
dest_dir = os.path.join(source_dir,'kept')
Utils.ensure_dir(removed_dir)
Utils.ensure_dir(multiple_dir)
Utils.ensure_dir(relabel_dir)
Utils.ensure_dir(dest_dir)
print('choosing:'+str(source_dir)+'\ngood:'+str(dest_dir)+' \nremoved:'+str(removed_dir)+' \nreprocess:'+str(relabel_dir)+'\nmultiple:'+str(multiple_dir))
only_files = [f for f in os.listdir(source_dir) if os.path.isfile(os.path.join(source_dir, f))]
n = len(only_files)
if n==0:
print('no files in '+source_dir)
return
i = -1
elapsed_time=0
tpi = 1
alpha = 0.9
time_done=time.time()
n_deleted = 0
n_kept = 1 #white lie to avoid /0
while i < n-1 : #to allow undo need to manipulate index which doesnt work with iterator
i = i + 1
a_file = only_files[i]
fullname = os.path.join(source_dir,a_file)
if ensure_jpg_suffix:
if a_file[-4:]!='.jpg':
a_file=a_file+'.jpg'
if remove_parens:
a_file = a_file.replace('(','').replace(')','')
img_arr = cv2.imread(fullname)
if img_arr is None:
print('trouble getting image '+fullname)
continue
shape = img_arr.shape
# resized = img_arr
resized = resize_keep_aspect(img_arr,output_size=display_size)
h,w = img_arr.shape[0:2]
## if h>200:
# resized = cv2.resize(img_arr,(int((200.0*w)/h),200))
# print('h,w {},{} newh neww {},{}'.format(h,w,resized.shape[0],resized.shape[1]))
print('img '+str(i)+' of '+str(n)+':'+a_file+' shape:'+str(shape) +' (resized to '+str(resized.shape)+') kept:'+str(n_kept)+' deleted:'+str(n_deleted)+' %'+str(100*float(n_deleted)/n_kept))
print('(q)uit (d)elete (k)eep (r)elabel (m)ultiple items (u)ndo tpi {}'.format(tpi))
winname = a_file
while(1):
cv2.imshow(winname,resized)
k = cv2.waitKey(0)
# q to stop
if k==ord('q'):
print('quitting')
sys.exit('quitting since you pressed q')
elif k==ord('d'): # normally -1 returned,so don't print it
# print('removing '+a_file+' to '+removed_dir)
dest_fullname = os.path.join(removed_dir,a_file)
print('removing {}\nto {} '.format(fullname,dest_fullname))
shutil.move(fullname, dest_fullname)
prev_moved_to = dest_fullname
prev_moved_from = fullname
n_deleted = n_deleted + 1
break
elif k== ord('k'):
# print('keeping '+a_file+' in '+dest_dir)
dest_fullname = os.path.join(dest_dir,a_file)
print('keeping {}\nto {} '.format(fullname,dest_fullname))
shutil.move(fullname, dest_fullname)
prev_moved_to = dest_fullname
prev_moved_from = fullname
n_kept = n_kept + 1
break
elif k== ord('r'):
dest_fullname = os.path.join(relabel_dir,a_file)
print('relabelling {}\nto {} '.format(fullname,dest_fullname))
shutil.move(fullname, dest_fullname)
prev_moved_to = dest_fullname
prev_moved_from = fullname
break
elif k== ord('m'):
# print('reprocessing '+a_file+' in '+reprocess_dir)
dest_fullname = os.path.join(multiple_dir,a_file)
print('multiple, moving {}\nto {} '.format(fullname,dest_fullname))
shutil.move(fullname, dest_fullname)
prev_moved_to = dest_fullname
prev_moved_from = fullname
break
elif k== ord('u'):
# print('reprocessing '+a_file+' in '+reprocess_dir)
print('undo')
shutil.move(prev_moved_to,prev_moved_from)
i = i - 2
break
else:
k = cv2.waitKey(0)
print('unidentified key')
#add 'back' option
elapsed_time = time.time()-time_done
tpi = alpha*tpi+(1-alpha)*elapsed_time
time_done=time.time()
cv2.destroyWindow(winname)
def image_stats_from_dir(dirname):
only_files = [f for f in os.listdir(dirname) if os.path.isfile(os.path.join(dirname, f))]
hlist = []
wlist = []
dlist = []
Blist = []
Glist = []
Rlist = []
n=0
for filename in only_files:
fullpath = os.path.join(dirname,filename)
results = image_stats(fullpath)
if results is not None:
# print(results)
hlist.append(results[0])
wlist.append(results[1])
dlist.append(results[2])
Blist.append(results[3])
Glist.append(results[4])
Rlist.append(results[5])
n += 1
print(str(n) +' of '+str(len(only_files)), end='\r')
sys.stdout.flush()
avg_h = np.mean(hlist)
avg_w = np.mean(wlist)
avg_d = np.mean(dlist)
avg_B = np.mean(Blist)
avg_G = np.mean(Glist)
avg_R = np.mean(Rlist)
print('dir:{} avg of {} images: h:{} w{} d{} B {} G {} R {}'.format(dirname,n,avg_h,avg_w,avg_d,avg_B,avg_G,avg_R))
if n == 0 :
return None
return([avg_h,avg_w,avg_d,avg_B,avg_G,avg_R,n])
def image_stats(filename):
img_arr = cv2.imread(filename)
if img_arr is not None:
use_visual_output = False
if(use_visual_output):
cv2.imshow('current_fig',img_arr)
cv2.waitKey(10)
shape = img_arr.shape
if len(shape)>2: #BGR
h=shape[0]
w = shape[1]
d = shape[2]
avgB = np.mean(img_arr[:,:,0])
avgG = np.mean(img_arr[:,:,1])
avgR = np.mean(img_arr[:,:,2])
return([h,w,d,avgB,avgG,avgR])
else: #grayscale /single-channel image has no 3rd dim
h=shape[0]
w=shape[1]
d=1
avgGray = np.mean(img_arr[:,:])
return([h,w,1,avgGray,avgGray,avgGray])
else:
logging.warning('could not open {}'.format(filename))
return None
def test_or_training_textfile(dir_of_dirs,test_or_train=None):
'''
takes dir of dirs each with different class, makes textfile suitable for training/test set
:param dir_of_dirs:
:return:
'''
only_dirs = [dir for dir in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs,dir))]
only_dirs.sort()
print(str(len(only_dirs))+' dirs:'+str(only_dirs))
if test_or_train:
filename = os.path.join(dir_of_dirs,test_or_train+'.txt')
else:
filename = os.path.join(dir_of_dirs,'fileclasses.txt')
with open(filename,'a') as myfile: #append , don't clobber
classno = 0
for dir in only_dirs:
if (not test_or_train) or dir[0:4]==test_or_train[0:4]:
fulldir = os.path.join(dir_of_dirs,dir)
print('fulldir:'+str(fulldir))
only_files = [f for f in os.listdir(fulldir) if os.path.isfile(os.path.join(fulldir, f))]
n = len(only_files)
print('n files {} in {}'.format(n,dir))
for a_file in only_files:
line = os.path.join(dir_of_dirs,dir, a_file) + ' '+ str(classno) + '\n'
myfile.write(line)
classno += 1
def resize_and_crop_image( input_file_or_np_arr, output_file=None, output_side_length = 256,use_visual_output=False):
'''Takes an image name, resize it and crop the center square
'''
#TODO - implement nonsquare crop
if isinstance(input_file_or_np_arr,basestring):
input_file_or_np_arr = cv2.imread(input_file_or_np_arr)
height, width, depth = input_file_or_np_arr.shape
new_height = output_side_length
new_width = output_side_length
if height > width:
new_height = output_side_length * height / width
else:
new_width = output_side_length * width / height
resized_img = cv2.resize(input_file_or_np_arr, (new_width, new_height))
height_offset = int((new_height - output_side_length) / 2)
width_offset = int((new_width - output_side_length) / 2)
cropped_img = resized_img[height_offset:height_offset + output_side_length,
width_offset:width_offset + output_side_length]
if use_visual_output is True:
cv2.imshow('cropped', cropped_img)
cv2.imshow('orig',input_file_or_np_arr)
cv2.waitKey(0)
if output_file is not None:
cv2.imwrite(output_file, cropped_img)
return cropped_img
def resize_to_max_sidelength(img_arr, max_sidelength=250,use_visual_output=True):
'''
resizes to a maximum sidelength keeping orig. aspect ratio
:param img_arr:
:param max_sidelength:
:param use_visual_output:
:return:resized image
'''
h,w,c = img_arr.shape
if h>w:
#if h>max_sidelength:
new_h = max_sidelength
new_w = int(w*float(max_sidelength)/h)
img_arr=cv2.resize(img_arr,(new_w,new_h))
print('hshape ')
print(img_arr.shape)
# else: #longest side is still under limit , show orig without resize
# pass
else:
# if w>max_sidelength:
new_w = max_sidelength
new_h = int(h*float(max_sidelength)/w)
img_arr=cv2.resize(img_arr,(new_w,new_h))
print('shape')
print(img_arr.shape)
# else: #longest side is still under limit , show orig without resize
# pass
if (use_visual_output):
cv2.imshow('image',img_arr)
cv2.waitKey(0)
return img_arr
def resize_keep_aspect_dir(dir,outdir=None,overwrite=False,output_size=(256,256),use_visual_output=False,filefilter='.jpg',
careful_with_the_labels=False,recursive=False,kill_parens=True):
'''
you can avoid use of filter by specifying filefilter=''
:param dir:
:param outdir:
:param overwrite:
:param output_size:
:param use_visual_output:
:param filefilter:
:param careful_with_the_labels:
:param recursive:
:return:
'''
if recursive:
allfiles = []
for root,dirs,files in os.walk(dir):
path = root.split(os.sep)
# print('root {}\ndirs {} '.format(root,dirs))
allfiles = allfiles + [os.path.join(root,f) for f in files if filefilter in f]
# raw_input('ret to cont')
files = allfiles
else:
files = [ os.path.join(dir,f) for f in os.listdir(dir) if filefilter in f]
print(str(len(files))+' files in '+dir)
for file in files:
# fullname = os.path.join(dir,file)
if overwrite:
newname = file
else:
filebase = os.path.basename(file)
basedir = os.path.dirname(file)
# print('file {}\nbase {}\nalone {}'.format(file,basedir,filebase))
if outdir:
Utils.ensure_dir(outdir)
newname = os.path.join(outdir,filebase)
else:
newname = filebase.split(filefilter)[0]+'_'+str(output_size[0])+'x'+str(output_size[1])+filefilter
newname = os.path.join(basedir,newname)
if kill_parens:
newname=newname.replace('(','') #
newname=newname.replace(')','')
print('infile:{}\ndesired size:{}\noutfile {}'.format(file,output_size,newname))
# raw_input('ret to cont')
resize_keep_aspect(file, output_file=newname, output_size = output_size,use_visual_output=use_visual_output,careful_with_the_labels=careful_with_the_labels)
def resize_keep_aspect(input_file_or_np_arr, output_file=None, output_size = (256,256),use_visual_output=False,careful_with_the_labels=False, copy_edge_pixeles=False):
'''
Takes an image name/arr, resize keeping aspect ratio, filling extra areas with edge values
:param input_file_or_np_arr:
:param output_file:name for output
:param output_size:size of output image (height,width)
:param use_visual_output:
:return:
'''
if isinstance(input_file_or_np_arr,basestring):
input_file_or_np_arr = cv2.imread(input_file_or_np_arr)
if input_file_or_np_arr is None:
logging.warning('got a bad image')
return
inheight, inwidth = input_file_or_np_arr.shape[0:2]
if inheight < 2 or inwidth < 2 : #1-pixel wide or high images cant be resized. actually just failed on a 6x7756 image->256x256...
logging.warning('image too small to resize (needs at least 2x2): {}'.format(input_file_or_np_arr.shape))
return
outheight, outwidth = output_size[:]
if outheight == 0 or outwidth == 0:
logging.warning('got a zero height or width in resize request {}'.format(output_size))
return
if inheight==outheight and inwidth==outwidth:
logging.info('resize not needed , in {}x{} out {}x{}'.format(inheight,inwidth,outheight,outwidth))
return input_file_or_np_arr
logging.info('doing resize , input hxw {} {} output hxw {} {}'.format(inheight,inwidth,outheight,outwidth))
if inheight == 0 or inwidth == 0:
logging.warning('got a bad image')
return
out_ar = float(outheight)/outwidth
in_ar = float(inheight)/inwidth
if len(input_file_or_np_arr.shape) == 3:
indepth = input_file_or_np_arr.shape[2]
output_img = np.zeros([outheight,outwidth,indepth],dtype=np.uint8)
else:
indepth = 1
output_img = np.zeros([outheight,outwidth],dtype=np.uint8)
# print('input:{}x{}x{}'.format(inheight,inwidth,indepth))
actual_outheight, actual_outwidth = output_img.shape[0:2]
# print('output:{}x{}'.format(actual_outheight,actual_outwidth))
if out_ar < in_ar: #resize height to output height and fill left/right
factor = float(inheight)/outheight
new_width = int(float(inwidth) / factor)
try:
resized_img = cv2.resize(input_file_or_np_arr, (new_width, outheight))
except:
e = sys.exc_info()[0]
logging.warning('error on resizing {} to {} error:{}'.format(input_file_or_np_arr.shape,output_size,e))
# raw_input('ret to cont')
return
# print('<resize size:'+str(resized_img.shape)+' desired width:'+str(outwidth)+' orig width resized:'+str(new_width))
width_offset = (outwidth - new_width ) / 2
logging.debug('output ar< input ar , width padding around '+str(width_offset)+ ' to '+str(width_offset+new_width))
output_img[:,width_offset:width_offset+new_width] = resized_img[:,:]
for n in range(0,width_offset): #doing this like the below runs into a broadcast problem which could prob be solved by reshaping
# output_img[:,0:width_offset] = resized_img[:,0]
# output_img[:,width_offset+new_width:] = resized_img[:,-1]
if copy_edge_pixeles:
output_img[:,n] = resized_img[:,0]
output_img[:,n+new_width+width_offset] = resized_img[:,-1]
else: #resize width to output width and fill top/bottom
factor = float(inwidth)/outwidth
new_height = int(float(inheight) / factor)
try:
resized_img = cv2.resize(input_file_or_np_arr, (outwidth, new_height))
except:
e = sys.exc_info()[0]
logging.warning('error on resizing {} to {} error:{}'.format(input_file_or_np_arr.shape,output_size,e))
# raw_input('ret to cont')
return
height_offset = (outheight - new_height) / 2
logging.debug('output ar >= input ar , height padding around '+str(height_offset)+' to '+str(height_offset+new_height))
output_img[height_offset:height_offset+new_height,:] = resized_img[:,:]
if copy_edge_pixeles:
output_img[0:height_offset,:] = resized_img[0,:]
output_img[height_offset+new_height:,:] = resized_img[-1,:]
# print('resize size:'+str(resized_img.shape)+' desired height:'+str(outheight)+' orig height resized:'+str(new_height))
# print('orig dims {} resized to {}'.format(input_file_or_np_arr.shape,output_img.shape))
if careful_with_the_labels:
#kill any extranneous labels that have popped up
# print('uniques in source:'+str(np.unique(input_file_or_np_arr)))
# print('uniques in dest:'+str(np.unique(output_img)))
for u in np.unique(output_img):
if not u in input_file_or_np_arr: #
# print('found new val in target:'+str(u))
output_img[output_img==u] = 0
# print('uniques in dest:'+str(np.unique(output_img)))
# assert((np.unique(output_img)==np.unique(input_file_or_np_arr)).all()) this fails , bool attr has no all()
if use_visual_output is True:
cv2.imshow('output', output_img)
cv2.imshow('orig',input_file_or_np_arr)
# cv2.imshow('res',resized_img)
cv2.waitKey(0)
if output_file is not None:
cv2.imwrite(output_file, output_img)
return output_img
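# Example usage (sketch; 'photo.jpg' is a hypothetical path): letterbox an image to 256x256,
# preserving its aspect ratio and zero-padding the shorter dimension:
#   padded = resize_keep_aspect('photo.jpg', output_size=(256, 256))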
#dst = cv2.inpaint(img,mask,3,cv2.INPAINT_TELEA)
def resize_by_adding_border(img_arr,output_size,visual_output=False,copy_edge_pixels=False):
img_arr = get_cv2_img_array(img_arr)
if img_arr.shape[0]>output_size[0] or img_arr.shape[1]>output_size[1]:
logging.warning('image to be bordered larger than requested size')
img_arr = resize_keep_aspect(img_arr,output_size=output_size)
return img_arr #this is not really what was asked for but it keeps the peace
border_sizes = (output_size[0] - img_arr.shape[0],output_size[1] - img_arr.shape[1])
new_image = np.zeros([output_size[0],output_size[1],3],dtype=np.uint8)
top = border_sizes[0]/2
bottom = top + img_arr.shape[0]
left = border_sizes[1]/2
right = left + img_arr.shape[1]
new_image[top:bottom,left:right] = img_arr
logging.debug('top {} bot {} d {} l {} r {} d {} imgarr {} '.format(top,bottom,bottom-top,left,right,right-left,img_arr.shape))
if visual_output:
cv2.imshow('resized from {} to {}'.format(img_arr.shape,new_image.shape),new_image)
cv2.imshow('orig',img_arr)
cv2.waitKey(0)
return(new_image)
def undo_resize_keep_aspect(input_file_or_np_arr, output_file=None, output_size = (256,256),use_visual_output=False,careful_with_the_labels=False):
'''
Takes an image that was produced by resize_keep_aspect and undoes it: strips the padding and resizes back to the requested output size
:param input_file_or_np_arr:
:param output_file:name for output
:param output_size:size of output image (height,width)
:param use_visual_output:
:return:
'''
if isinstance(input_file_or_np_arr,basestring):
input_file_or_np_arr = cv2.imread(input_file_or_np_arr)
if input_file_or_np_arr is None:
logging.warning('got a bad image')
return
# roles are reversed relative to resize_keep_aspect: here output_size is the original (pre-padding) image size, and the input is the padded/resized image
inheight, inwidth = output_size[:]
outheight, outwidth = input_file_or_np_arr.shape[0:2]
logging.info('undoing resize , original hxw {} {} resized hxw {} {}'.format(inheight,inwidth,outheight,outwidth))
if (inheight == 0) or (inwidth == 0):
logging.warning('got a bad image')
return
original_ar = float(inheight)/inwidth
resized_ar = float(outheight)/outwidth
if len(input_file_or_np_arr.shape) == 3:
indepth = input_file_or_np_arr.shape[2]
output_img = np.ones([outheight,outwidth,indepth],dtype=np.uint8)
else:
indepth = 1
output_img = np.ones([outheight,outwidth],dtype=np.uint8)
# print('input:{}x{}x{}'.format(inheight,inwidth,indepth))
actual_outheight, actual_outwidth = output_img.shape[0:2]
# print('output:{}x{}'.format(actual_outheight,actual_outwidth))
if original_ar > resized_ar: #unfil left/right and resize height to output height
factor = float(inheight)/outheight
new_width = int(float(inwidth) / factor)
width_offset = (outwidth - new_width ) / 2
remainder = outwidth - width_offset
logging.debug('orig ar> resized ar , width padding '+str(width_offset)+', taking from padding to '+str(remainder))
output_img = input_file_or_np_arr[:,width_offset:remainder]
# output_img[:,width_offset:width_offset+new_width] = resized_img[:,:]
output_img = cv2.resize(output_img, (output_size[1],output_size[0])) #cv2 does wxh not hxw
#print('>resize size:'+str(output_img.shape))
else: #resize width to output width and fill top/bottom
factor = float(inwidth)/outwidth
new_height = int(float(inheight) / factor)
height_offset = (outheight - new_height) / 2
remainder = outheight - height_offset
logging.debug('orig ar <= resized ar , height padding '+str(height_offset)+ ',filling to '+str(remainder)+' outsize:'+str(output_size))
output_img = input_file_or_np_arr[height_offset:remainder,:]
#print('intermediate outputsize:'+str(output_img.shape))
output_img = cv2.resize(output_img, (output_size[1],output_size[0])) #cv2 does wxh not hxw
logging.debug('resize size:'+str(output_img.shape))
# print('orig dims {} resized to {}'.format(input_file_or_np_arr.shape,output_img.shape))
if careful_with_the_labels:
#todo - the real way to do this is break into n channels and resize each individually
#this may possibly be done by putting a loop over channels and calling this function recursively n_chan times
#kill any extranneous labels that have popped up
# print('uniques in source:'+str(np.unique(input_file_or_np_arr)))
# print('uniques in dest:'+str(np.unique(output_img)))
for u in np.unique(output_img):
if not u in input_file_or_np_arr: #
# print('found new val in target:'+str(u))
output_img[output_img==u] = 0
# print('uniques in dest:'+str(np.unique(output_img)))
# assert((np.unique(output_img)==np.unique(input_file_or_np_arr)).all()) this fails , bool attr has no all()
if use_visual_output is True:
cv2.imshow('output', output_img)
cv2.imshow('orig',input_file_or_np_arr)
# cv2.imshow('res',resized_img)
cv2.waitKey(0)
if output_file is not None:
cv2.imwrite(output_file, output_img)
return output_img
#dst = cv2.inpaint(img,mask,3,cv2.INPAINT_TELEA)
def mask_to_rects(mask, visual_output=False, labels=constants.ultimate_21):
'''
given mask (eg from pixel level, not binary but several discrete values),
find bounding boxes for 'reasonably large' blobs, returning at most one (the largest blob) per mask value
:param mask:
:return:
'''
uniques = np.unique(mask)
if visual_output:
show_mask_with_labels(mask,labels=labels,visual_output=True)
bbs={}
for u in uniques :
if u == 0 :
continue #not intstd in bgnd
if 'skin' in labels:
if u==labels.index('skin'):
continue #not intstd in skin
img = np.array((mask==u)*255,dtype=np.uint8)
if len(img.shape)==3:
print('got multichan image , taking 1st')
img = img[:,:,0] #take first channel;
n_pixels = np.shape(np.where(img!=0))[1] #length of 1xn vector
print('size of mask=={} is {} (shape {})'.format(u,n_pixels,np.shape(np.where(img!=0))[1]))
if 0:
# thismask = img*255
# show_mask_with_labels(thismask,labels=constants.ultimate_21,visual_output=True)
cv2.imshow("mask=={}".format(u), img)
cv2.waitKey(0)
# contours = cv2.findContours(img,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE,contours)
# contours = cv2.findContours(im2, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
im2, contours, hierarchy = cv2.findContours(img.copy(),cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE) #igc.copy() required here , this seems like it must be a cv2 bug
# print('contours for {}: {}'.format(u,conts))
# cv2.drawContours(img*255,contours,-1,0,01)
# cv2.drawContours(im2,contours,-1,(100,255,100),5)
# cv2.imshow('contours',im2)
# cv2.waitKey(0)
print('n contours:'+str(len(contours)))
if len(contours) == 0:
    continue # no blobs found for this label value, so skip it
min_area_size = 1000 #this is kind of arbitrary , trying to keep out small blobs
n_contour = 0
im3 = np.zeros_like(img)
max_area = 0
next_area = 0
n_max = -1
n_next = -1
for cnt in contours:
cnt_len = cv2.arcLength(cnt, True)
cnt = cv2.approxPolyDP(cnt, 0.02*cnt_len, True)
area = cv2.contourArea(cnt)
if area > max_area: #instead of just keeping max one could also try to bound all contours
next_area=max_area
n_next = n_max
max_area=area
n_max = n_contour
if area > min_area_size : #and cv2.isContourConvex(cnt):
print('contour area of contour {} is {}'.format(n_contour,area))
if visual_output:
cv2.drawContours(im3,contours,n_contour,(50,255,50),2)
# cv2.imshow('current contour',im3)
n_contour+=1
if visual_output:
cv2.imshow('big contours',im3)
cv2.waitKey(0)
cv2.drawContours(im3,contours,n_max,(244,100,150),5)
        if len(contours) == 0 or n_max < 0:
            continue #no contour found for this mask value, skip it
        x,y,w,h = cv2.boundingRect(contours[n_max])
# x,y,w,h=None,None,None,None
if visual_output :
cv2.rectangle(im3,(x,y),(x+w,y+h),(255,255,0),2)
cv2.imshow('the biggest contour(s)',im3)
cv2.waitKey(0)
print('contour {} is biggest at len {}, {} is second at {}'.format(n_max,max_area,n_next,next_area))
if max_area>min_area_size:
bbs[labels[u]] = [x,y,w,h]
return(bbs)
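#Example usage sketch for mask_to_rects - the mask path here is an assumption, substitute your own
#pixel-level mask. The returned dict maps label name -> [x,y,w,h] of the largest sufficiently-large blob.
def example_mask_to_rects():
    mask = cv2.imread('/path/to/pixlevel_mask.png', cv2.IMREAD_GRAYSCALE)  #assumed path
    if mask is None:
        print('could not read example mask')
        return None
    bbs = mask_to_rects(mask, visual_output=False, labels=constants.ultimate_21)
    print('bbs per label: ' + str(bbs))
    return bbs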
def resize_and_crop_maintain_bb( input_file_or_np_arr, output_file=None, output_width = 150, output_height = 200,use_visual_output=False,bb=None):
    '''Takes an image name or array, resizes it and crops to the desired output size while keeping the
    bounding box (given explicitly or parsed from a 'bbox_x_y_w_h' filename) inside the crop.
    '''
#TODO - implement nonsquare crop
#done
#TODO - implement non-square resize up to maximum deformation e.g. 10% xscale=2 yscale=2.2
    input_file_or_np_arr_name = None #set when the input is a filename, used later for deriving an output name
    if isinstance(input_file_or_np_arr,basestring):
        print('got image name '+str(input_file_or_np_arr))
if bb is None:
if 'bbox_' in input_file_or_np_arr:
strs = input_file_or_np_arr.split('bbox_')
bb_str = strs[1]
coords = bb_str.split('_')
bb_x = int(coords[0])
bb_y = int(coords[1])
bb_w = int(coords[2])
bb_h = coords[3].split('.')[0] #this has .jpg or .bmp at the end
bb_h = int(bb_h)
bb=[bb_x,bb_y,bb_w,bb_h]
if bb_h == 0:
logging.warning('bad height encountered in imutils.resize_and_crop_image for '+str(input_file_or_np_arr))
return None
if bb_w == 0:
logging.warning('bad width encountered in imutils.resize_and_crop_image for '+str(input_file_or_np_arr))
return None
input_file_or_np_arr_name = input_file_or_np_arr
input_file_or_np_arr = cv2.imread(input_file_or_np_arr)
if input_file_or_np_arr is None:
logging.warning('input file {} is none'.format(input_file_or_np_arr_name))
return None
img_height, img_width, img_depth = input_file_or_np_arr.shape
if bb is None:
bb = [0,0, img_width,img_height]
print('no bbox given, using entire image')
print('bb (x,y,w,h) {} {} {} {} image:{}x{} desired:{}x{}'.format(bb[0],bb[1],bb[2],bb[3],img_width,img_height,output_width,output_height))
if bb[0]<0:
logging.warning('BB x out of bounds, being reset')
bb[0]=0
if bb[1]<0 :
bb[1]=0
logging.warning('BB y out of bounds, being reset')
if bb[0]+bb[2] > img_width:
logging.warning('BB width out of bounds, being reset')
bb[2]=img_width-bb[0]
if bb[1]+bb[3] > img_height:
logging.warning('BB height out of bounds, being reset')
bb[3]=img_height - bb[1]
orig_bb = copy.deepcopy(bb)
in_aspect = float(img_width)/img_height
out_aspect = float(output_width)/output_height
width_out_in_ratio = float(output_width)/img_width
height_out_in_ratio = float(output_height)/img_height
if width_out_in_ratio > height_out_in_ratio: #rescale by smallest amt possible
# if abs(1-width_out_in_ratio) < abs(1-height_out_in_ratio): #rescale by smallest amt possible
# if output_width > output_height: #rescale by smallest amt possible
#this may be wrong when width_input>1 and height_inout<1 or vice versa
new_width = int(width_out_in_ratio*img_width) #should be output_width. try round instead of int, didnt work
new_height = int(width_out_in_ratio*img_height) #may besomething other than output_height
bb = np.multiply(bb,width_out_in_ratio)
bb = [int(i) for i in bb]
print('match width, new w,h:{},{} new bb {},{},{},{}'.format(new_width,new_height,bb[0],bb[1],bb[2],bb[3]))
scaled_img = cv2.resize(input_file_or_np_arr,(new_width,new_height))
y1 = bb[1]
y2 = bb[1] + bb[3]
height_to_crop = new_height - output_height
output_extra_margin_over_bb = int(float(new_height-output_height )/2)
ymin = y1 - output_extra_margin_over_bb
print('tentative ymin '+str(ymin)+' extra margin '+str(output_extra_margin_over_bb))
if ymin<0:
ymin = 0
# ymax = bb[3]
ymax = output_height
else:
ymax = y2 + output_extra_margin_over_bb
if ymax>new_height:
ymax = new_height
# ymin = ymax - bb[3]
ymin = new_height-output_height
print('new ymin,ymax:{},{}'.format(ymin,ymax))
cropped_img = scaled_img[ymin:ymax,0:output_width,:] #crop image
bb[1] = bb[1]-ymin
else: #matching output height, width should be more than desired
new_width = int(height_out_in_ratio*img_width) #maybe other
new_height = int(height_out_in_ratio*img_height) #should be output_height
bb = np.multiply(bb,height_out_in_ratio)
bb = [int(i) for i in bb]
print('match height, new w,h:{},{} new bb {},{},{},{}'.format(new_width,new_height,bb[0],bb[1],bb[2],bb[3]))
scaled_img = cv2.resize(input_file_or_np_arr,(new_width,new_height))
x1 = bb[0]
x2 = bb[0] + bb[2]
width_to_crop = new_width - output_width
output_extra_margin_over_bb = int(float(new_width-output_width)/2)
xmin = x1 - output_extra_margin_over_bb
print('tentative xmin '+str(xmin)+' extra margin '+str(output_extra_margin_over_bb))
if xmin<0:
xmin = 0
# xmax = bb[2]
xmax = output_width
else:
xmax = x2 + output_extra_margin_over_bb
if xmax>new_width:
xmax = new_width
xmin = new_width-output_width
print('new xmin,xmax:{},{}'.format(xmin,xmax))
cropped_img = scaled_img[0:output_height,xmin:xmax,:]
bb[0] = bb[0]-xmin
    # raw_input('enter to continue')  #debug pause, commented out so batch use (e.g. resize_and_crop_maintain_bb_on_dir) does not block
if use_visual_output is True:
cropped_copy = copy.deepcopy(cropped_img)
cv2.rectangle(cropped_copy,(bb[0],bb[1]),(bb[0]+bb[2],bb[1]+bb[3]),color=[0,255,0 ])
cv2.imshow('scaled_cropped', cropped_copy)
orig_copy = copy.deepcopy(input_file_or_np_arr)
cv2.rectangle(orig_copy,(orig_bb[0],orig_bb[1]),(orig_bb[0]+orig_bb[2],orig_bb[1]+orig_bb[3]),color=[0,255,0 ])
cv2.imshow('orig',orig_copy)
cv2.waitKey(0)
    if output_file is None and input_file_or_np_arr_name is not None:
        #no output name given - derive one from the input name (assumed convention: insert '_cropped' before the extension)
        base,ext = os.path.splitext(input_file_or_np_arr_name)
        output_file = base+'_cropped'+(ext if ext else '.jpg')
    if output_file is not None:
        print('writing to:'+output_file)
        retval = cv2.imwrite(output_file, cropped_img)
        if retval is False:
            logging.warning('retval from imwrite is false (attempt to write file:'+output_file+' has failed :( )')
return cropped_img
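#Example usage sketch for resize_and_crop_maintain_bb - paths and bb values are assumptions. The first
#call passes an explicit bb (x,y,w,h); the second relies on the 'bbox_x_y_w_h' filename convention
#parsed above (e.g. 'person_bbox_240_122_170_400.jpg').
def example_resize_and_crop_maintain_bb():
    out1 = resize_and_crop_maintain_bb('/path/to/image.jpg', output_width=150, output_height=200,
                                       use_visual_output=False, bb=[240, 122, 170, 400])  #assumed path/bb
    out2 = resize_and_crop_maintain_bb('/path/to/person_bbox_240_122_170_400.jpg',
                                       output_width=150, output_height=200, use_visual_output=False)
    return out1, out2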
def resize_and_crop_image_using_bb( input_file_or_np_arr, bb=None, output_file=None, output_w = 128,output_h = 128,use_visual_output=False):
    '''Takes an image name or array, resizes it and crops the bb area, keeping as much of the original as possible
    '''
#TODO - implement nonsquare crop
# done
if isinstance(input_file_or_np_arr,basestring):
orig_name = input_file_or_np_arr
input_file_or_np_arr = cv2.imread(input_file_or_np_arr)
if input_file_or_np_arr is None:
logging.debug('trouble reading input file {}'.format(orig_name))
return
if 'bbox_' in orig_name and bb is None:
strs = orig_name.split('bbox_')
bb_str = strs[1]
coords = bb_str.split('_')
bb_x = int(coords[0])
bb_y = int(coords[1])
bb_w = int(coords[2])
bb_h = coords[3].split('.')[0] #this has .jpg or .bmp at the end
bb_h = int(bb_h)
bb=[bb_x,bb_y,bb_w,bb_h]
print('bb:'+str(bb))
if bb_h == 0:
logging.warning('bad height encountered in imutils.resize_and_crop_image for '+str(input_file_or_np_arr))
return None
if bb_w == 0:
logging.warning('bad width encountered in imutils.resize_and_crop_image for '+str(input_file_or_np_arr))
return None
height, width, depth = input_file_or_np_arr.shape
if bb is None:
bb = [0,0, width,height]
print('no bbox given, using entire image')
in_aspect = float(bb[2])/bb[3]
out_aspect = float(output_w)/output_h
x1 = bb[0]
x2 = bb[0] + bb[2]
y1 = bb[1]
y2 = bb[1] + bb[3]
if in_aspect>out_aspect:
extra_pad_y = int((output_h*bb[2]/output_w - bb[3]) / 2)
round = (output_h*bb[2]/output_w - bb[3]) % 2
y1 = max(0,bb[1] - extra_pad_y)
y2 = min(height,bb[1]+bb[3]+extra_pad_y+round)
#print('pad y {} y1 {} y2 {}'.format(extra_pad_y,y1,y2))
elif in_aspect<out_aspect:
extra_pad_x = int((output_w*bb[3]/output_h - bb[2]) / 2)
round = (output_w*bb[3]/output_h - bb[2]) % 2
x1 = max(0,bb[0] - extra_pad_x)
x2 = min(width,bb[0]+bb[2]+extra_pad_x+round)
#print('pad x {} x1 {} x2 {}'.format(extra_pad_x,x1,x2))
#print('x1 {} x2 {} y1 {} y2 {}'.format(x1,x2,y1,y2))
cropped_img = input_file_or_np_arr[y1:y2,x1:x2,:]
logging.debug('orig size {}x{} cropped to:{}x{},ar={} desired ar={}'.format(input_file_or_np_arr.shape[0],input_file_or_np_arr.shape[1],cropped_img.shape[0],cropped_img.shape[1],float(cropped_img.shape[1])/cropped_img.shape[0],float(output_w)/output_h))
scaled_cropped_img = cv2.resize(cropped_img,(output_w,output_h))
# print('resized to : {}x{}, ar={}, desired ar={}'.format(scaled_cropped_img.shape[0],scaled_cropped_img.shape[1],float(scaled_cropped_img.shape[1])/scaled_cropped_img.shape[0],float(output_w/output_h)))
if use_visual_output is True:
cv2.imshow('scaled_cropped', scaled_cropped_img)
scaled_input = cv2.resize(input_file_or_np_arr,(output_w,output_h))
cv2.imshow('orig',scaled_input)
cv2.waitKey(0)
if output_file is not None:
# orig_dir = os.path.dirname(orig_name)
# orig_name_only = os.path.basename(orig_name)
# output_file = os.path.join(orig_dir,output_dir)
print('writing to:'+output_file)
retval = cv2.imwrite(output_file, scaled_cropped_img)
if retval is False:
logging.warning('retval from imwrite is false (attempt to write file:'+output_file+' has failed :( )')
return scaled_cropped_img
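#Example usage sketch for resize_and_crop_image_using_bb with an assumed file and bb - it crops around
#the bb (padded towards the requested aspect ratio where the image allows) and resizes to output_w x output_h.
def example_resize_and_crop_image_using_bb():
    cropped = resize_and_crop_image_using_bb('/path/to/image.jpg', bb=[240, 122, 170, 400],
                                             output_w=128, output_h=128, use_visual_output=False)  #assumed path/bb
    if cropped is not None:
        print('cropped+resized shape: ' + str(cropped.shape))
    return cropped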
def center_crop(input_file_or_np_arr,cropsize):
img_arr = Utils.get_cv2_img_array(input_file_or_np_arr)
if img_arr is None:
print('couldnt get img arr in imutils.center_crop')
return
h,w = img_arr.shape[0:2]
if cropsize[0]>h or cropsize[1]>w:
print('cropsize {} > imagesize {}'.format(cropsize,img_arr.shape))
return
h_margin = (h-cropsize[0])/2
w_margin = (w-cropsize[1])/2
out_arr = img_arr[h_margin:h_margin+cropsize[0],w_margin:w_margin+cropsize[1]] #takes care of odd h-crop[0]
return out_arr
def resize_and_crop_maintain_aspect(img_arr_or_url,resize_size,crop_size):
resized = resize_keep_aspect(img_arr_or_url,output_size=resize_size)
cropped = center_crop(resized,crop_size)
return cropped
def crop_files_in_dir(dirname,save_dir,**arglist):
'''
takes a function that has a filename as first arg and maps it onto files in dirname
:param func: function to map
:param dirname: dir of files to do function on
:param arglist: args to func
:return:
'''
Utils.ensure_dir(save_dir)
logging.debug('cropping files in directory {} with arguments {}'.format(dirname,str(arglist)))
only_files = [f for f in os.listdir(dirname) if os.path.isfile(os.path.join(dirname, f))]
for a_file in only_files:
input_path = os.path.join(dirname,a_file)
output_path = os.path.join(save_dir,a_file)
arglist['output_file']=output_path
resize_and_crop_image_using_bb(input_path,**arglist)
def crop_files_in_dir_of_dirs(dir_of_dirs,**arglist):
'''
takes a function that has a filename as first arg and maps it onto files in directory of directories
:param func: function to map
:param dir_of_dirs: dir of dirs to do function on
:param arglist: args to func
:return:
'''
logging.debug('cropping files in directories under directory {} with arguments {}'.format(dir_of_dirs,str(arglist)))
only_dirs = [dir for dir in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs,dir))]
num_cores = multiprocessing.cpu_count()
fullpaths = []
save_dirs = []
for a_dir in only_dirs:
fullpath = os.path.join(dir_of_dirs,a_dir)
save_dir = os.path.join(dir_of_dirs,'cropped/')
save_dir = os.path.join(save_dir,a_dir)
Utils.ensure_dir(save_dir)
fullpaths.append(fullpath)
save_dirs.append(save_dir)
crop_files_in_dir(fullpath,save_dir,**arglist)
# this will work if i can find how to do [x,y for x in a for y in b] 'zip' style
# Parallel(n_jobs=num_cores)(delayed(crop_files_in_dir)(the_dir,the_path) for the_dir, the_path in [fullpaths,save_dirs])
def kill_the_missing(sourcedir, targetdir):
'''
this removes anything not in the source , from the target
:param sourcedir: has files removed relative to target
:param targetdir: has extra files, we want to remove the extras it has relative to source
:return:
'''
files_in_source = [f for f in os.listdir(sourcedir) if os.path.isfile(os.path.join(sourcedir,f))]
files_in_target = [f for f in os.listdir(targetdir) if os.path.isfile(os.path.join(targetdir,f))]
print('{} files in {}, {} files in {}'.format(len(files_in_source),sourcedir,len(files_in_target),targetdir))
kill_dir = os.path.join(targetdir,'removed')
Utils.ensure_dir(kill_dir)
n_matched = 0
n_killed = 0
for a_file in files_in_target:
if a_file in files_in_source:
print('file {} in both dirs'.format(a_file))
n_matched += 1
else:
print('file {} not matched, moving to {}'.format(a_file,kill_dir))
shutil.move(os.path.join(targetdir,a_file), os.path.join(kill_dir,a_file))
n_killed += 1
print('n matched {} n killed {}'.format(n_matched,n_killed))
files_in_source = [f for f in os.listdir(sourcedir) if os.path.isfile(os.path.join(sourcedir,f))]
files_in_target = [f for f in os.listdir(targetdir) if os.path.isfile(os.path.join(targetdir,f))]
print('{} files in {}, {} files in {}'.format(len(files_in_source),sourcedir,len(files_in_target),targetdir))
def find_the_common(sourcedir, targetdir):
    '''
    counts how many files in the target also appear in the source (unlike kill_the_missing, nothing is moved)
    :param sourcedir: reference dir
    :param targetdir: dir whose files are checked against the reference
    :return:
    '''
files_in_source = [f for f in os.listdir(sourcedir) if os.path.isfile(os.path.join(sourcedir,f))]
files_in_target = [f for f in os.listdir(targetdir) if os.path.isfile(os.path.join(targetdir,f))]
print('{} files in {}, {} files in {}'.format(len(files_in_source),sourcedir,len(files_in_target),targetdir))
n_matched = 0
n_not_matched = 0
for a_file in files_in_target:
if a_file in files_in_source:
print('file {} in both dirs'.format(a_file))
n_matched += 1
else:
print('file {} not matched'.format(a_file))
n_not_matched += 1
print('n matched {} n not matched {}'.format(n_matched,n_not_matched))
files_in_source = [f for f in os.listdir(sourcedir) if os.path.isfile(os.path.join(sourcedir,f))]
files_in_target = [f for f in os.listdir(targetdir) if os.path.isfile(os.path.join(targetdir,f))]
def oversegment(img_arr):
image_height,image_width,image_channels = img_arr.shape
num_superpixels = 100
num_levels = 20
    #SEEDS superpixels live in the opencv-contrib 'ximgproc' module; create, run and return the labels
    seeds = cv2.ximgproc.createSuperpixelSEEDS(image_width, image_height, image_channels, num_superpixels, num_levels, 2, 5, False)
    seeds.iterate(img_arr, 10) #number of iterations is arbitrary here
    return seeds.getLabels()
def defenestrate_labels(mask,kplist):
matches = np.ones_like(mask)
for i in range(0,len(kplist)):
index = kplist[i]
nv = np.multiply(mask == index,i)
print(nv.shape)
matches = np.add(matches,nv)
return matches
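#Small synthetic example for defenestrate_labels: keep only a few category indices from a mask and
#renumber them. The indices follow the keep_these_cats default used in defenestrate_directory below
#(an assumption - adjust to your own label set).
def example_defenestrate_labels():
    mask = np.zeros((10, 10), dtype=np.uint8)
    mask[2:5, 2:5] = 55  #pretend 'hair' region
    mask[6:9, 6:9] = 57  #pretend 'face' region
    new_mask = defenestrate_labels(mask, [1, 55, 56, 57])
    print('uniques before {} after {}'.format(np.unique(mask), np.unique(new_mask)))
    return new_mask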
def defenestrate_directory(indir, outdir, filter='.png', keep_these_cats=[1,55,56,57], labels=constants.fashionista_categories_augmented):
masklist = [f for f in os.listdir(indir) if filter in f]
# print('masks:'+str(masklist))
# labels = constants.pascal_context_labels
    final_labels = [labels[ind] for ind in keep_these_cats] #e.g. ['null','hair','skin','face'] for the default cats
    final_labels[:0] = [''] #prepend empty label for background/index 0
print('final labels:'+str(final_labels))
for mask in masklist:
fullname = os.path.join(indir,mask)
print('name:'+mask+' full:'+fullname)
# show_mask_with_labels(fullname,labels)
mask_img = cv2.imread(fullname)
if len(mask_img.shape)==3:
print('fixing multichan mask')
mask_img = mask_img[:,:,0]
new_mask = defenestrate_labels(mask_img,keep_these_cats)
outname = os.path.join(outdir,mask)
cv2.imwrite(outname,new_mask)
print('outname:'+outname+', uniques '+str(np.unique(new_mask)))
# show_mask_with_labels(outname,final_labels)
def concatenate_labels(mask,kplist):
matches = np.ones_like(mask)
first = kplist[0]
for i in range(0,len(kplist)):
index = kplist[i]
nv = np.multiply(mask == index,first)
print(nv.shape)
matches = np.add(matches,nv)
return matches
def resize_and_crop_maintain_bb_on_dir(dir, output_width = 150, output_height = 200,use_visual_output=True):
only_files = [f for f in os.listdir(dir) if os.path.isfile(os.path.join(dir,f))]
print('doing resize/crop in dir '+dir)
# print(only_files)
for a_file in only_files:
print('file '+a_file)
fullfile = os.path.join(dir,a_file)
        retval = resize_and_crop_maintain_bb(fullfile, output_width=output_width, output_height=output_height,use_visual_output=use_visual_output,bb=None)
def show_mask_with_labels_dir(dir,labels,filter=None,original_images_dir=None,original_images_dir_alt=None,cut_the_crap=False,save_images=False,visual_output=False,webtool=False):
'''
:param dir:
:param filter: take only images with this substring in name
:param labels: list of test labels for categories
:param original_images_dir: dir of image (not labels)
:param original_images_dir_alt: alternate dir of images (to deal with test/train directories)
:param cut_the_crap: sort images to keepers and tossers
:return:
'''
if filter:
print('using filter:'+filter)
files = [f for f in os.listdir(dir) if filter in f]
else:
files = [f for f in os.listdir(dir) ]
print(str(len(files))+ ' files to process in '+dir)
fullpaths = [os.path.join(dir,f) for f in files]
totfrac = 0
fraclist=[]
n=0
if original_images_dir:
original_images = ['.'.join(f.split('.')[:-1])+'.jpg' for f in files]
# if webtool:
# original_images = [f.replace('_pixv2','').replace('_webtool','') for f in files]
# original_images = [f.split('.')[-2]+'.jpg' for f in files]
original_fullpaths = [os.path.join(original_images_dir,f) for f in original_images]
if original_images_dir_alt:
original_altfullpaths = [os.path.join(original_images_dir_alt,f) for f in original_images]
for x in range(0,len(files)):
if os.path.exists(original_fullpaths[x]):
show_mask_with_labels(fullpaths[x],labels,original_image=original_fullpaths[x],cut_the_crap=cut_the_crap,save_images=save_images,visual_output=visual_output)
# if frac is not None:
# fraclist.append(frac)
# totfrac = totfrac + frac
# n=n+1
elif original_images_dir_alt and os.path.exists(original_altfullpaths[x]):
show_mask_with_labels(fullpaths[x],labels,original_image=original_altfullpaths[x],cut_the_crap=cut_the_crap,save_images=save_images,visual_output=visual_output)
# if frac is not None:
# fraclist.append(frac)
## totfrac = totfrac + frac
# n=n+1
else:
logging.warning(' does not exist:'+original_fullpaths[x])
continue
else:
for f in fullpaths:
show_mask_with_labels(f,labels,cut_the_crap=cut_the_crap,save_images=save_images,visual_output=visual_output)
# if frac is not None:
# fraclist.append(frac)
# totfrac = totfrac + frac
# n=n+1
# print('avg frac of image w nonzero pixels:'+str(totfrac/n))
    if fraclist: #fraclist is only populated if the commented-out fraction tracking above is re-enabled
        hist, bins = np.histogram(fraclist, bins=30)
        width = 0.7 * (bins[1] - bins[0])
        center = (bins[:-1] + bins[1:]) / 2
        plt.bar(center, hist, align='center', width=width,label='nonzero pixelcount')
        plt.legend()
        plt.savefig('outhist.jpg')
        plt.show()
    # print('fraction histogram:'+str(np.histogram(fraclist,bins=20)))
def show_mask_with_labels(mask_filename_or_img_array,labels,original_image=None,cut_the_crap=False,save_images=False,visual_output=False,resize=None,mask2=None,overlay=None,savename=None):
'''
split this into one function that takes mask and gives img with labels possibly with overlay, returns arr
and another func that takes 2 images and puts side by side
todo : use overlay cv2.addWeighted(overlay, alpha, output, 1 - alpha,
0, output)
http://www.pyimagesearch.com/2016/03/07/transparent-overlays-with-opencv/
'''
colormap = cv2.COLORMAP_JET
if isinstance(mask_filename_or_img_array, basestring):
img_arr = Utils.get_cv2_img_array(mask_filename_or_img_array,cv2.IMREAD_GRAYSCALE)
mask_filename=mask_filename_or_img_array
elif type(mask_filename_or_img_array) == np.ndarray:
img_arr = mask_filename_or_img_array
mask_filename='./output.jpg'
if original_image is not None and isinstance(original_image,basestring):
mask_filename = original_image
else:
logging.warning('got something other than a filename (string) or img array')
return
if img_arr is None:
logging.warning('img_arr is None')
return
logging.debug('img size:'+str(img_arr.shape))
if len(img_arr.shape) != 2:
logging.warning('got a multichannel image, using chan 0')
img_arr = img_arr[:,:,0]
histo = np.histogram(img_arr,bins=len(labels)-1)
# print('hist'+str(histo[0])) #
h,w = img_arr.shape[0:2]
n_nonzero = np.count_nonzero(img_arr)
n_tot = h*w
frac = float(n_nonzero)/n_tot
uniques = np.unique(img_arr)
logging.debug('show_mask_with_labels:number of unique mask values:'+str(len(uniques))+' frac nonzero:'+str(frac) +' hxw:'+str(h)+','+str(w))
if len(uniques)>len(labels):
logging.warning('number of unique mask values {} > number of labels {}!!!'.format(len(uniques),len(labels)))
return
# minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(img_array)
maxVal = len(labels)
max_huelevel = 160.0
satlevel = 255
vallevel = 255
scaled = np.uint8(np.multiply(img_arr, max_huelevel / maxVal))
# dest = cv2.applyColorMap(scaled,colormap)
dest = np.zeros([h,w,3])
dest[:,:,0] = scaled #hue
dest[:,:,1] = satlevel #saturation
dest[:,:,2] = vallevel #value
# print('type:'+str(type(dest)))
dest = dest.astype(np.uint8)
dest = cv2.cvtColor(dest,cv2.COLOR_HSV2BGR)
bar_height = int(float(h)/len(uniques))
bar_width = 170
colorbar = np.zeros([h,bar_width])
i = 0
logging.debug('len labels:'+str(len(labels)))
logging.debug('unique label val:'+str(uniques))
for unique in uniques:
if unique > len(labels):
logging.warning('pixel value '+str(unique)+' out of label range (1)')
continue
colorbar[i*bar_height:i*bar_height+bar_height,:] = unique
# cv2.putText(colorbar,labels[unique],(5,i*bar_height+bar_height/2-10),cv2.FONT_HERSHEY_PLAIN,1,[i*255/len(uniques),i*255/len(uniques),100],thickness=2)
# cv2.putText(colorbar,labels[unique],(5,i*bar_height+bar_height/2-5),cv2.FONT_HERSHEY_PLAIN,1,[0,10,50],thickness=2)
i=i+1
scaled_colorbar = np.uint8(np.multiply(colorbar, max_huelevel / maxVal))
h_colorbar,w_colorbar = scaled_colorbar.shape[0:2]
dest_colorbar = np.zeros([h_colorbar,w_colorbar,3])
dest_colorbar[:,:,0] = scaled_colorbar #hue
dest_colorbar[:,:,1] = satlevel #saturation
dest_colorbar[:,:,2] = vallevel #value
dest_colorbar = dest_colorbar.astype(np.uint8)
dest_colorbar = cv2.cvtColor(dest_colorbar,cv2.COLOR_HSV2BGR)
# print('size of colrbar:'+str(dest_colorbar.shape))
#have to do labels here to get black
i = 0
totpixels = h*w
for unique in uniques:
if unique >= len(labels):
logging.warning('pixel value '+str(unique)+' out of label range (2)')
continue
pixelcount = len(img_arr[img_arr==unique])
try:
logging.debug('unique:'+str(unique)+':'+labels[unique]+' pixcount:'+str(pixelcount)+' fraction'+str(float(pixelcount)/totpixels))
frac_string='{:.4f}'.format(float(pixelcount)/totpixels)
text_string = str(unique)+' '+labels[unique]+' '+str(frac_string)
cv2.putText(dest_colorbar,text_string,(5,int(i*bar_height+float(bar_height)/2+5)),cv2.FONT_HERSHEY_PLAIN,0.7,[0,10,50],thickness=1)
except:
# logging.warning('some problem in labelling')
print("Unexpected error:"+ str(sys.exc_info()[0]))
print('index {} len labels {}'.format(unique,len(labels)))
i=i+1 #
#dest_colorbar = cv2.applyColorMap(scaled_colorbar, colormap)
combined = np.zeros([h,w+w_colorbar,3],dtype=np.uint8)
    if mask2 is not None: #note: mask2 is read but not yet composited - unfinished feature
        combined = np.zeros([h,w+w_colorbar,3],dtype=np.uint8)
        mask2_arr = Utils.get_cv2_img_array(mask2,cv2.IMREAD_GRAYSCALE)
combined[:,0:w_colorbar]=dest_colorbar
combined[:,w_colorbar:w_colorbar+w]=dest
if original_image is not None:
orig_arr = Utils.get_cv2_img_array(original_image)
# orig_arr = cv2.imread(original_image)
if orig_arr is not None:
height, width = orig_arr.shape[:2]
logging.debug('show_mask_with_labels:got original image:'+str(original_image)+' shape:'+str(orig_arr.shape))
maxheight=600
minheight=300
desired_height=500
if resize: # or height < minheight:
# if (1): # or height < minheight:
desired_height=resize[0]
logging.debug('(hxw {}x{}) resizing to {} by '.format(height,width,desired_height))
# newheight=(height>maxheight)*maxheight #+(height<minheight)*minheight
newheight=desired_height
factor = float(newheight)/height
orig_arr = cv2.resize(orig_arr,(int(round(width*factor)),int(round(height*factor))))
# print('factor {} newsize {}'.format(factor,orig_arr.shape) )
colorbar_h,colorbar_w = dest_colorbar.shape[0:2]
factor = float(newheight)/colorbar_h
dest_colorbar = cv2.resize(dest_colorbar,(int(round(colorbar_w*factor)),int(round(colorbar_h*factor))))
# print('cbarfactor {} newsize {}'.format(factor,dest_colorbar.shape) )
dest_h,dest_w = dest.shape[0:2]
factor = float(newheight)/dest_h
dest = cv2.resize(dest,(int(round(dest_w*factor)),int(round(dest_h*factor))))
# print('maskfactor {} newsize {}'.format(factor,dest.shape) )
# cv2.imshow('original',orig_arr)
elif height != h or width != w:
orig_arr = resize_keep_aspect(orig_arr,output_size=(h,w))
logging.debug('size mismach bet. orig and mask - orig {}x{} mask {}x{}'.format(height,width,h,w))
colorbar_h,colorbar_w = dest_colorbar.shape[0:2]
# logging.debug('dest colorbar w {} h {} shape {}'.format(colorbar_w,colorbar_h,dest_colorbar.shape))
dest_h,dest_w = dest.shape[0:2]
# logging.debug('dest w {} h {} shape {}'.format(dest_w,dest_h,dest.shape))
orig_h,orig_w = orig_arr.shape[0:2]
logging.debug('orig w {} h {} dest {}x{}'.format(orig_w,orig_h,dest_w,dest_h))
# print('colobar size {} masksize {} imsize {}'.format(dest_colorbar.shape,dest.shape,orig_arr.shape))
combined = np.zeros([dest_h,dest_w+orig_w+colorbar_w,3],dtype=np.uint8)
logging.debug('show_mask_with_labels:combined shape:'+str(combined.shape))
combined[:,0:colorbar_w]=dest_colorbar
combined[:,colorbar_w:colorbar_w+dest_w]=dest
if overlay:
logging.debug('show_mask_with_labels:doing overlay')
orig_arr = cv2.addWeighted(orig_arr, overlay, img_arr, 1 - overlay,0)
combined[:,colorbar_w+dest_w:]=orig_arr
#ValueError: could not broadcast input array from shape (572,940,3) into shape (256,940,3)
combined_h,combined_w = combined.shape[0:2]
logging.debug('show_mask_with_labels:comb w {} h {} shape {}'.format(combined_w,combined_h,combined.shape))
# if combined_h<minheight:
# factor = float(minheight)/combined_h
# combined = cv2.resize(combined,(int(round(combined_w*factor)),minheight))
else:
logging.warning('show_mask_with_labels could not get image '+original_image)
# cv2.imshow('map',dest)
# cv2.imshow('colorbar',dest_colorbar)
relative_name = os.path.basename(mask_filename)
    k = -1 #default keypress value in case visual_output is off (cut_the_crap below checks k)
    if visual_output:
        cv2.imshow(relative_name,combined)
        k = cv2.waitKey(0)
if save_images:
if savename is None:
savename = mask_filename[:-4]+'_legend.jpg'
logging.info('show_mask_with_labels is saving labelled img to '+savename)
cv2.imwrite(savename,combined)
#todo move this to a separate function i dont think theres any reason its here
if cut_the_crap: #move selected to dir_removed, move rest to dir_kept
print('(d)elete (c)lose anything else keeps')
indir = os.path.dirname(mask_filename)
parentdir = os.path.abspath(os.path.join(indir, os.pardir))
curdir = os.path.split(indir)[1]
print('in {} parent {} cur {}'.format(indir,parentdir,curdir))
if k == ord('d'):
newdir = curdir+'_removed'
dest_dir = os.path.join(parentdir,newdir)
Utils.ensure_dir(dest_dir)
print('REMOVING moving {} to {}'.format(mask_filename,dest_dir))
shutil.move(mask_filename,dest_dir)
elif k == ord('c'):
newdir = curdir+'_needwork'
dest_dir = os.path.join(parentdir,newdir)
Utils.ensure_dir(dest_dir)
print('CLOSE so moving {} to {}'.format(mask_filename,dest_dir))
shutil.move(mask_filename,dest_dir)
else:
newdir = curdir+'_kept'
dest_dir = os.path.join(parentdir,newdir)
Utils.ensure_dir(dest_dir)
print('KEEPING moving {} to {}'.format(mask_filename,dest_dir))
shutil.move(mask_filename,dest_dir)
cv2.destroyAllWindows()
print('finished show_mask_with-labels')
print('caller name:'+str( inspect.stack()[1][3]))
return combined,frac
# return dest
def resize_dir(dir,out_dir,factor=4,filter='.jpg'):
imfiles = [f for f in os.listdir(dir) if os.path.isfile(os.path.join(dir,f)) and filter in f]
for f in imfiles:
infile = os.path.join(dir,f)
img_arr = cv2.imread(infile)
if filter == '.png' or filter=='.bmp' or filter == 'png' or filter == 'bmp': #png mask is read as x*y*3 , prob. bmp too
img_arr = img_arr[:,:,0]
h, w = img_arr.shape[0:2]
new_h = int(h/factor)
new_w = int(w/factor)
output_arr = cv2.resize(img_arr,(new_w,new_h))
actualh,actualw = output_arr.shape[0:2]
outfile = os.path.join(out_dir,f)
cv2.imwrite(outfile,output_arr)
print('orig w,h {},{} new {},{} '.format(w,h,actualw,actualh))
print('infile {} outfile {}'.format(infile,outfile))
def nms_detections(dets, overlap=0.3):
"""
Non-maximum suppression: Greedily select high-scoring detections and
skip detections that are significantly covered by a previously
selected detection.
This version is translated from Matlab code by <NAME>,
who sped up <NAME>'s code.
Parameters
----------
dets: ndarray
each row is ['xmin', 'ymin', 'xmax', 'ymax', 'score']
overlap: float
minimum overlap ratio (0.3 default)
Output
------
dets: ndarray
remaining after suppression.
"""
x1 = dets[:, 0]
y1 = dets[:, 1]
x2 = dets[:, 2]
y2 = dets[:, 3]
ind = np.argsort(dets[:, 4])
w = x2 - x1
h = y2 - y1
area = (w * h).astype(float)
pick = []
while len(ind) > 0:
i = ind[-1]
pick.append(i)
ind = ind[:-1]
xx1 = np.maximum(x1[i], x1[ind])
yy1 = np.maximum(y1[i], y1[ind])
xx2 = np.minimum(x2[i], x2[ind])
yy2 = np.minimum(y2[i], y2[ind])
w = np.maximum(0., xx2 - xx1)
h = np.maximum(0., yy2 - yy1)
wh = w * h
o = wh / (area[i] + area[ind] - wh)
ind = ind[np.nonzero(o <= overlap)[0]]
return dets[pick, :]
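#Quick example sketch for nms_detections with made-up boxes: two heavily-overlapping detections and one
#separate one; the lower-scoring overlapping box should be suppressed at the default overlap=0.3.
def example_nms_detections():
    dets = np.array([[10, 10, 100, 100, 0.9],
                     [12, 12, 98, 105, 0.6],   #mostly overlaps the first box, lower score -> suppressed
                     [200, 200, 260, 280, 0.8]])
    kept = nms_detections(dets, overlap=0.3)
    print('kept {} of {} detections'.format(kept.shape[0], dets.shape[0]))
    return kept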
def img_dir_to_html(img_dir,filter='.jpg',htmlname=None):
imglist = [i for i in os.listdir(img_dir) if filter in i]
line_no=0
lines=[]
    if htmlname is None:
        htmlname = img_dir.replace('/','')+'.html'  #derive the output filename from the image dir path
with open(htmlname,'w') as f:
        f.write('<HTML><HEAD><TITLE>results '+img_dir+' </TITLE></HEAD>\n')
for img in imglist:
f.write('<br>\n')
link = '"'+os.path.join(img_dir,img)+'"'
f.write('<img src='+link+'>')
#f.write('<a href='+link+'>'+img+'</a>\n')
f.write('</HTML>\n')
f.close()
def do_for_all_files_in_dir(some_function,dir,filter='.jpg',**kwargs):
'''
why didnt i do this a year ago
applies a function onto a dir of jpgs
'''
print(kwargs)
files = [os.path.join(dir,f) for f in os.listdir(dir) if filter in f]
for f in files:
some_function(f,**kwargs)
def clothe_lots(clothing_dir,mannequin_dir,type='fullbody',n=10000,filter='gc'):
clothes_files = [os.path.join(clothing_dir,f) for f in os.listdir(clothing_dir) if filter in f]
mannequin_files = [os.path.join(mannequin_dir,f) for f in os.listdir(mannequin_dir)]
print('{} clothes and {} mannequins'.format(len(clothes_files),len(mannequin_files)))
n_done=0
while(n_done<n):
c=random.choice(clothes_files)
m=random.choice(mannequin_files)
print('{} is trying on {} n={}'.format(m,c,n_done))
clothe_the_naked(c,m,type=type,filter=filter)
n_done+=1
# for c in clothes_files:
# for m in mannequin_files:
# print('{} is trying on {}'.format(m,c))
# clothe_the_naked(c,m,type=type)
def clothe_the_naked(clothing_img, mannequin_img,type='fullbody',max_rot=6,save = True,interactive=True,savedir='clothed',filter=filter):
Utils.ensure_dir(savedir)
f = background_removal.find_face_dlib_with_scores(mannequin_img)
print(f)
img_mannequin = Utils.get_cv2_img_array(mannequin_img)
img_clothing = Utils.get_cv2_img_array(clothing_img)
center = (img_mannequin.shape[1]/2,img_mannequin.shape[0]/2)
angle = max_rot*np.random.randn(max_rot)[0]
r = cv2.getRotationMatrix2D(center,angle,scale=1)
# print(r)
    clothing_rotated = cv2.warpAffine(img_clothing,r,(img_mannequin.shape[1],img_mannequin.shape[0]))#dsize is (width,height); cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, 255
# print('angle {}'.format(angle))
if f['are_faces']:
faces = f['faces']
for face in faces:
print(face)
cv2.rectangle(img_mannequin,(face[0],face[1]),(face[0]+face[2],face[1]+face[3]),(255,100,0),thickness=3)
# cv2.imshow('mannequin',img_mannequin)
full_size=(256,256)
reduction_factor = 0.7
if type == 'fullbody':
reduction_factor = 0.8
clothes_size = (int(full_size[0]*reduction_factor),int(full_size[1]*reduction_factor))
mannequin_resized = resize_keep_aspect(mannequin_img,output_size=full_size)
# print('clothes size:{}'.format(clothes_size))
clothes_resized = resize_keep_aspect(clothing_rotated,output_size = clothes_size)
# cv2.imshow('orig m',img_mannequin)
# cv2.imshow('clothing rotated',clothing_rotated)
# cv2.imshow('mannequin_resized',mannequin_resized)
# cv2.imshow('clothes_resized',clothes_resized)
# k = cv2.waitKey(0)
# cv2.destroyAllWindows()
if filter: # these ones have already been interactively gc'd so no need to gc
p0 = clothes_resized[:,:,0]
p1 = clothes_resized[:,:,1]
p2 = clothes_resized[:,:,2]
nonzero = np.where((p0!=0)+(p1!=0)+(p2!=0),255,0)
print('size of nonzero {} type {}'.format(nonzero.shape,nonzero.dtype))
nonzero = np.array(nonzero,dtype=np.uint8)
print('size of nonzero {} type {}'.format(nonzero.shape,nonzero.dtype))
# cv2.imshow('themask',nonzero)
# cv2.waitKey(0)
#mask2 = np.where((mask == cv2.GC_FGD) + (mask == cv2.GC_PR_FGD), 255, 0).astype(np.uint8) #return all fg and prob. fg
result = overlay(nonzero, clothes_resized,mannequin_resized)
else:
result = gc_then_overlay(clothes_resized,mannequin_resized)
if result is None:
pass
elif save:
if isinstance(mannequin_img,basestring):
mannequin_name=os.path.basename(mannequin_img)
else:
mannequin_name='body'+''.join(random.choice(string.ascii_letters + string.digits) for _ in range(5))
if isinstance(clothing_img,basestring):
clothing_name=os.path.basename(clothing_img)
else:
clothing_name='clothing'+''.join(random.choice(string.ascii_letters + string.digits) for _ in range(5))
name = mannequin_name.replace('.jpg','')+clothing_name.replace('gc.png','').replace('.jpg','').replace('.png','')+'.jpg'
name = os.path.join(savedir,name)
print('saving image to {}'.format(name))
cv2.imwrite(name,result)
else:
print('not saving')
# elif interactive:
# k=raw_input('s or return to save...')
# if k == 's' or k== '':
cv2.destroyAllWindows()
def gc_then_overlay(im1,im2, position=None,save=True,visual_output=True):
    im1 = Utils.get_cv2_img_array(im1)
    im2 = Utils.get_cv2_img_array(im2)
if im1.shape[0]>im2.shape[0] or im1.shape[1]>im2.shape[1]:
print('overlay larger than image im1 {} im2 {}'.format(im1.shape,im2.shape))
return
if position == None:
position = (im2.shape[0]/2,im2.shape[1]/2)
mask_y = (im2.shape[0]-im1.shape[0])/2
mask_x = (im2.shape[1]-im1.shape[1])/2
bgdmodel = np.zeros((1, 65), np.float64)
fgdmodel = np.zeros((1, 65), np.float64)
mask = np.zeros(im1.shape[:2], dtype=np.uint8)
#TODO - maybe find something better than median as the threshold
# x0, x1, y0, y1 = []
# mask[y0:y1, x0:x1] = 0
# print('BG'+str(rectangle))
# cv2.GC_BGD, cv2.GC_FGD, cv2.GC_PR_BGD, cv2.GC_PR_FGD, or
#prob. backgnd - entire image
h,w = im1.shape[0:2]
x0, x1, y0, y1 = [0,w,0,h]
mask[y0:y1, x0:x1] = cv2.GC_PR_BGD
# print('PBG x0 {} x1 {} y0 {} y1 {} '.format(x0,x1,y0,y1))
#prob. fgnd - center rectangle
bb_percent_w = 0.5 #percent of image center to use as bb
bb_percent_h = 0.8 #percent of image center to use as bb
w = int(im1.shape[1]*bb_percent_w)
h = int(im1.shape[0]*bb_percent_h)
x = int((im1.shape[1]-w)/2)
y = int((im1.shape[0]-h)/2)
x0, x1, y0, y1 = [x,x+w,y,y+h]
mask[y0:y1, x0:x1] = cv2.GC_PR_FGD
print('PFG x0 {} x1 {} y0 {} y1 {} '.format(x0,x1,y0,y1))
#prob. fgnd - center rectangle
bb_percent = 0.1 #percent of image center to use as bb
w = int(im1.shape[1]*bb_percent)
h = int(im1.shape[0]*bb_percent)
x = int((im1.shape[1]-w)/2)
y = int((im1.shape[0]-h)/2)
x0, x1, y0, y1 = [x,x+w,y,y+h]
mask[y0:y1, x0:x1] = cv2.GC_FGD
# print('FG x0 {} x1 {} y0 {} y1 {} '.format(x0,x1,y0,y1))
try:
#TODO - try more than 1 grabcut call in itr
itr = 2
cv2.grabCut(im1, mask, None, bgdmodel, fgdmodel, itr, cv2.GC_INIT_WITH_MASK) #im, mask, rect, bgmodel, fgmoel, iterations
except:
print('grabcut exception')
return None
mask2 = np.where((mask == cv2.GC_FGD) + (mask == cv2.GC_PR_FGD), 255, 0).astype(np.uint8) #return all fg and prob. fg
# mask = background_removal.get_fg_mask(im1,bounding_box=bb)
# print('got mask shape {} uniques {} '.format(mask.shape,np.unique(mask)))
# cv2.imshow('mask_b4gc',mask)
# cv2.imshow('mask_aftergc',mask2)
# cv2.waitKey(0)
overlaid = overlay(mask2, im1,im2)
return overlaid
def overlay(im1_mask,im1, bgnd_img,position=None,rotation=0,scale=1,save=True,visual_output=True):
bgnd_img = Utils.get_cv2_img_array(bgnd_img)
    h,w = im1.shape[0:2] #shape is (rows,cols) i.e. (height,width)
if im1_mask.shape[0]>bgnd_img.shape[0] or im1_mask.shape[1]>bgnd_img.shape[1]:
print('overlay larger than image im1 {} im2 {}'.format(im1_mask.shape,bgnd_img.shape))
return
if position == None:
im2,contours,hierarchy = cv2.findContours(im1_mask, 1, 2)
# cv2.imshow('mask1',im1_mask)
# cv2.waitKey(0)
cnt = contours[0]
M = cv2.moments(cnt)
# print('contour moments:'+str(M))
# From this moments, you can extract useful data like area, centroid etc. Centroid is given by the relations, Cx=M10M00 and Cy=M01M00. This can be done as follows:
try:
cx = int(M['m10']/M['m00'])
cy = int(M['m01']/M['m00'])
print('cx {} cy {}'.format(cx,cy))
except:
print('prob division by zero, m00={}'.format(M['m00']))
cx = im1_mask.shape[0]/2
cy = im1_mask.shape[1]/2
# cv2.circle(im1_mask,(cx,cy),20,(255,100,50),thickness=5)
# cv2.rectangle(img_arr,(bbox[0],bbox[1]),(bbox[0]+bbox[2],bbox[1]+bbox[3]),color=(255,255,0),thickness=2)
# cv2.imshow('mask1',im1_mask)
# cv2.waitKey(0)
dx = im1_mask.shape[0]/2-cx
dy = im1_mask.shape[1]/2-cy
position = (dx,dy)
print('cx {} cy {} dx {} dy {}'.format(cx,cy,dx,dy))
print('shifting by {}'.format(position))
translation_matrix = np.float32([ [1,0,position[1]], [0,1,position[0]]] )
im1_mask = cv2.warpAffine(im1_mask, translation_matrix, (w, h)) # cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, 255)
im1 = cv2.warpAffine(im1, translation_matrix, (w, h)) #cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, 255)
if scale != 1:
print('im1_mask {} im1 {} before resize'.format(im1_mask.shape,im1.shape))
h,w = im1.shape[0:2]
dsize = (int(w*scale),int(h*scale))
im1_mask = cv2.resize(im1_mask,dsize)
im1 = cv2.resize(im1,dsize)
print('im1_mask {} im1 {} after resize'.format(im1_mask.shape,im1.shape))
if scale>1: #crop extra
extra = (dsize[0]-h,dsize[1]-w)
starty=extra[0]/2
endy = extra[0]/2+h
startx=extra[1]/2
endx = extra[1]/2+w
print('sy {} endy {} sx {} edx {}'.format(starty,endy,startx,endx))
im1 = im1[starty:endy,startx:endx,:]
im1_mask=im1_mask[starty:endy,startx:endx]
print('im1_mask {} im1 {} after crop'.format(im1_mask.shape,im1.shape))
else: #add missing
extra = (h-dsize[0],w-dsize[1])
print('extra {} h {} w {} dsize {} e0 {} e1 {}'.format(extra,h,w,dsize,extra[0],extra[1]))
starty=extra[0]/2
endy = extra[0]/2+dsize[0]
startx=extra[1]/2
endx = extra[1]/2+dsize[1]
print('sy {} endy {} sx {} edx {}'.format(starty,endy,startx,endx))
im1_dest = np.zeros((h,w,3))
im1_mask_dest = np.zeros((h,w))
im1_dest[starty:endy,startx:endx,:]= im1
im1_mask_dest[starty:endy,startx:endx]=im1_mask
print('im1_mask {} im1 {} after padding'.format(im1_mask.shape,im1.shape))
if rotation != 0:
center = (w/2,h/2)
r = cv2.getRotationMatrix2D(center,rotation,scale=1)
im1_mask = cv2.warpAffine(im1_mask, r, (w, h)) # cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, 255)
im1 = cv2.warpAffine(im1, r, (w, h)) #cv2.INTER_LINEAR, cv2.BORDER_CONSTANT, 255)
mask_y = (bgnd_img.shape[0]-im1_mask.shape[0])/2
mask_x = (bgnd_img.shape[1]-im1_mask.shape[1])/2
final_canvas = np.zeros_like(bgnd_img)
mask_height = im1_mask.shape[0]
mask_width = im1_mask.shape[1]
mask_on_canvas = np.zeros_like(bgnd_img)
mask_on_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,0] = im1[:,:,0]
mask_on_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,1] = im1[:,:,1]
mask_on_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,2] = im1[:,:,2]
print('im1 {} bgndd {} final canvas {} maskh {} maskw {}'.format(im1_mask.shape,bgnd_img.shape,final_canvas.shape,mask_height,mask_width))
final_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,0] = im1_mask
final_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,1] = im1_mask
final_canvas[mask_y:mask_y+mask_height,mask_x:mask_x+mask_width,2] = im1_mask
masked_1 = np.where(final_canvas!=0,mask_on_canvas,bgnd_img)
if visual_output:
# cv2.imshow('mask1',im1_mask)
# cv2.imshow('mask_on_canvas',mask_on_canvas)
# cv2.imshow('final',final_canvas)
# cv2.imshow('bgnd',bgnd_img)
cv2.imshow('masked_1',masked_1)
print('use arrow keys to translate:awds rotate:er scale:o-,p+ (q)uit, return to save')
k = cv2.waitKey(0)
#shift mask interactively
print('pressed value:'+str(k))
shift = 5 #pixels to translate each time
if k == 37 or k == ord('a'): #left
return(overlay(im1_mask,im1,bgnd_img,position=(0,-shift)))
elif k == 38 or k == ord('w'): #up
return(overlay(im1_mask,im1,bgnd_img,position=(-shift,0)))
elif k == 39 or k == ord('d'): #right
return(overlay(im1_mask,im1,bgnd_img,position=(0,+shift)))
elif k == 40 or k == ord('s'): #down
return(overlay(im1_mask,im1,bgnd_img,position=(shift,0)))
elif k == ord('+') or k==ord('p'): #enlargen
return(overlay(im1_mask,im1,bgnd_img,scale=1.05))
elif k == ord('-') or k==ord('o'): #smallen
return(overlay(im1_mask,im1,bgnd_img,scale=.95))
elif k == ord('e'): #rot-
return(overlay(im1_mask,im1,bgnd_img,rotation=-shift))
elif k == ord('r'): #rot+
return(overlay(im1_mask,im1,bgnd_img,rotation=shift))
elif k == ord('q'): #quit
return
return masked_1
# overlaid = np.where(mask_3channels>0,im1,im2)
def get_fg_mask(image, bounding_box=None):
rect = (0, 0, image.shape[1]-1, image.shape[0]-1)
bgdmodel = np.zeros((1, 65), np.float64) # what is this wierd size about? (jr)
fgdmodel = np.zeros((1, 65), np.float64)
# bounding box was sent from a human - grabcut with bounding box mask
if Utils.legal_bounding_box(bounding_box):
if Utils.all_inclusive_bounding_box(image, bounding_box): # bb is nearly the whole image
mask = np.zeros(image.shape[:2], dtype=np.uint8)
cv2.grabCut(image, mask, rect, bgdmodel, fgdmodel, 1, cv2.GC_INIT_WITH_RECT)
else:
mask = bb_mask(image, bounding_box)
cv2.grabCut(image, mask, rect, bgdmodel, fgdmodel, 1, cv2.GC_INIT_WITH_MASK)
# grabcut on the whole image, with/without face
else:
faces_dict = find_face_cascade(image)
# if len(faces_dict['faces']) > 0: # grabcut with mask
# try:
# rectangles = body_estimation(image, faces_dict['faces'][0])
# mask = create_mask_for_gc(rectangles, image)
# except:
# mask = create_mask_for_gc(image)
#
# else: # grabcut with arbitrary rect
mask = create_arbitrary(image)
cv2.grabCut(image, mask, rect, bgdmodel, fgdmodel, 1, cv2.GC_INIT_WITH_RECT)
mask2 = np.where((mask == 1) + (mask == 3), 255, 0).astype(np.uint8)
return mask2
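#Example sketch of applying get_fg_mask to cut the foreground out of an image - the path and bb are
#assumptions, and the grabcut helpers get_fg_mask relies on are assumed to be available in this module.
#The returned mask is 255 on (probable) foreground and 0 elsewhere, so bitwise_and keeps only fg pixels.
def example_get_fg_mask():
    image = cv2.imread('/path/to/person.jpg')  #assumed path
    if image is None:
        print('could not read example image')
        return None
    fg_mask = get_fg_mask(image, bounding_box=[50, 30, 200, 400])  #assumed bb in x,y,w,h
    fg_only = cv2.bitwise_and(image, image, mask=fg_mask)
    return fg_only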
def smallify_and_implant(arr_url_or_file,reduction_percent=30,background_image=None,bb=None,fade_in=True):
'''
WIP - finish this to augment yolo stuff - and call it from augment_images , checking size of largest object
and smallifying accordingly. so we have to keep track of bb's too and return those smallified in same way
:param arr_url_or_file:
:param reduction_percent:
:param background_image:
:return:
'''
img_arr = Utils.get_cv2_img_array(arr_url_or_file)
orig_h,orig_w = img_arr.shape[0:2]
if background_image is not None:
new_arr = resize_keep_aspect(background_image,output_size=(orig_h,orig_w))
else:
new_arr = np.zeros_like(img_arr)
    dsize=(int(orig_w*(1.0-reduction_percent/100.0)),int(orig_h*(1.0-reduction_percent/100.0))) #reduction_percent is a percentage; cv2.resize wants (width,height) not (height,width)
reduced = cv2.resize(img_arr,dsize)
x_wiggleroom = orig_w - dsize[0]
y_wiggleroom = orig_h - dsize[1]
def dominant_colors(img_arr,n_components=2):
'''
:param img_arr: this will generally be a subimage (orig image cropped to a bb)
:return:
'''
dom_color = None
if img_arr is None:
print('got non arr in dominant_colors')
return None
hsv = cv2.cvtColor(img_arr, cv2.COLOR_BGR2HSV)
if hsv is None:
print('some prob with hsv')
return None
try:
avg_hue = np.mean(hsv[:,:,0])
avg_sat = np.mean(hsv[:,:,1])
avg_val = np.mean(hsv[:,:,2])
stdev_hue = np.std(hsv[:,:,0])
stdev_sat = np.std(hsv[:,:,1])
stdev_val = np.std(hsv[:,:,2])
#take care of large std for red (which wraps around from 180 to 0
if stdev_hue>60:
print('correcting hue modulo, orig mean {} std {}'.format(avg_hue,stdev_hue))
hue=hsv[:,:,0]
mask=hue>90
hue=hue-mask*180
avg_hue = np.mean(hue)
stdev_hue = np.std(hue)
print('corrected hue modulo, new mean {} std {}'.format(avg_hue,stdev_hue))
    except:
        print('problem calculating sat or val')
        return None
print('avg hue {} std {} avg sat {} std {} avg val {} std {}'.format(avg_hue,stdev_hue,avg_sat,stdev_sat,avg_val,stdev_val))
min_sat_for_color = 0.3*255 #102
min_val_for_color=0.3*255 #76
max_std_for_color=70
max_val_for_black=0.35*255 #89
min_val_for_white=0.8*255 #204
max_sat_for_white=0.15*255 #38
max_sat_for_gray=0.1*255
max_val_for_gray=0.8*255
min_val_for_gray=0.3*255
if avg_sat > min_sat_for_color and avg_val > min_val_for_color and stdev_hue<max_std_for_color: #color in visible range
# print('got visible color')
colors = ['red','orange','yellow','green','aqua','blue','purple','pink','red']
# range_edges=[20,45,70,140,180,260,290,291,340] #for range 0-360
range_edges=[13,22,35,75,90,130,145,170,180]
i=0
while(avg_hue>range_edges[i]):
i=i+1
# i=i-1
# print('range edge '+str(i)+' color '+colors[i])
dom_color = colors[i]
elif avg_val < max_val_for_black:
# print('got black')
dom_color = 'black'
elif avg_val>min_val_for_white and avg_sat<max_sat_for_white:
# print('got white')
dom_color = 'white'
elif avg_val<max_val_for_gray and avg_val>min_val_for_gray and avg_sat<max_sat_for_gray:
dom_color='gray'
# grab the image channels, initialize the tuple of colors,
# the figure and the flattened feature vector
debug=False
if(debug):
chans = cv2.split(hsv)
colors = ("b", "g", "r")
plt.figure()
plt.title("'Flattened' Color Histogram")
plt.xlabel("Bins")
plt.ylabel("# of Pixels")
features = []
# loop over the image channels
for (chan, color) in zip(chans, colors):
# create a histogram for the current channel and
# concatenate the resulting histograms for each
# channel
hist = cv2.calcHist([chan], [0], None, [256], [0, 256])
features.extend(hist)
# plot the histogram
plt.plot(hist, color = color)
plt.xlim([0, 256])
blu_patch = mpatches.Patch(color='blue', label='Hue')
# plt.legend(handles=[blu_patch])
grn_patch = mpatches.Patch(color='green', label='Sat')
# plt.legend(handles=[grn_patch])
red_patch = mpatches.Patch(color='red', label='Val')
plt.legend(handles=[red_patch,blu_patch,grn_patch])
# here we are simply showing the dimensionality of the
# flattened color histogram 256 bins for each channel
# x 3 channels = 768 total values -- in practice, we would
# normally not use 256 bins for each channel, a choice
# between 32-96 bins are normally used, but this tends
# to be application dependent
        print("flattened feature vector size: %d" % (np.array(features).flatten().shape[0]))
plt.show()
print('dominant color:'+str(dom_color))
return dom_color
def test_dominant_colors():
images = ['white.jpg','black.jpg','pink.jpg','red.jpg','orange.jpg','yellow.jpg','green.jpg','blue.jpg','lightblue.jpg','purple.jpg',
'orange.jpg','grey.jpg','turqoise.jpg']
for im in images:
path = os.path.join('/home/jeremy/projects/core/images',im)
img_arr = cv2.imread(path)
col = dominant_colors(img_arr,n_components=2)
print('file:{} color {}'.format(path,col))
def browse_images(dir,filter='.jpeg'):
files = [os.path.join(dir,f) for f in os.listdir(dir) if filter in f]
for f in files:
img_arr = cv2.imread(f)
cv2.imshow('img',img_arr)
cv2.waitKey(0)
def one_person_per_image(image,save_dir='multiple_people',visual_output=False):
if isinstance(image,basestring):
# imgname = image.replace('https://','').replace('http://','').replace('/','_') #conver url to name
imgname = image
else:
imgname = 'test.jpg'
img_arr = Utils.get_cv2_img_array(image)
faces = background_removal.find_face_dlib_with_scores(img_arr)
print(faces)
if 'scores' in faces and 'faces' in faces:
for score,bbox in zip(faces['scores'],faces['faces']):
print('score {} bbox {}'.format(score,bbox))
cv2.rectangle(img_arr,(bbox[0],bbox[1]),(bbox[0]+bbox[2],bbox[1]+bbox[3]),color=(255,255,0),thickness=2)
if len(faces['scores'])>1:
multiples_dir = os.path.join(os.path.dirname(image),save_dir)
Utils.ensure_dir(multiples_dir)
savename = os.path.join(multiples_dir,os.path.basename(imgname))
print('more than one face found, moving {} to {}'.format(image,savename))
mvcmd = 'mv '+imgname+' '+savename
subprocess.call(mvcmd,shell=True)
if visual_output:
cv2.imshow('image',img_arr)
cv2.waitKey(100)
def x1y1x2y2_to_xywh(bb):
assert bb[2]>bb[0],'bb not in format x1y1x2y2 {}'.format(bb)
assert bb[3]>bb[1],'bb not in format x1y1x2y2 {}'.format(bb)
return [bb[0],bb[1],bb[2]-bb[0],bb[3]-bb[1]]
def xywh_to_x1y1x2y2(bb):
return [bb[0],bb[1],bb[2]+bb[0],bb[3]+bb[1]]
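#Tiny example of the two bb-format helpers above: convert an xywh box to x1y1x2y2 corners and back,
#which should reproduce the original values (numbers are arbitrary).
def example_bb_format_roundtrip():
    bb_xywh = [40, 60, 100, 150]
    bb_corners = xywh_to_x1y1x2y2(bb_xywh)  #[40, 60, 140, 210]
    assert x1y1x2y2_to_xywh(bb_corners) == bb_xywh
    return bb_corners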
def xywh_to_yolo(bb_xywh,dims_hxw,correct_out_of_bounds=True):
'''
output : for yolo - https://pjreddie.com/darknet/yolo/
Darknet wants a .txt file for each image with a line for each ground truth object in the image that looks like:
<object-class> <x> <y> <width> <height>
where those are percentages and x,y are CENTER OF BB (also in percent)
:param bb_xywh:
:param image_dims size of image for this bb (needed since yolo wants bb's as percentages)
:return:
'''
if correct_out_of_bounds:
        if bb_xywh[0] > dims_hxw[1]:
            bb_xywh[0] = dims_hxw[1]
            logging.warning('corrected x out of bounds')
        if bb_xywh[1] > dims_hxw[0]:
            bb_xywh[1] = dims_hxw[0]
            logging.warning('corrected y out of bounds!')
if bb_xywh[0]+bb_xywh[2] > dims_hxw[1]:
bb_xywh[2] = dims_hxw[1]-bb_xywh[0]
logging.warning('corrected x+w > image width!!')
if bb_xywh[1]+bb_xywh[3] > dims_hxw[0]:
bb_xywh[3] = dims_hxw[0]-bb_xywh[1]
logging.warning('corrected y+h > image height!!')
x_center = bb_xywh[0]+(bb_xywh[2]/2.0) #x1+w/2
y_center = bb_xywh[1]+(bb_xywh[3]/2.0) #y1+h/2
x_p = float(x_center)/dims_hxw[1] #center x as %
y_p = float(y_center)/dims_hxw[0] #center y as %
w_p = float(bb_xywh[2])/dims_hxw[1] #width as %
h_p = float(bb_xywh[3])/dims_hxw[0] #height as %
try:
assert x_p<=1,'x > image width!!'
except:
logging.warning('x_p>1 bb {} out of bounds hw {}'.format(bb_xywh,dims_hxw))
try:
assert y_p<=1,'y > image height!!'
except:
logging.warning('y_p > 1 bb {} out of bounds hw {}'.format(bb_xywh,dims_hxw))
try:
assert bb_xywh[0]+bb_xywh[2]<=dims_hxw[1],'x+w > image width!!'
except:
logging.warning('x+width bb {} out of bounds hw {}'.format(bb_xywh,dims_hxw))
try:
assert bb_xywh[1]+bb_xywh[3]<=dims_hxw[0],'y+h > image height!!'
except:
logging.warning('y+height bb {} out of bounds hw {}'.format(bb_xywh,dims_hxw))
return([x_p,y_p,w_p,h_p])
def x1x2y1y2_to_yolo(size, box):
dw = 1./(size[0])
dh = 1./(size[1])
x = (box[0] + box[1])/2.0 - 1
y = (box[2] + box[3])/2.0 - 1
w = box[1] - box[0]
h = box[3] - box[2]
x = x*dw
w = w*dw
y = y*dh
h = h*dh
return (x,y,w,h)
def yolo_to_xywh(bb_yolo,image_dims_HxW): #should change this to HxW and all callers, what was i thiinking
'''
output : for yolo - https://pjreddie.com/darknet/yolo/
Darknet wants a .txt file for each image with a line for each ground truth object in the image that looks like:
:param bb_yolo: x_center, y_center, w, h all as percentages of image width or height
:param image_dims size of image for this bb (needed since yolo wants bb's as percentages)
:return:
'''
x_center = float(bb_yolo[0])*image_dims_HxW[1] #center x in pixels
y_center = float(bb_yolo[1])*image_dims_HxW[0] #center y pixels
w = float(bb_yolo[2])*image_dims_HxW[1] #width pixels
h = float(bb_yolo[3])*image_dims_HxW[0] #height pixels
x=x_center-w/2
y=y_center-h/2
logging.debug('in {} dims {} out(xywh) {} {} {} {}'.format(bb_yolo,image_dims_HxW,x,y,w,h))
return([int(x),int(y),int(w),int(h)])
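#Example sketch of writing and reading back a darknet/yolo label line using the two converters above.
#The class index, bb and image size are made up; darknet expects one '<class> <x> <y> <w> <h>' line per
#object, with x,y the bb center and all four values given as fractions of the image width/height.
def example_yolo_bb_conversion():
    dims_hxw = (480, 640)  #image height x width
    bb_xywh = [100, 120, 200, 240]
    x_p, y_p, w_p, h_p = xywh_to_yolo(bb_xywh, dims_hxw)
    label_line = '{} {} {} {} {}'.format(3, x_p, y_p, w_p, h_p)  #3 is an arbitrary class index
    print('yolo label line: ' + label_line)
    recovered = yolo_to_xywh([x_p, y_p, w_p, h_p], dims_hxw)
    print('recovered bb (xywh, pixels): ' + str(recovered))  #matches the original up to rounding
    return label_line, recovered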
def bb_with_text(img_arr,bb_xywh,text,boxcolor = [50,255,50],text_bgnd_color=[255,255,80],box_thickness=1):
text_color=[0,50,255]
cv2.rectangle(img_arr,(bb_xywh[0],bb_xywh[1]),(bb_xywh[0]+bb_xywh[2],bb_xywh[1]+bb_xywh[3]),color=boxcolor,thickness=box_thickness)
img_arr[bb_xywh[1]:bb_xywh[1]+20,bb_xywh[0]:bb_xywh[0]+bb_xywh[2]]=(img_arr[bb_xywh[1]:bb_xywh[1]+20,bb_xywh[0]:bb_xywh[0]+bb_xywh[2]]/2)+np.array(text_bgnd_color)/2
cv2.putText(img_arr,text,(bb_xywh[0]+5,bb_xywh[1]+20),cv2.FONT_HERSHEY_PLAIN, 1, text_color)
return img_arr
def count_values(mask,labels=None):
image_size = mask.shape[0]*mask.shape[1]
uniques = np.unique(mask)
pixelcounts = {}
for unique in uniques:
pixelcount = len(mask[mask==unique])
ratio = float(pixelcount)/image_size
if labels is not None:
print('class {} {} count {} ratio {}'.format(unique,labels[unique],pixelcount,ratio))
else:
print('class {} count {} ratio {}'.format(unique,pixelcount,ratio))
pixelcounts[unique]=pixelcount
return pixelcounts
def get_median_image(img_arr_list,visual_output=True):
''''
given list of image arrs, produce median image useful for bg subtraction
'''
np_images = np.array(img_arr_list)
print('np size:'+str(np_images.shape))
median_image = np.median(np_images,axis=0) #get median pixel across images
print('type:'+str(type(median_image)))
median_image = np.array(median_image,dtype=np.uint8)
print('median size:'+str(median_image.shape))
if visual_output:
cv2.imshow('median',median_image)
k=cv2.waitKey(0)
return median_image
def test_median_image():
dir = '/home/jeremy/PycharmProjects/snooker/'
files = [file for file in os.listdir(dir) if '.jpg' in file]
files = sorted(files)
# build image array
img_arr_list =[]
# for file in files:
# path = os.path.join(dir,file)
# img_arr = cv2.imread(path)
# img_arr_list.append(img_arr)
#
# med_img = get_median_image(img_arr_list)
# cv2.imwrite(os.path.join(dir,'median.bmp'),med_img)
med_img = cv2.imread(os.path.join(dir, 'median2.bmp'))
med_eq = clahe_rgb(med_img)
cv2.imshow('hi',med_img)
cv2.imshow('eq',med_eq)
cv2.waitKey(0)
height, width, channels = med_img.shape
outfile = os.path.join(dir, 'out.mp4')
# Define the codec and create VideoWriter object
fourcc = cv2.VideoWriter_fourcc(*'mp4v') # Be sure to use lower case
out = cv2.VideoWriter(outfile, fourcc, 20.0, (width, height))
import time
start = time.time()
for file in files:
path = os.path.join(dir, file)
img_arr = cv2.imread(path)
img_eq = clahe_rgb(img_arr)
diff = cv2.subtract(img_eq, med_eq)
cv2.imshow('diff',diff)
cv2.waitKey(10)
print('ok1')
out.write(diff) # Write out frame to video
print('ok2')
elapsed = time.time() - start
print(
'elapsed {} n {} tpi {} ipt {} '.format(elapsed, len(files), elapsed / len(files), float(len(files)) / elapsed))
def clahe_rgb(img_arr):
#-----Converting image to LAB Color model-----------------------------------
lab= cv2.cvtColor(img_arr, cv2.COLOR_BGR2LAB)
# cv2.imshow("lab",lab)
#-----Splitting the LAB image to different channels-------------------------
l, a, b = cv2.split(lab)
# cv2.imshow('l_channel', l)
# cv2.imshow('a_channel', a)
# cv2.imshow('b_channel', b)
# #-----Applying CLAHE to L-channel-------------------------------------------
clahe = cv2.createCLAHE(clipLimit=3.0, tileGridSize=(8,8))
cl = clahe.apply(l)
# cv2.imshow('CLAHE output', cl)
#-----Merge the CLAHE enhanced L-channel with the a and b channel-----------
limg = cv2.merge((cl,a,b))
# cv2.imshow('limg', limg)
#-----Converting image from LAB Color model to RGB model--------------------
final = cv2.cvtColor(limg, cv2.COLOR_LAB2BGR)
# cv2.imshow('final', final)
return final
if __name__ == "__main__":
test_median_image()
img=cv2.imread('../images/female1.jpg')
resize_by_adding_border(img,output_size=(900,1000),visual_output=True)
# test_or_training_textfile('/home/jr/python-packages/trendi/classifier_stuff/caffe_nns/only_train',test_or_train='test')
# test_or_training_textfile('/home/jr/python-packages/trendi/classifier_stuff/caffe_nns/only_train',test_or_train='train')
# Utils.remove_duplicate_files('/media/jr/Transcend/my_stuff/tg/tg_ultimate_image_db/ours/pd_output_brain1/')
# resize_and_crop_image_using_bb('../images/female1.jpg',bb=[240,122,170,170],output_w=50,output_h=50)
# resize_and_crop_image_using_bb('../images/female1.jpg',bb=[240,122,170,400],output_w=50,output_h=50)
# resize_and_crop_image_using_bb('../images/female1.jpg',bb=[240,122,170,400],output_w=150,output_h=50)
# resize_and_crop_image_using_bb('../images/female1.jpg',bb=[240,122,170,400],output_w=50,output_h=150)
#resize_and_crop_image_using_bb('../images/female1.jpg',bb=[240,122,170,170],output_w=1000,output_h=100)
# avg_h,avg_w,avg_d,avg_B,avg_G,avg_R,totfiles = image_stats_from_dir_of_dirs(dir_of_dirs,filter='test')
# print('avg h {} avg w {} avgB {} avgG {} avgR {} nfiles {} in dir_of_dirs {}',avg_h,avg_w,avg_d,avg_B,avg_G,avg_R,totfiles,dir_of_dirs)
# dir_of_dirs = '/home/jr/core/classifier_stuff/caffe_nns/dataset'
# raw_input('enter to continue')
# image_chooser_dir_of_dirs(dir_of_dirs,output_dir)
# image_chooser(dir_of_dirs,output_dir)
# crop_files_in_dir_of_dirs(dir_of_dirs,bb=None,output_w =150,output_h =200,use_visual_output=True)
# dir = '/home/jeremy/projects/core/images'
# resize_and_crop_maintain_bb_on_dir(dir, output_width = 448, output_height = 448,use_visual_output=True)
if(0): #test mask to bbs
# url = 'http://s-media-cache-ak0.pinimg.com/736x/fe/5d/f7/fe5df7e80093f674ecc79a9f30069a8a.jpg'
# start=time.time()
# retval = neurodoll_falcon_client.nd(url,get_combined_results=True)
#
# elapsed = time.time()-start
# print('elapsed time in nd:'+str(elapsed))
# if retval['success']:
# print('got nd')
# cv2.imwrite('/home/jeremy/projects/core/images/dress_mask_u21.png',retval['mask'])
# mask_to_rects(retval['mask'])
# else:
# print('did not get good mask from ndfc')
mask = cv2.imread('/home/jeremy/projects/core/images/dress_mask_u21.png')
bbs = mask_to_rects(mask,visual_output=True)
print('bbs:{}'.format(bbs))
if(0) : #test dominant colors
dir = '/home/jeremy/Dropbox/tg/color_snatches'
files = [os.path.join(dir,f) for f in os.listdir(dir)]
for file in files:
print('file '+file)
im1=cv2.imread(file)
cv2.imshow('im1',im1)
cv2.waitKey(0)
dominant_colors(im1)
# dir = '/home/jeremy/tg/pd_output'
# dir = '/root'
# indir = '/home/jeremy/image_dbs/fashionista-v0.2.1'
# outdir = '/home/jeremy/image_dbs/fashionista-v0.2.1/reduced_cats'
#
# indir = '/home/jeremy/image_dbs/colorful_fashion_parsing_data/labels_200x150'
# outdir = '/home/jeremy/image_dbs/colorful_fashion_parsing_data/labels_200x150/reduced_cats'
# # defenestrate_directory(indir,outdir,filter='.png',keep_these_cats=[1,55,56,57],labels=constants.fashionista_categories_augmented)
#
# if host == 'jr-ThinkPad-X1-Carbon' or host == 'jr':
# dir_of_dirs = '/home/jeremy/tg/train_pairs_dresses'
# output_dir = '/home/jeremy/tg/curated_train_pairs_dresses'
# sourcedir = '/home/jeremy/projects/core/d1'
# targetdir = '/home/jeremy/projects/core/d2'
# infile = '/home/jeremy/projects/core/images/female1.jpg'
# else:
# dir_of_dirs = '/home/jeremy/core/classifier_stuff/caffe_nns/dataset/cropped'
# output_dir = '/home/jeremy/core/classifier_stuff/caffe_nns/curated_dataset'
#
# # kill_the_missing(sourcedir, targetdir)
#
# image_chooser('/data/jeremy/image_dbs/tg/google/pijamas - Google Search_files')
#
# output_file = 'resized.jpg'
# img_arr = cv2.imread(infile)
# orig_h,orig_w = img_arr.shape[0:2]
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (600,400),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (600,401),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (600,399),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (400,600),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (400,601),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
# resize_keep_aspect(infile, output_file=output_file, output_size = (400,599),use_visual_output=True)
# undo_resize_keep_aspect(output_file, output_file=None, output_size = (orig_h,orig_w),use_visual_output=True,careful_with_the_labels=True)
#
#nonlinear xforms , stolen from:
#https://www.kaggle.com/bguberfain/ultrasound-nerve-segmentation/elastic-transform-for-data-augmentation/comments
'''
import numpy as np
import pandas as pd
import cv2
from scipy.ndimage.interpolation import map_coordinates
from scipy.ndimage.filters import gaussian_filter
import matplotlib.pyplot as plt
# Function to distort image
def elastic_transform(image, alpha, sigma, alpha_affine, random_state=None):
"""Elastic deformation of images as described in [Simard2003]_ (with modifications).
.. [Simard2003] <NAME>, "Best Practices for
Convolutional Neural Networks applied to Visual Document Analysis", in
Proc. of the International Conference on Document Analysis and
Recognition, 2003.
Based on https://gist.github.com/erniejunior/601cdf56d2b424757de5
"""
if random_state is None:
random_state = np.random.RandomState(None)
shape = image.shape
shape_size = shape[:2]
# Random affine
center_square = np.float32(shape_size) // 2
square_size = min(shape_size) // 3
pts1 = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])
pts2 = pts1 + random_state.uniform(-alpha_affine, alpha_affine, size=pts1.shape).astype(np.float32)
M = cv2.getAffineTransform(pts1, pts2)
image = cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101)
dx = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha
dy = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha
dz = np.zeros_like(dx)
x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2]))
indices = np.reshape(y+dy, (-1, 1)), np.reshape(x+dx, (-1, 1)), np.reshape(z, (-1, 1))
return map_coordinates(image, indices, order=1, mode='reflect').reshape(shape)
# Define function to draw a grid
def draw_grid(im, grid_size):
# Draw grid lines
for i in range(0, im.shape[1], grid_size):
cv2.line(im, (i, 0), (i, im.shape[0]), color=(255,))
for j in range(0, im.shape[0], grid_size):
cv2.line(im, (0, j), (im.shape[1], j), color=(255,))
# Load images
im = cv2.imread("../input/train/10_1.tif", -1)
im_mask = cv2.imread("../input/train/10_1_mask.tif", -1)
# Draw grid lines
draw_grid(im, 50)
draw_grid(im_mask, 50)
# Merge images into separate channels (shape will be (rows, cols, 2))
im_merge = np.concatenate((im[...,None], im_mask[...,None]), axis=2)
# First sample...
%matplotlib inline
# Apply transformation on image
im_merge_t = elastic_transform(im_merge, im_merge.shape[1] * 2, im_merge.shape[1] * 0.08, im_merge.shape[1] * 0.08)
# Split image and mask
im_t = im_merge_t[...,0]
im_mask_t = im_merge_t[...,1]
# Display result
plt.figure(figsize = (16,14))
plt.imshow(np.c_[np.r_[im, im_mask], np.r_[im_t, im_mask_t]], cmap='gray')
# Second sample (heavyer transform)...
%matplotlib inline
# Apply transformation on image
im_merge_t = elastic_transform(im_merge, im_merge.shape[1] * 3, im_merge.shape[1] * 0.07, im_merge.shape[1] * 0.09)
# Split image and mask
im_t = im_merge_t[...,0]
im_mask_t = im_merge_t[...,1]
# Display result
plt.figure(figsize = (16,14))
plt.imshow(np.c_[np.r_[im, im_mask], np.r_[im_t, im_mask_t]], cmap='gray')
managed to get about 4x improvement by using:
# include 4 standard deviations in the kernel (the default for ndimage.gaussian_filter)
# OpenCV also requires an odd size for the kernel hence the "| 1" part
blur_size = int(4*sigma) | 1
cv2.GaussianBlur(image, ksize=(blur_size, blur_size), sigmaX=sigma)
instead of ndimage.gaussian_filter(image, sigma)
and cv2.remap(image, dx, dy, interpolation=cv2.INTER_LINEAR) instead of ndimage.map_coordinates(image, (dx, dy), order=1)
resize_keep_aspect(infile, output_file=None, output_size = (300,200),use_visual_output=True)
'''
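# Hedged sketch (added; not part of the original module): the speed-up described in the
# pasted notes above, replacing scipy.ndimage.gaussian_filter with cv2.GaussianBlur and
# map_coordinates with cv2.remap. Single-channel images only; parameter names mirror the
# elastic_transform reference above, and cv2/np are assumed to be imported at the top of
# this file. A sketch, not the original implementation.
def elastic_transform_fast(image, alpha, sigma, random_state=None):
    if random_state is None:
        random_state = np.random.RandomState(None)
    shape = image.shape[:2]
    # include ~4 standard deviations in the kernel; OpenCV wants an odd kernel size
    blur_size = int(4 * sigma) | 1
    dx = cv2.GaussianBlur((random_state.rand(*shape) * 2 - 1).astype(np.float32),
                          ksize=(blur_size, blur_size), sigmaX=sigma) * alpha
    dy = cv2.GaussianBlur((random_state.rand(*shape) * 2 - 1).astype(np.float32),
                          ksize=(blur_size, blur_size), sigmaX=sigma) * alpha
    # cv2.remap expects absolute sampling coordinates, so add the displacement field
    # to a pixel-coordinate grid (float32)
    grid_x, grid_y = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))
    map_x = (grid_x + dx).astype(np.float32)
    map_y = (grid_y + dy).astype(np.float32)
    return cv2.remap(image, map_x, map_y, interpolation=cv2.INTER_LINEAR,
                     borderMode=cv2.BORDER_REFLECT_101)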
```
#### File: ml-support-code-in-python/nn_utils/augment_images.py
```python
import cv2
import numpy as np
# import scipy as sp
import os
import logging
import time
import string
import random
import copy
import imutils
import read_various_training_formats
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("simple_example")
logger.setLevel(logging.DEBUG)
def generate_images(img_filename, max_angle = 5,n_angles=10,
max_offset_x = 100,n_offsets_x=1,
max_offset_y = 100, n_offsets_y=1,
max_scale=1.2, n_scales=1,
noise_level=0.05,n_noises=1,noise_type='gauss',
max_blur=2, n_blurs=1,
do_mirror_lr=True,do_mirror_ud=False,output_dir=None,
show_visual_output=False,bb=None,do_bb=False,suffix='.jpg'):
'''
generates a bunch of variations of image by rotating, translating, noising etc
total # images generated is n_angles*n_offsets_x*n_offsets_y*n_noises*n_scales*etc, these are done in nested loops
if you don't want a particular xform set n_whatever = 0
original image dimensions are preserved
:param img_filename: path of the image file to vary
:param max_angle: rotation limit (degrees)
:param n_angles: number of rotated images
:param max_offset_x: x offset limit (pixels)
:param n_offsets_x: number of x-offset images
:param max_offset_y: y offset limit (pixels)
:param n_offsets_y: number of y-offset images
:param max_scale: global scaling factor limit
:param n_scales: number of globally scaled images
:param noise_level: level of gaussian noise to add - 0->no noise, 1->noise_level (avg 128)
:param n_noises: number of noised images
:param noise_type 'gauss' Gaussian-distributed additive noise.
'poisson' Poisson-distributed noise generated from the data.
's&p' Replaces random pixels with 0 or 1.
'speckle' Multiplicative noise using out = image + n*image
None
:param max_blur: level of blur (pixels in kernel) to add - 0->no noise,
:param n_blurs: number of blurred images
:param do_mirror_lr: work on orig and x-axis-flipped copy
:param do_mirror_ud: work on orig and x-axis-flipped copy
:param output_dir: dir to write output images
:return:
'''
img_arr = cv2.imread(img_filename)
if img_arr is None:
logging.warning('didnt get input image '+str(img_filename))
return
orig_path, filename = os.path.split(img_filename)
if output_dir is not None and not os.path.exists(output_dir):
os.mkdir(output_dir)
eps = 0.01
if n_angles <2:
angles = [0.0]
else:
angles = np.arange(-max_angle, max_angle+eps, max_angle*2 / (n_angles-1))
if n_offsets_x <2:
offsets_x = [0]
else:
offsets_x = np.arange(-max_offset_x, max_offset_x+eps, max_offset_x*2/(n_offsets_x-1))
if n_offsets_y <2:
offsets_y = [0]
else:
offsets_y = np.arange(-max_offset_y, max_offset_y+eps, max_offset_y*2/(n_offsets_y-1))
if n_scales <1:
scales = [1.0]
elif n_scales ==1: #todo - change dx , dy , angles to have ==1 case
scales = [max_scale]
else:
scales = np.arange(1, max_scale+eps, (max_scale-1)/(n_scales-1))
if n_blurs <1:
blurs = [0]
elif n_blurs ==1:
blurs = [max_blur]
else:
print('n_blurs-1:' + str(n_blurs-1))
rat = float(max_blur)/(n_blurs-1)
print('rat:'+str(rat))
blurs = np.arange(1, max_blur+eps, rat)
if n_noises <1:
n_noises=1
noise_type=None
print('angles {0} offsets_x {1} offsets_y {2} scales {3} n_noises {4} lr {5} ud {6} blurs {7} '.format(angles,offsets_x,offsets_y,scales,n_noises,do_mirror_lr,do_mirror_ud,blurs))
height=img_arr.shape[0]
width=img_arr.shape[1]
if len(img_arr.shape) == 3: #depth only exists for 3-channel arrays
depth = img_arr.shape[2]
else:
depth = 1
center = (width/2,height/2)
reflections=[img_arr]
if do_mirror_lr:
fimg=img_arr.copy()
mirror_image = cv2.flip(fimg,1)
reflections.append(mirror_image)
if do_mirror_ud:
fimg=img_arr.copy()
mirror_image = cv2.flip(fimg,0)
reflections.append(mirror_image)
if do_mirror_ud and do_mirror_lr:
fimg=img_arr.copy()
mirror_image = cv2.flip(fimg,0)
mirror_image = cv2.flip(mirror_image,1)
reflections.append(mirror_image)
if show_visual_output:
cv2.imshow('orig',img_arr)
k = cv2.waitKey(0)
if 'bbox_' in img_filename and bb is None and do_bb:
strs = img_filename.split('bbox_')
bb_str = strs[1]
coords = bb_str.split('_')
bb_x = int(coords[0])
bb_y = int(coords[1])
bb_w = int(coords[2])
bb_h = coords[3].split('.')[0] #this has .jpg or .bmp at the end
bb_h = int(bb_h)
bb=[bb_x,bb_y,bb_w,bb_h]
bb_points = [[bb_x,bb_y],[bb_x+bb_w,bb_y],[bb_x,bb_y+bb_h],[bb_x+bb_w,bb_y+bb_h]] #topleft topright bottomleft bottomright
print('bb:'+str(bb))
if bb_h == 0:
logging.warning('bad height encountered in generate_images for '+str(img_filename))
return None
if bb_w == 0:
logging.warning('bad width encountered in generate_images for '+str(img_filename))
return None
# Python: cv2.transform(src, m[, dst]) -> dst
#http://docs.opencv.org/2.4/modules/core/doc/operations_on_arrays.html#void%20transform%28InputArray%20src,%20OutputArray%20dst,%20InputArray%20m%29
#SO CLEANNNN
for n_reflection in range(0,len(reflections)):
for offset_x in offsets_x:
for offset_y in offsets_y:
for angle in angles:
for scale in scales:
for i in range(0,n_noises):
for blur in blurs:
original_img = reflections[n_reflection]
if blur == 0:
blurred = original_img #blur=0 crashes cv2.blur
else:
blurred = cv2.blur(original_img,(int(blur),int(blur))) #fails if blur is nonint or 0
noised = add_noise(blurred,noise_type,noise_level)
print('center {0} angle {1} scale {2} h {3} w {4}'.format(center,angle, scale,height,width))
M = cv2.getRotationMatrix2D(center, angle,scale)
# print('M='+str(M))
M[0,2]=M[0,2]+offset_x
M[1,2]=M[1,2]+offset_y
print('M='+str(M))
dest = np.ones_like(img_arr) * 255
# xformed_img_arr = cv2.warpAffine(noised, M, (width,height),dst=dest,borderMode=cv2.BORDER_TRANSPARENT)
xformed_img_arr = cv2.warpAffine(noised, M, (width,height),dst=dest,borderMode=cv2.BORDER_REPLICATE)
xformed_img_arr = dest
if do_bb:
xformed_bb_points = np.dot(bb_points,M)
name = filename[0:-4]+'_ref{0}dx{1}dy{2}rot{3}scl{4}n{5}b{6}'.format(n_reflection,offset_x,offset_y,angle,scale,i,blur)+suffix
name = filename[0:-4]+'_m%dx%dy%dr%.2fs%.2fn%db%.2f' % (n_reflection,offset_x,offset_y,angle,scale,i,blur)+suffix
if output_dir is not None:
full_name = os.path.join(output_dir,name)
else:
full_name = os.path.join(orig_path,name)
print('name:'+str(full_name))
cv2.imwrite(full_name, xformed_img_arr)
if show_visual_output:
cv2.imshow('xformed',xformed_img_arr)
k = cv2.waitKey(0)
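# Hedged usage sketch (added; not part of the original module): one way generate_images
# might be called. The input path and output_dir are hypothetical. Defined only, never
# called here.
def generate_images_demo():
    generate_images('../images/female1.jpg',
                    max_angle=5, n_angles=3,
                    max_offset_x=20, n_offsets_x=2,
                    max_offset_y=20, n_offsets_y=2,
                    noise_level=0.05, n_noises=1, noise_type='gauss',
                    do_mirror_lr=True, do_mirror_ud=False,
                    output_dir='augmented_out', show_visual_output=False)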
def multichannel_to_mask(multichannel_arr):
'''
from an n-channel binary image (one channel per category), make a mask array (single channel with integers indicating categories)
:param multichannel_arr:
:return:
'''
if len(multichannel_arr.shape) != 3:
logging.debug('got 1-chan image in multichannel_to_mask')
return multichannel_arr
h,w,c = multichannel_arr.shape
output_arr = np.zeros([h,w])
cumulative = 0
for chan in range(c):
nth_chan = multichannel_arr[:,:,chan]
pixel_count = np.count_nonzero(nth_chan)
cumulative = cumulative + pixel_count
# print('multichannel to mask {} pixcount {}'.format(chan,pixel_count))
output_arr[nth_chan != 0] = chan
pixel_count = np.count_nonzero(output_arr)
# print('cumulative pixcount {}'.format(cumulative))
return output_arr
def mask_to_multichannel(mask_arr,n_channels):
'''
from mask_array (single chan with integers indicating categories), make n-channel binary image (one chan for every category)
:param mask_arr:
:param n_channels:
:return:
'''
if len(mask_arr.shape) != 2:
logging.debug('got multichannel image in mask_to_multichannel, converting to single chan: array shape:'+str(mask_arr.shape))
# assert(mask_arr[:,:,0] == mask_arr[:,:,1]) #include these if paranoid
# assert(mask_arr[:,:,0] == mask_arr[:,:,2])
mask_arr = mask_arr[:,:,0] #take 0th channel
h,w = mask_arr.shape[0:2]
output_arr = np.zeros([h,w,n_channels])
for i in np.unique(mask_arr):
channel = np.zeros([h,w])
channel[mask_arr == i] = 1
# print('mask to multichannel {} pixcount {}'.format(i,pixel_count))
output_arr[:,:,i] = channel
# print('cumulative pixcount {}'.format(pixel_count))
logging.debug('nonzero elements in layer {}:{} '.format(i,len(mask_arr[mask_arr==i])))
logging.debug('nonzero in multichan layer {}:{}'.format(i,np.count_nonzero(output_arr[:,:,i])))
logging.debug('nonzero elements in orig:{} nonzero in multichan {}'.format(np.nonzero(mask_arr),np.nonzero(output_arr)))
return output_arr
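# Hedged sanity-check sketch (added; not part of the original module): mask_to_multichannel
# followed by multichannel_to_mask should reproduce the original label mask. The toy mask
# and channel count are made up for illustration; defined only, never called here.
def mask_roundtrip_check(n_channels=21):
    toy_mask = np.zeros((4, 6), dtype=np.uint8)
    toy_mask[1:3, 2:5] = 3   # a small patch of category 3
    toy_mask[0, 0] = 7       # one pixel of category 7
    multichan = mask_to_multichannel(toy_mask, n_channels)
    recovered = multichannel_to_mask(multichan)
    print('roundtrip ok:' + str(np.array_equal(toy_mask, recovered)))
    return np.array_equal(toy_mask, recovered)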
#
def resize_bbs(bblist_xywh,orig_shape,new_shape,img_arr=None):
x_factor = new_shape[1]/orig_shape[1]
y_factor = new_shape[0]/orig_shape[0]
print('resize factor : {},{}'.format(x_factor,y_factor))
resized_bbs = []
for bb in bblist_xywh:
bb_out = [bb[0]*x_factor,bb[1]*y_factor,bb[2]*x_factor,bb[3]*y_factor]
resized_bbs.append(bb_out)
if img_arr is not None:
imutils.bb_with_text(img_arr, bb_out, 'resized')
return resized_bbs
def flip_bbs(image_dims_h_w, bb_list_xywh,flip_rl=False,flip_ud=False):
for bb in bb_list_xywh:
logging.debug('initial bb {}'.format(bb))
if flip_rl:
right_margin = image_dims_h_w[1]-(bb[0]+bb[2]) #width - right bb edge
bb[0] = right_margin
if flip_ud:
bottom_margin = image_dims_h_w[0]-(bb[1]+bb[3]) #height - bottom bb edge
bb[1] = bottom_margin
logging.debug('final bb {}'.format(bb))
return bb_list_xywh
def warp_bbs(bblist_xywh,M,dims_hw,img_arr=None):
'''
apply affine xform matrix M to bbs
:param bblist_xywh:
:param M:
:return: bblist_xywh after affine xform
'''
bbs_out=[]
# bblist_xywh = [bblist_xywh[0]]
logging.debug('Mshape '+str(M.shape))
logging.debug('img dims '+str(dims_hw))
for bb in bblist_xywh:
# bbs_xy_chans = np.array([[bb[0],bb[1]],[bb[0]+bb[2],bb[1]+bb[3]]])
# print('bbs out '+str(bbs_out))
# print('cols {}'.format(M.cols))
# src = np.array([
# [bb[0],bb[1]], #tl
# [bb[0]+bb[2],bb[1]], #tr
# [bb[0],bb[1]+bb[3]], #bl
# [bb[0]+bb[2],bb[1]+bb[3]]], #br
# dtype = "float32")
# # src = src.transpose()
# print('sshape '+str(src.shape))
# dst = np.dot(src,M[:,0:2]) +M[:,2]
# print('dest from npdot:{}'.format(dst))
src = np.array([
[[bb[0],bb[1]]], #tl
[[bb[0]+bb[2],bb[1]]], #tr
[[bb[0],bb[1]+bb[3]]], #bl
[[bb[0]+bb[2],bb[1]+bb[3]]]], #br
dtype = "float32")
dst2=cv2.transform(src,M)
# print('dst from cv2'+str(dst2))
dst_bb = [(int(i[0][0]),int(i[0][1])) for i in dst2]
# print('original dstbb '+str(dst_bb))
minx=dst_bb[0][0]
miny=dst_bb[0][1]
maxx=dst_bb[0][0]
maxy=dst_bb[0][1]
for pt in dst_bb:
if img_arr is not None:
cv2.circle(img_arr,pt,10,(200,155,100))
if pt[0]<minx:
minx=pt[0]
elif pt[0]>maxx:
maxx=pt[0]
if pt[1]<miny:
miny=pt[1]
elif pt[1]>maxy:
maxy=pt[1]
minx = max(0,minx)
miny = max(0,miny)
maxx = min(dims_hw[1],maxx)
maxy = min(dims_hw[0],maxy)
dst_bb=[minx,miny,maxx-minx,maxy-miny]
# print('dst_bb:'+str(dst_bb))
bbs_out.append(dst_bb)
# if img_arr is not None:
# cv2.imshow('circs',img_arr)
# cv2.waitKey(0)
# print('bbs out')
return bbs_out
def test_warp_bbs(annotation_file='/home/jeremy/projects/core/images/female1_yololabels.txt',
img_file='/home/jeremy/projects/core/images/female1.jpg'):
bbs,img_arr = read_various_training_formats.inspect_yolo_annotation(annotation_file,img_file)
orig_img = copy.copy(img_arr)
if img_arr is None:
print('none img arr')
return
center = (img_arr.shape[0]/2,img_arr.shape[1]/2)
angle = 20
scale = 1.3
offset_x = 20
offset_y = 30
M = cv2.getRotationMatrix2D(center, angle,scale)
# logging.debug('db G')
M[0,2]=M[0,2]+offset_x
M[1,2]=M[1,2]+offset_y
print('M:'+str(M))
height,width=img_arr.shape[0:2]
warped_image = cv2.warpAffine(img_arr,M,(width,height))
warped_bbs = warp_bbs(bbs,M,img_arr.shape[0:2],img_arr=warped_image)
for bb in warped_bbs:
print('bb0 '+str(bb))
warped_image = imutils.bb_with_text(warped_image, bb, 'out')
# for pt in bb:
# cv2.circle(warped_image,pt,10,(100,255,100))
# cv2.circle(warped_image,(bb[0,:]),10,(100,255,100))
# cv2.circle(warped_image,(bb[1,:]),10,(100,100,255))
# cv2.circle(warped_image,(bb[2,:]),10,(255,100,100))
# cv2.circle(warped_image,(bb[3,:]),10,(255,255,100))
# #read_various_training_formats.show_annotations_xywh(warped_bbs,warped_image)
cv2.imshow('out',warped_image)
cv2.waitKey(0)
def test_flip_bbs(imgfile='images/female1.jpg'):
img_arr=cv2.imread(imgfile)
if img_arr is None:
print('trouble getting '+imgfile)
return
h,w=img_arr.shape[0:2]
bb1=[10,20,50,70]
bb2=[100,200,100,200]
bblist =[bb1,bb2]
im2=cv2.flip(img_arr,1) #lr
im2=cv2.flip(im2,0) #ud
for bb in bblist:
cv2.rectangle(img_arr,(bb[0],bb[1]),(bb[0]+bb[2],bb[1]+bb[3]),[100,200,255],thickness=1)
cv2.imshow('orig',img_arr)
cv2.waitKey(0)
new_bbs = flip_bbs((h,w),bblist,flip_rl=True,flip_ud=True)
for bb in new_bbs:
cv2.rectangle(im2,(bb[0],bb[1]),(bb[0]+bb[2],bb[1]+bb[3]),[200,100,55],thickness=2)
cv2.imshow('flip',im2)
cv2.waitKey(0)
def crop_bblist(bblist_xywh,(height,width),(top,bottom,left,right)):
new_bblist = []
for bb in bblist_xywh:
x1=bb[0]
y1=bb[1]
x2=bb[0]+bb[2]
y2=bb[1]+bb[3]
new_x1=x1-left if x1>left else 0
new_y1=y1-top if y1>top else 0
new_x2=x2-left if right>x2-left else right
new_y2=y2-top if bottom>y2-top else bottom
new_w = new_x2-new_x1 if new_x1+ new_x2-new_x1 <= right-left else right-left-new_x1
new_h = new_y2-new_y1 if new_y1+new_y2-new_y1 <= bottom-top else bottom-top - new_y1
new_bb=[new_x1,new_y1,new_w,new_h]
new_bblist.append(new_bb)
return new_bblist
def test_crop_bblist(annotation_file='/home/jeremy/projects/core/images/female1_yololabels.txt',
img_file='/home/jeremy/projects/core/images/female1.jpg'):
bbs,img_arr = read_various_training_formats.inspect_yolo_annotation(annotation_file,img_file)
orig_img = copy.copy(img_arr)
if img_arr is None:
print('none img arr')
return
height,width=img_arr.shape[0:2]
top = 160
bottom = height - 360
left = 290
right = width -270
cropped_image = img_arr[top:bottom,left:right]
print('cropped size '+str(cropped_image.shape))
cropped_bbs = crop_bblist(bbs,(height,width),(top,bottom,left,right))
for bb in cropped_bbs:
print('bb0 '+str(bb))
assert(bb[0]>=0),'x1 < 0'
assert(bb[1]>=0),'y1 < 0'
assert(bb[2]+bb[0]<=cropped_image.shape[1]),'x2 > w'
assert(bb[3]+bb[1]<=cropped_image.shape[0]),'y2 > h'
cropped_image = imutils.bb_with_text(cropped_image, bb, 'cropped', boxcolor=[255, 255, 200])
# for pt in bb:
# cv2.circle(warped_image,pt,10,(100,255,100))
# cv2.circle(warped_image,(bb[0,:]),10,(100,255,100))
# cv2.circle(warped_image,(bb[1,:]),10,(100,100,255))
# cv2.circle(warped_image,(bb[2,:]),10,(255,100,100))
# cv2.circle(warped_image,(bb[3,:]),10,(255,255,100))
# #read_various_training_formats.show_annotations_xywh(warped_bbs,warped_image)
cv2.imshow('out',cropped_image)
cv2.waitKey(0)
def generate_image_onthefly(img_filename_or_nparray, gaussian_or_uniform_distributions='uniform',
max_angle = 5,
max_offset_x = 5,max_offset_y = 5,
max_scale=1.2,min_scale=0.8,
max_noise_level= 0,noise_type='gauss',
max_blur=0,
max_color_rotation=0,
do_mirror_lr=True,do_mirror_ud=False,
crop_size=None,
show_visual_output=False,save_visual_output=False,mask_filename_or_nparray=None,n_mask_channels=21,
bblist_xywh=None):
'''
generates a single randomly-perturbed variation of the image (rotate, translate, scale, flip, optional noise/blur/crop)
each parameter is drawn from a uniform or gaussian distribution bounded by the max_* arguments
set a max_* argument to 0/None to disable that transform
original image dimensions are preserved unless crop_size is given
:param img_filename:
:param gaussian_or_uniform_distributions:
:param max_angle:
:param max_offset_x:
:param max_offset_y:
:param max_scale: this is percent to enlarge/shrink image
:param max_noise_level:
:param noise_type:
:param max_blur:
:param do_mirror_lr:
:param do_mirror_ud:
:param output_dir:
:param show_visual_output:
:param suffix:
:return:
TODO
add color shifting
fix blur / noise
''' #
start_time = time.time()
if isinstance(img_filename_or_nparray,basestring):
# logging.debug('db A filename:'+img_filename_or_nparray)
img_arr = cv2.imread(img_filename_or_nparray)
else:
img_arr = img_filename_or_nparray
if img_arr is None:
logging.warning('didnt get input image '+str(img_filename_or_nparray))
return
mask_arr = None
if mask_filename_or_nparray is not None:
if isinstance(mask_filename_or_nparray,basestring):
# logging.debug('db A1 filename:'+mask_filename_or_nparray)
mask_arr = cv2.imread(mask_filename_or_nparray)
else:
mask_arr = mask_filename_or_nparray
if mask_arr is None:
logging.warning('didnt get mask image '+str(mask_filename_or_nparray))
return
#convert mask img to binary multichannel image
mask_arr = mask_to_multichannel(mask_arr,n_mask_channels)
#check that mask size and img size are equal
if mask_arr.shape[0]!=img_arr.shape[0] or mask_arr.shape[1]!= img_arr.shape[1]:
print('WARNING shape mismatch (no crop) in augment images, forcing reshape - imgshape {} maskshape {}'.format(img_arr.shape,mask_arr.shape))
angle = 0
offset_x = 0
offset_y = 0
scale = 1 #default to no scaling (a default of 0 would collapse the warp)
noise_level = 0
blur = 0
crop_dx = 0
crop_dy = 0
x_room = 0
y_room = 0
height,width = img_arr.shape[0:2]
if crop_size:
#WIP fix too small images here - embed into black bgnd of sufficient size
# if img_arr.shape[0]<
#calculate headroom left after crop. actual crop is random within that headroom iirc
x_room = width - crop_size[1]
y_room = height - crop_size[0]
if x_room<0 or y_room<0:
logging.debug('crop {} is larger than incoming image {} so I need to resize'.format(crop_size,img_arr.shape[0:2]))
if x_room<y_room:
factor = float(crop_size[1]+2)/width #assumes crop is x,y not y,x
resize_size = (int(height*factor),crop_size[1])
else:
factor = float(crop_size[0]+2)/height #add 1 since rounding can cause output to be one pix too small
resize_size = (crop_size[0],int(width*factor))
logging.warning('resizing {} to {} so as to accomodate crop to {}'.format(img_arr.shape[0:2],resize_size,crop_size))
img_arr= imutils.resize_keep_aspect(img_arr, output_size=resize_size, careful_with_the_labels=False) #img not labels
if(mask_arr is not None):
# print('uniques beffg '+str(np.unique(mask_arr)))
mask_arr= imutils.resize_keep_aspect(mask_arr, output_size=resize_size, careful_with_the_labels=True) #labels not img
# print('uniques aft '+str(np.unique(mask_arr)))
height,width = img_arr.shape[0:2]
x_room = width - crop_size[1]
y_room = height - crop_size[0]
if x_room<0 or y_room<0:
logging.warning('crop {} is still larger than incoming image {} !!!!! something went wrong'.format(crop_size,img_arr.shape[0:2]))
# logging.debug('crop size {} xroom {} yroom {}'.format(crop_size,x_room,y_room))
# if crop_size[0]!=img_arr.shape[0] or crop_size[1]!= img_arr.shape[1]:
## print('WARNING shape mismatch with crop in augment images, forcing reshape!')
# print('img shape wxh {}x{} cropsize {}x{}'.format(img_arr.shape[0],img_arr.shape[1],crop_size[0],crop_size[1]))
eps = 0.1
if gaussian_or_uniform_distributions == 'gaussian':
if max_angle:
angle = np.random.normal(0,max_angle)
if max_offset_x:
offset_x = np.random.normal(0,max_offset_x)
if max_offset_y:
offset_y = np.random.normal(0,max_offset_y)
if max_scale and min_scale:
# print('gscale limits {} {}'.format(1,np.abs(1.0-max_scale)/2))
scale = max(eps,np.random.normal((max_scale+min_scale)/2.0,np.abs(max_scale-min_scale)/2.0)) #draw around the midpoint of min/max scale, make sure scale >= eps
elif max_scale:
# print('gscale limits {} {}'.format(1,np.abs(1.0-max_scale)/2))
scale = max(eps,np.random.normal(1,np.abs(1.0-max_scale)/2.0)) #make sure scale >= eps
if max_noise_level:
noise_level = max(0,np.random.normal(0,max_noise_level)) #noise >= 0
if max_blur:
blur = max(0,np.random.normal(0,max_blur)) #blur >= 0
if x_room:
crop_dx = max(-float(x_room)/2,int(np.random.normal(0,float(x_room)/2)))
crop_dx = min(crop_dx,float(x_room)/2)
if y_room:
crop_dy = max(-float(y_room)/2,int(np.random.normal(0,float(y_room)/2)))
crop_dy = min(crop_dy,float(y_room)/2)
else: #uniform distributed random numbers
if max_offset_x:
offset_x = np.random.uniform(-max_offset_x,max_offset_x)
if max_offset_y:
offset_y = np.random.uniform(-max_offset_y,max_offset_y)
if max_scale and min_scale:
# print('gscale limits {} {}'.format(1,np.abs(1.0-max_scale)/2))
scale = np.random.uniform(min_scale,max_scale) #make sure scale >= eps
elif max_scale:
# print('uscale limits {} {}'.format(1-np.abs(1-max_scale),1+np.abs(1-max_scale)))
scale = np.random.uniform(1-np.abs(1-max_scale),1+np.abs(1-max_scale))
if max_noise_level:
noise_level = np.random.uniform(0,max_noise_level)
if max_blur:
blur = np.random.uniform(0,max_blur)
if x_room:
crop_dx = int(np.random.uniform(0,float(x_room)/2))
if y_room:
crop_dy = int(np.random.uniform(0,float(y_room)/2))
if max_angle:
angle = np.random.uniform(-max_angle,max_angle)
if len(img_arr.shape) == 3:
depth = img_arr.shape[2]
else:
depth = 1
center = (width/2,height/2)
# logging.debug('db C')
flip_lr = 0
flip_ud = 0
if do_mirror_lr:
flip_lr = np.random.randint(2)
if do_mirror_ud:
flip_ud = np.random.randint(2)
# logging.debug('augment w {} h {} cropdx {} cropdy {} cropsize {} depth {} fliplr {} flipdud {} center {} angle {} scale {} offx {} offy {}'.format(
# width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,center,angle,scale,offset_x,offset_y))
img_arr = do_xform(img_arr,width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,blur,noise_level,center,angle,scale,offset_x,offset_y)
# if show_visual_output:
# logging.debug('img_arr shape:'+str(img_arr.shape))
# cv2.imshow('xformed',img_arr)
# k = cv2.waitKey(0)
if mask_arr is not None: #do xform to mask
# logging.debug('doing mask augmentation')
mask_arr =do_xform(mask_arr,width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,blur,noise_level,center,angle,scale,offset_x,offset_y)
mask_arr = multichannel_to_mask(mask_arr)
if save_visual_output:
lst = [random.choice(string.ascii_letters + string.digits) for n in xrange(30)]
name = "".join(lst)
cv2.imwrite(name+'.jpg',img_arr)
maskname = name+'_mask.png'
cv2.imwrite(maskname,mask_arr)
# logging.debug('augment output:img arr size {} mask size {}'.format(img_arr.shape,mask_arr.shape))
if bblist_xywh is not None:
bblist_xywh = do_xform_bblist_xywh(bblist_xywh,width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,blur,noise_level,center,angle,scale,offset_x,offset_y)
if save_visual_output:
lst = [random.choice(string.ascii_letters + string.digits) for n in xrange(10)]
name = "".join(lst)+'.jpg'
cv2.imwrite(name,img_arr)
if mask_arr is not None:
cv2.imwrite(name.replace('.jpg','.png'),mask_arr)
if show_visual_output:
img_copy = copy.copy(img_arr)
if mask_arr is not None:
labels = {i:str(i) for i in range(int(np.max(mask_arr))+1)}
imutils.show_mask_with_labels(mask_arr, labels, original_image=img_arr, visual_output=True)
else:
if bblist_xywh:
for bb in bblist_xywh:
cv2.rectangle(img_copy,(bb[0],bb[1]),(bb[0]+bb[2],bb[1]+bb[3]),[255,50,100],thickness=2)
cv2.imshow('augmented',img_copy)
cv2.waitKey(0)
#assuming that there is either mask or bblist not both
if mask_arr is not None:
return img_arr,mask_arr
elif bblist_xywh is not None:
return img_arr,bblist_xywh
return img_arr
def do_xform(img_array,width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,blur,noise_level,center,angle,scale,offset_x,offset_y):
#todo this can all be cleaned up by putting more of the generate_image_on_thefly code here
# logging.debug('db D')
if flip_lr:
# logging.debug('db D1')
img_array = cv2.flip(img_array,1)
# logging.debug('db D2')
if flip_ud:
img_array = cv2.flip(img_array,0)
# logging.debug('db E')
# Python: cv2.transform(src, m[, dst]) -> dst
#http://docs.opencv.org/2.4/modules/core/doc/operations_on_arrays.html#void%20transform%28InputArray%20src,%20OutputArray%20dst,%20InputArray%20m%29
if blur: #untested
img_array = cv2.blur(img_array,(int(blur),int(blur))) #fails if blur is nonint or 0
if noise_level: #untested
img_array = add_noise(img_array,'gauss',noise_level) #noise_type is not passed in to do_xform, so assume gaussian noise here
# logging.debug('db F')
# print('center {0} angle {1} scale {2} h {3} w {4} dx {5} dy {6} noise {7} blur {8}'.format(center,angle, scale,height,width,offset_x,offset_y,noise_level,blur))
M = cv2.getRotationMatrix2D(center, angle,scale)
# logging.debug('db G')
M[0,2]=M[0,2]+offset_x
M[1,2]=M[1,2]+offset_y
# print('M='+str(M))
# xformed_img_arr = cv2.warpAffine(noised, M, (width,height),dst=dest,borderMode=cv2.BORDER_TRANSPARENT)
img_array = cv2.warpAffine(img_array, M, (width,height),borderMode=cv2.BORDER_REPLICATE)
if crop_size:
if crop_dx is None:
crop_dx = 0
if crop_dy is None:
crop_dy = 0
left = int(round(max(0,round(float(width-crop_size[1])/2) - crop_dx)))
right = int(round(left + crop_size[1]))
top = int(round(max(0,round(float(height-crop_size[0])/2) - crop_dy)))
bottom = int(round(top + crop_size[0]))
logging.debug('incoming wxh {}x{} cropsize {}'.format(width,height,crop_size))
# print('left {} right {} top {} bottom {} crop_dx {} crop_dy {} csize {} xroom {} yroom {}'.format(left,right,top,bottom,crop_dx,crop_dy,crop_size,x_room,y_room))
if depth!=1:
img_array = img_array[top:bottom,left:right,:]
#print img_arr.shape
else:
img_array = img_array[top:bottom,left:right]
return img_array
# raw_input('enter to cont')
def do_xform_bblist_xywh(bb_list_xywh,width,height,crop_dx,crop_dy,crop_size,depth,flip_lr,flip_ud,blur,noise_level,center,angle,scale,offset_x,offset_y):
#todo this can all be cleaned up by putting more of the generate_image_on_thefly code here
# logging.debug('db D')
logging.debug('augmenting bbs w {} h {} dx {} dy {} crop {} lr {} ud {} center {} angle {} scale {} offx {} offy {}'.
format(width,height,crop_dx,crop_dy,crop_size,flip_lr,flip_ud,center,angle,scale,offset_x,offset_y))
if flip_lr:
# logging.debug('db D1')
flip_bbs((height,width),bb_list_xywh,flip_rl=True,flip_ud=False)
# img_array = cv2.flip(img_array,1)
# logging.debug('db D2')
if flip_ud:
flip_bbs((height,width),bb_list_xywh,flip_rl=False,flip_ud=True)
M = cv2.getRotationMatrix2D(center, angle,scale)
M[0,2]=M[0,2]+offset_x
M[1,2]=M[1,2]+offset_y
bb_list_xywh = warp_bbs(bb_list_xywh,M,(height,width))
if crop_size:
if crop_dx is None:
crop_dx = 0
if crop_dy is None:
crop_dy = 0
left = int(round(max(0,round(float(width-crop_size[1])/2) - crop_dx)))
right = int(round(left + crop_size[1]))
top = int(round(max(0,round(float(height-crop_size[0])/2) - crop_dy)))
bottom = int(round(top + crop_size[0]))
logging.debug('incoming wxh {}x{} cropsize {}'.format(width,height,crop_size))
# print('left {} right {} top {} bottom {} crop_dx {} crop_dy {} csize {} xroom {} yroom {}'.format(left,right,top,bottom,crop_dx,crop_dy,crop_size,x_room,y_room))
bb_list_xywh = crop_bblist(bb_list_xywh,(height,width),(top,bottom,left,right))
return bb_list_xywh
def generate_images_for_directory(fulldir,**args):
only_files = [f for f in os.listdir(fulldir) if os.path.isfile(os.path.join(fulldir, f))]
for a_file in only_files:
full_filename = os.path.join(fulldir,a_file)
generate_images(full_filename,**args)
def generate_masks(img_filename, **kwargs):
img_arr = cv2.imread(img_filename,cv2.IMREAD_GRAYSCALE)
if img_arr is None:
logging.warning('didnt get input image '+str(img_filename))
return
print('shape:'+str(img_arr.shape))
if len(img_arr.shape) == 3:
logging.warning('got 3 channel image '+str(img_filename)+', using first chan')
img_arr = img_arr[:,:,0]
if img_arr is None:
logging.warning('didnt get input image '+str(img_filename))
return
h,w = img_arr.shape[0:2]
uniques = np.unique(img_arr)
n_uniques=len(uniques)
binary_masks = np.zeros([h,w,n_uniques])
for i in range(0,n_uniques):
binary_masks[:,:,i] = img_arr[:,:]==uniques[i]
cv2.imshow('mask'+str(i),binary_masks[:,:,i])
transformed_mask = transform_image(binary_masks[:,:,i],kwargs) #NOTE: transform_image is not defined in this module; generate_masks appears unfinished
cv2.waitKey(0)
def generate_images_for_directory_of_directories(dir_of_dirs,filter= None,**args):
only_dirs = [dir for dir in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs,dir)) ]
logging.debug(str(only_dirs))
if filter:
only_dirs = [dir for dir in only_dirs if filter in dir ]
logging.debug(str(only_dirs))
for a_dir in only_dirs:
full_dir = os.path.join(dir_of_dirs,a_dir)
generate_images_for_directory(full_dir,**args)
def clear_underutilized_bins(img_arr):
h = np.histogram(img_arr,bins=57)
print(h)
def add_noise(image, noise_typ,level):
'''
Parameters
----------
image : ndarray
Input image data. Will be converted to float.
mode : str
One of the following strings, selecting the type of noise to add:
'gauss' Gaussian-distributed additive noise.
'poisson' Poisson-distributed noise generated from the data.
's&p' Replaces random pixels with 0 or 1.
'speckle' Multiplicative noise using out = image + n*image,where
n is uniform noise with specified mean & variance.
'''
print('adding noise type {0} level {1}'.format(noise_typ,level))
if noise_typ is None:
return image
if noise_typ == "gauss":
row,col,ch= image.shape
print('row {} col {} ch {}'.format(row,col,ch))
mean = 0
var = level*255
sigma = var**0.5
print('sigma {0} mean {1}'.format(sigma,mean))
gauss = np.random.normal(mean,sigma,(row,col,ch))
# z=np.multiply(gauss,0)
# gauss = np.maximum(gauss,z)
gauss = gauss.reshape(row,col,ch)
# cv2.imshow('orig',gauss)
# k = cv2.waitKey(0)
noisy = (image + gauss)
noisy = noisy.astype(dtype=np.uint8)
return noisy
elif noise_typ == "s&p":
s_vs_p = 0.5
amount = level
out = image
# Salt mode
num_salt = np.ceil(amount * image.size * s_vs_p)
coords = [np.random.randint(0, i - 1, int(num_salt))
for i in image.shape]
out[tuple(coords)] = 255 #set the randomly chosen elements to max (salt)
# Pepper mode
num_pepper = np.ceil(amount* image.size * (1. - s_vs_p))
coords = [np.random.randint(0, i - 1, int(num_pepper)) for i in image.shape]
out[tuple(coords)] = 0 #set the randomly chosen elements to 0 (pepper)
return out
elif noise_typ == "poisson":
vals = len(np.unique(image))
vals = 2 ** np.ceil(np.log2(vals))
noisy = np.random.poisson(image * vals) / float(vals)
return noisy
elif noise_typ =="speckle":
row,col,ch = image.shape
gauss = np.random.randn(row,col,ch)
gauss = gauss.reshape(row,col,ch)
noisy = image + image * gauss
return noisy
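# Hedged demo (added; not part of the original module): apply gaussian noise to a toy
# uniform-gray image. Purely illustrative; defined only, never called here.
def add_noise_demo():
    toy = np.full((100, 100, 3), 128, dtype=np.uint8)
    noisy = add_noise(toy, 'gauss', 0.05)
    print('noisy dtype {} shape {}'.format(noisy.dtype, noisy.shape))
    return noisy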
def augment_yolo_bbs(file_list='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/hls/voc_rio_udacity_test.txt',
visual_output=False,replace_this=None,with_this=None,labels_dir=None,n_augmentations=3,path_filter=None,path_antifilter='rio'):
'''
takes a bunch of yolos and augments using generate_image_onthefly, right now for generating smaller objects
:param file_list:
:param visual_output:
:param replace_this:
:param with_this:
:param labels_dir:
:param n_augmentations:
:param path_filter: require filepaths to contain this string (to only augment certain dbs)
:return:
'''
max_angle=7
max_scale=1.1
min_scale=0.7
with open(file_list,'r') as fp:
lines = fp.readlines()
fp.close()
print('{} lines in {}'.format(len(lines),file_list))
for line in lines:
if replace_this is not None:
line = line.replace(replace_this,with_this)
line = line.replace('\n','')
print('got line '+str(line))
if path_filter and not path_filter in line:
logging.debug('didnt find {} in {}, skipping'.format(path_filter,line))
continue
if path_antifilter and path_antifilter in line:
logging.debug('found {} in {}, skipping'.format(path_antifilter,line))
continue
tgdict = read_various_training_formats.yolo_to_tgdict(img_file=line,visual_output=visual_output,classlabels=constants.hls_yolo_categories,labels_dir_suffix=labels_dir)
if tgdict is None:
logging.warning('couldnt get dict for {}, continuing to next'.format(line))
continue
annotations = tgdict['annotations']
filename = tgdict['filename']
logging.debug('file {}\nannotations {}'.format(filename,annotations))
bbox_list = []
if annotations == []:
logging.info('no annotations, skipping')
continue
for annotation in annotations:
bbox_xywh=annotation['bbox_xywh']
bbox_list.append(bbox_xywh)
for n in range(n_augmentations):
logging.debug('\norig bbox list:'+str(bbox_list))
bbox_to_send=copy.deepcopy(bbox_list) #func can modify arg it seems
img_arr,new_bbox_list = generate_image_onthefly(filename,show_visual_output=visual_output,bblist_xywh=bbox_to_send,max_angle=max_angle,max_scale=max_scale,min_scale=min_scale)
logging.debug('new bbox list:'+str(new_bbox_list))
if img_arr is None:
logging.warning('couldnt get {}, continuing to next'.format(filename))
continue
#write image
if line[-5:] == '.jpeg':
suffix = line[-5:]
else:
suffix = line[-4:] #keep augmented and orig images of same type, people
new_imgfile=line[:-len(suffix)]+'_aug'+str(n)+suffix #slice off the suffix; str.strip(suffix) would also eat matching leading/trailing characters
new_lblfile=line[:-len(suffix)]+'_aug'+str(n)+'.txt'
print('saving new image {}'.format(new_imgfile))
cv2.imwrite(new_imgfile,img_arr)
#write annotation
tgdict['filename']=new_imgfile
for i in range(len(annotations)):
annotations[i]['bbox_xywh']=new_bbox_list[i]
logging.debug('tgdict {}'.format(tgdict))
read_various_training_formats.tgdict_to_yolo(tgdict)
#write file list
with open(file_list,'a') as fp2:
fp2.write(new_imgfile+'\n')
fp2.close()
# raw_input('ret to cont')
if __name__=="__main__":
print('running main')
img_filename = '../images/female1.jpg'
img_filename = '../images/female1_256x256.jpg'
image_dir = '/home/jeremy/image_dbs/colorful_fashion_parsing_data/images/train_200x150'
label_dir = '/home/jeremy/image_dbs/colorful_fashion_parsing_data/labels_200x150'
img = '/media/jeremy/9FBD-1B00/data/olympics/'
file_list = '/data/jeremy/image_dbs/hls/insecam/07.05.2015_cameras_01-73filelist.txt'
# augment_yolo_bbs(file_list='/data/jeremy/image_dbs/hls/kitti/training/yolo_train_test.txt',visual_output=True,
# replace_this='/mnt/',with_this='/data/jeremy/image_dbs/')
augment_yolo_bbs(file_list=file_list,visual_output=False)
# test_crop_bblist()
# test_warp_bbs()
if(0):
dir = '/home/jeremy/Dropbox/tg/pixlabels/test_256x256_novariations'
images = [f for f in os.listdir(dir)]
for image in images:
label = image[:-4]+'.png'
print('image {} label {}'.format(image,label))
labelfile = os.path.join('/home/jeremy/Dropbox/tg/pixlabels/labels_256x256_novariations',label)
imfile = os.path.join(dir,image)
if os.path.isfile(imfile) and os.path.isfile(labelfile):
in1 = cv2.imread(imfile)
in2 = cv2.imread(labelfile)
for i in range(10):
out1,out2 = generate_image_onthefly(in1, mask_filename_or_nparray=in2)
cv2.imwrite('out1.jpg',out1)
cv2.imwrite('out2.png',out2)
imutils.show_mask_with_labels('out2.png', labels=constants.ultimate_21, original_image='out1.jpg', visual_output=True)
if(0):
in1 = np.zeros([500,500,3])
in2 = np.zeros_like(in1,dtype=np.uint8)
for i in range(1,21):
color = (np.random.randint(256),np.random.randint(256),np.random.randint(256))
position = (50+np.random.randint(400),50+np.random.randint(400))
radius = np.random.randint(200)
cv2.circle(in1,position,radius,color=color,thickness=10)
cv2.circle(in2,position,radius,(i,i,i),thickness=10)
pt1 = (50+np.random.randint(400),50+np.random.randint(400))
pt2 = (pt1[0]+np.random.randint(100),pt1[1]+np.random.randint(100))
cv2.rectangle(in1,pt1,pt2,color,thickness=10)
cv2.rectangle(in2,pt1,pt2,color=(i,i,i),thickness=10)
cv2.imwrite('in1.jpg',in1)
cv2.imwrite('in2.png',in2)
imutils.show_mask_with_labels('in2.png', labels=constants.ultimate_21, visual_output=True, original_image='in1.jpg')
cv2.destroyAllWindows()
while(1):
# in2 = cv2.imread('/home/jeremy/Pictures/Berlin_Naturkundemuseum_Dino_Schaedel_posterized.png')
out1,out2 = generate_image_onthefly(in1, mask_filename_or_nparray=in2)
cv2.imwrite('out1.jpg',out1)
cv2.imwrite('out2.png',out2)
imutils.show_mask_with_labels('out2.png', labels=constants.ultimate_21, original_image='out1.jpg', visual_output=True)
print('orig uniques {} nonzero {} mask uniques {} nonzero {} '.format(np.unique(out1),np.count_nonzero(out1),np.unique(out2),np.count_nonzero(out2)))
print('')
print('')
cv2.destroyAllWindows()
while(0):
generate_image_onthefly(img_filename, gaussian_or_uniform_distributions='uniform',
max_angle = 5,
max_offset_x = 10,max_offset_y = 10,
max_scale=1.2,
max_noise_level=0,noise_type='gauss',
max_blur=0,
do_mirror_lr=True,do_mirror_ud=False,
crop_size=(224,224),
show_visual_output=True)
if 0:
generate_images_for_directory(image_dir,
max_angle = 10,n_angles=2,
max_offset_x = 10,n_offsets_x=2,
max_offset_y = 10, n_offsets_y=2,
max_scale=1.3, n_scales=2,
noise_level=0.1,noise_type='gauss',n_noises=0,
max_blur=5, n_blurs=0,
do_mirror_lr=True,do_mirror_ud=False,do_bb=False,suffix='.jpg')
generate_images_for_directory(label_dir,
max_angle = 10,n_angles=2,
max_offset_x = 10,n_offsets_x=2,
max_offset_y = 10, n_offsets_y=2,
max_scale=1.3, n_scales=2,
noise_level=0.1,noise_type='gauss',n_noises=0,
max_blur=5, n_blurs=0,
do_mirror_lr=True,do_mirror_ud=False,do_bb=False,suffix='.png')
# generate_images(img_filename, max_angle = 3,n_angles=2,
# max_offset_x = 50,n_offsets_x=2,
# max_offset_y = 50, n_offsets_y=2,
# max_scale=1.2, n_scales=2,
# noise_level=0.1,noise_type='gauss',n_noises=2,
# max_blur=5, n_blurs=2,
# do_mirror_lr=True,do_mirror_ud=False,output_dir='snorb')
```
#### File: ml-support-code-in-python/nn_utils/read_various_training_formats.py
```python
from __future__ import print_function
'''
generally for reading db's having bb's or pixlevel
pascal voc
kitti
mapillary
http://host.robots.ox.ac.uk/pascal/VOC/databases.html#VOC2005_2
'''
__author__ = 'jeremy'
import os
import cv2
import sys
import re
import pdb
import csv
import xml.etree.ElementTree as ET
import pickle
import os
from os import listdir, getcwd
from os.path import join
import json
import random
import logging
logging.basicConfig(level=logging.DEBUG)
from multiprocessing import Pool
from functools import partial
from itertools import repeat
import copy
import numpy as np
import time
import random
#for mapillary, got lazy and kept the original PIL code instead of switching to cv2
import json
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from trendi import Utils
from trendi.classifier_stuff.caffe_nns import create_nn_imagelsts
from trendi.utils import imutils
from trendi import constants
from trendi import kassper
from trendi import background_removal
#from trendi.utils import augment_images
def kitti_to_tgdict(label_dir='/data/jeremy/image_dbs/hls/kitti/training/label_2',
image_dir = '/data/jeremy/image_dbs/hls/kitti/training/image_2',visual_output=True,
write_json=True,jsonfile=None,img_suffix='.png',label_suffix='.txt'):
'''
reads KITTI-format object labels,
which have a file for each image, filenames 000000.txt, 000001.txt etc; each file has one line per object like:
Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01
in format:
1 type Describes the type of object: 'Car', 'Van', 'Truck',
'Pedestrian', 'Person_sitting', 'Cyclist', 'Tram',
'Misc' or 'DontCare'
1 truncated Float from 0 (non-truncated) to 1 (truncated), where
truncated refers to the object leaving image boundaries
1 occluded Integer (0,1,2,3) indicating occlusion state:
0 = fully visible, 1 = partly occluded
2 = largely occluded, 3 = unknown
1 alpha Observation angle of object, ranging [-pi..pi]
4 bbox 2D bounding box of object in the image (0-based index):
contains left, top, right, bottom pixel coordinates
3 dimensions 3D object dimensions: height, width, length (in meters)
3 location 3D object location x,y,z in camera coordinates (in meters)
1 rotation_y Rotation ry around Y-axis in camera coordinates [-pi..pi]
1 score Only for results: Float, indicating confidence in
detection, needed for p/r curves, higher is better.
:param dir:
:return:
'''
#todo - use perspective transform (useful for hls...) along the lines of below, maybe use semirandom trapezoid for 4 points
# pts1 = np.float32([[56,65],[368,52],[28,387],[389,390]])
# pts2 = np.float32([[0,0],[300,0],[0,300],[300,300]])
# M = cv2.getPerspectiveTransform(pts1,pts2)
# dst = cv2.warpPerspective(img,M,(300,300))
files = [os.path.join(label_dir,f) for f in os.listdir(label_dir)]
files.sort()
types=[]
all_annotations = []
n=0
n_tot = len(files)
for f in files:
# filename = os.path.join(dir,'%06d.txt'%i)
n=n+1
print('{}/{} {}'.format(n,n_tot,f))
if not os.path.exists(f):
print('{} not found'.format(f))
continue
result_dict = {}
# result_dict['data']=[]
f_dir = os.path.dirname(f)
par_dir = Utils.parent_dir(f_dir)
f_base = os.path.basename(f)
img_base = f_base.replace(label_suffix,img_suffix)
img_file = os.path.join(image_dir,img_base)
result_dict['filename']=img_file
result_dict['annotations']=[]
img_arr = cv2.imread(img_file)
if img_arr is None:
logging.warning('could not get img arr for {}'.format(img_file))
h,w=10000,10000
else:
result_dict['dimensions_h_w_c'] = img_arr.shape
h,w=img_arr.shape[0:2]
print('got image h{} x w{} '.format(h,w))
with open(f,'r' ) as fp:
lines = fp.readlines()
n_line=0
n_lines=len(lines)
for line in lines:
n_line=n_line+1
print('{}/{} '.format(n_line,n_lines)+ line)
try:
elements = line.split()
type=elements[0]
truncated=elements[1]
occluded=elements[2]
alpha=elements[3]
x1=int(float(elements[4]))
y1=int(float(elements[5]))
x2=int(float(elements[6]))
y2=int(float(elements[7]))
except:
print("error getting elements from line:", sys.exc_info()[0])
continue #skip this line rather than use possibly-unbound values below
print('{} {} x1 {} y1 {} x2 {} y2 {}'.format(f,type,x1,y1,x2,y2))
x1=max(0,x1)
y1=max(0,y1)
x2=min(w,x2)
y2=min(h,y2)
tg_type = constants.kitti_to_hls_map[type]
print('converted: {} x1 {} y1 {} x2 {} y2 {}'.format(tg_type,x1,y1,x2,y2))
if tg_type is None:
logging.info('tgtype for {} is None, moving on'.format(type))
continue
bb_xywh = [x1,y1,(x2-x1),(y2-y1)]
if not type in types: #this is keeping track of all types seen in case above list is incomplete
types.append(type)
print('types:'+str(types))
object_dict={}
object_dict['bbox_xywh'] = bb_xywh
object_dict['object']= tg_type
object_dict['original_object'] = type
result_dict['annotations'].append(object_dict)
if visual_output:
print('drawing bb')
img_arr=imutils.bb_with_text(img_arr,bb_xywh,tg_type)
if visual_output:
cv2.imshow('kitti2tgdict',img_arr)
cv2.waitKey(0)
all_annotations.append(result_dict)
if write_json:
print('writing json')
if jsonfile == None:
labeldir_alone = label_dir.split('/')[-1]
par_dir = Utils.parent_dir(label_dir)
jsonfile = os.path.join(par_dir,labeldir_alone+'.json')
print('jsonfile:'+str(jsonfile))
Utils.ensure_file(jsonfile)
with open(jsonfile,'w') as fp:
json.dump(all_annotations,fp,indent=4)
fp.close()
def read_rmptfmp_write_yolo(images_dir='/data/jeremy/image_dbs/hls/data.vision.ee.ethz.ch',gt_file='refined.idl',class_no=0,visual_output=False,label_destination='labels'):
'''
reads from gt for dataset from https://data.vision.ee.ethz.ch/cvl/aess/dataset/ (pedestrians only)
'"left/image_00000001.png": (212, 204, 232, 261):-1, (223, 181, 259, 285):-1, (293, 151, 354, 325):-1, (452, 208, 479, 276):-1, (255, 219, 268, 249):-1, (280, 219, 291, 249):-1, (267, 246, 279, 216):-1, (600, 247, 584, 210):-1;'
writes to yolo format
'''
# Define the codec and create VideoWriter object
# not necessary fot function , just wanted to track boxes
# fourcc = cv2.VideoWriter_fourcc(*'XVID')
# out = cv2.VideoWriter('output.avi',fourcc, 20.0, (640,480))
# pdb.set_trace()
with open(os.path.join(images_dir,gt_file),'r') as fp:
lines = fp.readlines()
for line in lines:
print(line)
elements = re.findall(r"[-\w']+",line)
print(elements)
# elements = line.split
imgname = line.split()[0].replace('"','').replace(':','').replace('\n','')#.replace('.png','_0.png')
# print('img name '+str(imgname))
imgname = os.path.basename(imgname) #ignore dir referred to in gt file and use mine
if imgname[-6:] != '_0.png':
print('imgname {} has no _0 at end'.format(imgname))
imgname = imgname.replace('.png','_0.png')
fullpath=os.path.join(images_dir,imgname)
if not os.path.isfile(fullpath):
print('couldnt find {}'.format(fullpath))
continue
print('reading {}'.format(fullpath))
img_arr = cv2.imread(fullpath)
img_dims = (img_arr.shape[1],img_arr.shape[0]) #widthxheight
png_element_index = elements.index('png')
bb_list_xywh = []
ind = png_element_index+1
n_bb=0
while ind<len(elements):
x1=int(elements[ind])
if x1 == -1:
ind=ind+1
x1=int(elements[ind])
y1=int(elements[ind+1])
x2=int(elements[ind+2])
y2=int(elements[ind+3])
ind = ind+4
if y2 == -1:
print('XXX warning, got a -1 XXX')
n_bb += 1
bb = Utils.fix_bb_x1y1x2y2([x1,y1,x2,y2])
bb_xywh = [bb[0],bb[1],bb[2]-bb[0],bb[3]-bb[1]]
bb_list_xywh.append(bb_xywh)
print('ind {} x1 {} y1 {} x2 {} y2 {} bbxywh {}'.format(ind,x1,y1,x2,y2,bb_xywh))
if visual_output:
cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
print('{} bounding boxes for this image (png {} len {} '.format(n_bb,png_element_index,len(elements)))
print('sending {} for writing'.format(bb_list_xywh))
write_yolo_labels(fullpath,bb_list_xywh,class_no,img_dims)
if visual_output:
cv2.imshow('img',img_arr)
cv2.waitKey(0)
# out.write(img_arr)
# out.release()
if visual_output:
cv2.destroyAllWindows()
def write_yolo_labels(img_path,bb_list_xywh,class_number,image_dims,destination_dir=None,overwrite=True):
'''
output : for yolo - https://pjreddie.com/darknet/yolo/
Darknet wants a .txt file for each image with a line for each ground truth object in the image that looks like:
<object-class> <x> <y> <width> <height>
where those are percentages...
it looks like yolo makes an assumption abt where images and label files are, namely in parallel dirs. named:
JPEGImages labels
and a train.txt file pointing to just the images - and the label files are same names with .txt instead of .jpg
:param img_path:
:param bb_xywh:
:param class_number:
:param destination_dir:
:return:
'''
if destination_dir is None:
destination_dir = Utils.parent_dir(os.path.basename(img_path))
destination_dir = os.path.join(destination_dir,'labels')
Utils.ensure_dir(destination_dir)
img_basename = os.path.basename(img_path)
img_basename = img_basename.replace('.jpg','.txt').replace('.png','.txt').replace('.bmp','.txt')
destination_path=os.path.join(destination_dir,img_basename)
if overwrite:
write_mode = 'w'
else:
write_mode = 'a'
with open(destination_path,write_mode) as fp:
for bb_xywh in bb_list_xywh:
x_center = bb_xywh[0]+bb_xywh[2]/2.0
y_center = bb_xywh[1]+bb_xywh[3]/2.0
x_p = float(x_center)/image_dims[0]
y_p = float(y_center)/image_dims[1]
w_p = float(bb_xywh[2])/image_dims[0]
h_p = float(bb_xywh[3])/image_dims[1]
line = str(class_number)+' '+str(round(x_p,4))+' '+str(round(y_p,4))+' '+str(round(w_p,4))+' '+str(round(h_p,4))+'\n'
print('writing "{}" to {}'.format(line[:-1],destination_path))
fp.write(line)
fp.close()
# if not os.exists(destination_path):
# Utils.ensure_file(destination_path)
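# Hedged usage sketch (added; not part of the original module): write one yolo label
# file for a single hypothetical image and box. Paths, class index and dimensions are
# made up for illustration; defined only, never called here.
def write_yolo_labels_demo():
    fake_img_path = '/tmp/example.jpg'
    bb_list_xywh = [[50, 40, 100, 200]]   # one box: x, y, w, h in pixels
    image_dims = (640, 480)               # (width, height), matching the usage above
    write_yolo_labels(fake_img_path, bb_list_xywh, class_number=0,
                      image_dims=image_dims, destination_dir='/tmp/labels')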
def write_yolo_trainfile(image_dir,trainfile='train.txt',filter='.png',split_to_test_and_train=0.05,check_for_bbfiles=True,bb_dir=None,labels_dir=None):
'''
this is just a list of full paths to the training images. the labels apparently need to be in parallel dir(s) called 'labels'
note this appends to trainfile , doesnt overwrite , to facilitate building up from multiple sources
:param dir:
:param trainfile:
:return:
'''
if filter:
files = [os.path.join(image_dir,f) for f in os.listdir(image_dir) if filter in f]
else:
files = [os.path.join(image_dir,f) for f in os.listdir(image_dir)]
print('{} files w filter {} in {}'.format(len(files),filter,image_dir))
if check_for_bbfiles:
if bb_dir == None:
if labels_dir:
labeldir = os.path.basename(image_dir)+labels_dir
else:
labeldir = os.path.basename(image_dir)
bb_dir = os.path.join(Utils.parent_dir(image_dir),labeldir)
print('checking for bbs in '+bb_dir)
if len(files) == 0:
print('no files fitting {} in {}, stopping'.format(filter,image_dir))
return
count = 0
with open(trainfile,'a+') as fp:
for f in files:
if check_for_bbfiles:
if filter:
bbfile = os.path.basename(f).replace(filter,'.txt')
else:
bbfile = os.path.basename(f)[:-4]+'.txt'
bbpath = os.path.join(bb_dir,bbfile)
if os.path.exists(bbpath):
fp.write(f+'\n')
count +=1
else:
print('bbfile {} describing {} not found'.format(bbpath,f))
else:
fp.write(f+'\n')
count += 1
print('wrote {} files to {}'.format(count,trainfile))
if split_to_test_and_train is not None:
create_nn_imagelsts.split_to_trainfile_and_testfile(trainfile,fraction=split_to_test_and_train)
def yolo_to_tgdict(txt_file=None,img_file=None,visual_output=False,img_suffix='.jpg',classlabels=constants.hls_yolo_categories,labels_dir_suffix=None,dont_write_blank=True):
'''
format is
<object-class> <x> <y> <width> <height>
where x,y,w,h are relative to image width, height. It looks like x,y are bb center, not topleft corner - see voc_label.py in .convert(size,box) func
:param txt_file:
:return: a 'tgdict' which looks like
{ "dimensions_h_w_c": [360,640,3], "filename": "/data/olympics/olympics/9908661.jpg",
"annotations": [
{
"bbox_xywh": [89, 118, 64,44 ],
"object": "car"
} ... ] }
using convention that label dir is at same level as image dir and has 'labels' tacked on to end of dirname
'''
# img_file = txt_file.replace('.txt','.png')
logging.debug('yolo to tgdict {} {} '.format(txt_file,img_file))
if txt_file is None and img_file is None:
logging.warning('yolo to tfdict got no txtfile nor imgfile')
return
if txt_file is not None and img_file is None:
txt_dir = os.path.dirname(txt_file)
par_dir = Utils.parent_dir(txt_file)
if 'labels' in par_dir:
img_dir = par_dir.replace('labels','')
img_name = os.path.basename(txt_file).replace('.txt',img_suffix)
img_file = os.path.join(img_dir,img_name)
logging.debug('looking for image file '+img_file)
elif img_file is not None and txt_file is None:
img_dir = os.path.dirname(img_file)
img_base = os.path.basename(img_file)
par_dir = Utils.parent_dir(img_dir)
logging.debug('pardir {} imgdir {}'.format(par_dir,img_dir))
if labels_dir_suffix:
labels_dir = img_dir+labels_dir_suffix
else:
labels_dir = img_dir
lbl_name = os.path.basename(img_file).replace('.jpg','.txt').replace('.png','.txt').replace('.jpeg','.txt')
txt_file = os.path.join(labels_dir,lbl_name)
elif img_file is not None and txt_file is not None:
pass
logging.info('lblfile {} imgfile {}'.format(txt_file,img_file))
img_arr = cv2.imread(img_file)
if img_arr is None:
logging.warning('problem reading {}, returning'.format(img_file))
return None
image_h, image_w = img_arr.shape[0:2]
result_dict = {}
result_dict['filename']=img_file
result_dict['dimensions_h_w_c']=img_arr.shape
result_dict['annotations']=[]
if not os.path.exists(txt_file):
logging.warning('yolo2tgdict could not find {}, trying replacing "images" with "labels" '.format(txt_file))
#try alternate path replacing 'images' with 'labels'
if 'images' in img_file:
img_dir = os.path.dirname(img_file)
img_base = os.path.basename(img_file)
labels_dir = img_dir.replace('images','labels')
lbl_name = os.path.basename(img_file).replace('.jpg','.txt').replace('.png','.txt')
txt_file = os.path.join(labels_dir,lbl_name)
if not os.path.exists(txt_file):
logging.warning('yolo2tgdict could not find {}, returning '.format(txt_file))
return
else:
return
with open(txt_file,'r') as fp:
lines = fp.readlines()
logging.debug('{} bbs found'.format(len(lines)))
if lines == []:
logging.warning('no lines in {}'.format(txt_file))
for line in lines:
if line.strip() == '' or line.strip()[0]=='#':
logging.debug('got blank or comment line')
continue
class_index,x,y,w,h = line.split()
x_p=float(x)
y_p=float(y)
w_p=float(w)
h_p=float(h)
class_index = int(class_index)
class_label = classlabels[class_index]
x_center = int(x_p*image_w)
y_center = int(y_p*image_h)
w = int(w_p*image_w)
h = int(h_p*image_h)
x1 = x_center-w/2
x2 = x_center+w/2
y1 = y_center-h/2
y2 = y_center+h/2
logging.info('class {} x_c {} y_c {} w {} h {} x x1 {} y1 {} x2 {} y2 {}'.format(class_index,x_center,y_center,w,h,x1,y1,x2,y2))
if visual_output:
cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
object_dict={}
object_dict['bbox_xywh'] = [x1,y1,w,h]
object_dict['object']=class_label
result_dict['annotations'].append(object_dict)
if visual_output:
cv2.imshow('yolo2tgdict',img_arr)
cv2.waitKey(0)
return result_dict
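# --- illustrative sketch (added), not used by the rest of the module ---
# Shows the arithmetic yolo_to_tgdict applies to a single darknet label line
# '<object-class> <x_center> <y_center> <width> <height>' (fractions of image size),
# for an assumed 640x360 image. The line, sizes and class index are made-up example values.
def _example_yolo_line_to_bbox_xywh(line='0 0.5 0.5 0.2 0.25', image_w=640, image_h=360):
    class_index, x, y, w, h = line.split()
    x_center = int(float(x) * image_w)    # 320
    y_center = int(float(y) * image_h)    # 180
    w_px = int(float(w) * image_w)        # 128
    h_px = int(float(h) * image_h)        # 90
    x1 = x_center - w_px // 2             # 256, top-left corner as used in bbox_xywh
    y1 = y_center - h_px // 2             # 135
    return {'object_index': int(class_index), 'bbox_xywh': [x1, y1, w_px, h_px]}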
def tgdict_to_yolo(tg_dict,label_dir=None,classes=constants.hls_yolo_categories,yolo_trainfile='yolo_train.txt'):
'''
changing save dir to be same as img dir
input- dict in 'tg format' which is like this
    {'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h],'sId':104}]}
That json can then be used to generate yolo or frcnn training files
output : for yolo - https://pjreddie.com/darknet/yolo/
Darknet wants a .txt file for each image with a line for each ground truth object in the image that looks like:
<object-class> <x> <y> <width> <height>
where those are percentages...
it looks like yolo makes an assumption abt where images and label files are, namely in parallel dirs named [whatever]images and [whatever]labels:
e.g. JPEGImages labels
and a train.txt file pointing to just the images - the label files are same names with .txt instead of .jpg
also writes a line in the yolo_trainfile . This is all getting called by json_to_yolo
:param img_path:
:param bb_xywh:
:param class_number:
:param destination_dir:
:return:
'''
img_filename = tg_dict['filename']
annotations = tg_dict['annotations']
sid = None
    if 'sId' in tg_dict:
        sid = tg_dict['sId']
dims = tg_dict['dimensions_h_w_c']
im_h,im_w=(dims[0],dims[1])
logging.debug('writing yolo for file {}\nannotations {}'.format(img_filename,annotations))
if label_dir is None:
label_dir = os.path.dirname(img_filename)
label_name = os.path.basename(img_filename).replace('.png','.txt').replace('.jpg','.txt').replace('.jpeg','.txt')
if label_name[-4:]!='.txt':
logging.warning('did not replace suffix of {} with .txt'.format(img_filename))
label_path = os.path.join(label_dir,label_name)
print('writing yolo to '+str(label_path))
with open(label_path,'w') as fp:
for annotation in annotations:
bb_xywh = annotation['bbox_xywh']
bb_yolo = imutils.xywh_to_yolo(bb_xywh,(im_h,im_w))
logging.info('dims {} bbxywh {} bbyolo {}'.format((im_w,im_h),bb_xywh,bb_yolo))
object = annotation['object']
class_number = classes.index(object)
line = str(class_number)+' '+str(bb_yolo[0])+' '+str(bb_yolo[1])+' '+str(bb_yolo[2])+' '+str(bb_yolo[3])+'\n'
fp.write(line)
fp.close()
Utils.ensure_file(yolo_trainfile)
with open(yolo_trainfile,'a') as fp2:
fp2.write(img_filename+'\n')
fp2.close()
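# --- illustrative sketch (added), not used by the rest of the module ---
# tgdict_to_yolo delegates the box conversion to imutils.xywh_to_yolo (defined elsewhere
# in this repo). This standalone version is an assumption about what that conversion must
# do, based only on the darknet format described above: pixel [x, y, w, h] with a top-left
# corner becomes fractional [x_center, y_center, w, h].
def _example_xywh_to_yolo(bb_xywh=(89, 118, 64, 44), image_h_w=(360, 640)):
    im_h, im_w = image_h_w
    x, y, w, h = bb_xywh
    x_center = (x + w / 2.0) / im_w
    y_center = (y + h / 2.0) / im_h
    return (x_center, y_center, float(w) / im_w, float(h) / im_h)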
def json_vietnam_to_yolo(jsonfile,split_to_test_and_train=True,label_dir=None,classes=constants.hls_yolo_categories,yolo_trainfile=None,check_dims=True,visual_output=True):
    ''' input- json dicts in 'vietnam format' which is like this
{"objects":[{"label":"Private Car","x_y_w_h":[1160,223,65,59]},{"label":"Private Car","x_y_w_h":[971,354,127,85]}],"image_path":"2017-07-06_09-24-24-995.jpeg","image_w_h":[1600,900]}
output : for yolo - https://pjreddie.com/darknet/yolo/ looking like
<object-class> <x> <y> <width> <height>
where x,y,width,height are percentages...
it looks like yolo makes an assumption abt where images and label files are, namely in parallel dirs named [whatever]images and [whatever]labels:
e.g. JPEGImages labels
and a train.txt file pointing to just the images - the label files are same names with .txt instead of .jpg
:param img_path:
:param bb_xywh:
:param class_number:
:param destination_dir:
:return:
'''
print('converting json annotations in '+jsonfile+' to yolo')
with open(jsonfile,'r') as fp:
vietnam_dict = json.load(fp)
img_filename = vietnam_dict['image_path']
annotations = vietnam_dict['objects']
dims = vietnam_dict['image_w_h']
im_h,im_w=(dims[1],dims[0])
logging.debug('writing yolo for image {} hxw {}x{}\nannotations {} '.format(img_filename,im_h,im_w,annotations))
if check_dims or visual_output:
if not os.path.isabs(img_filename):
file_path = os.path.join(os.path.dirname(jsonfile),img_filename)
else:
file_path = img_filename
if not os.path.exists(file_path):
logging.warning('{} does not exist'.format(file_path))
img_arr = cv2.imread(file_path)
if img_arr is None:
logging.warning('could not find {}'.format(file_path))
return
actual_h,actual_w = img_arr.shape[0:2]
if actual_h!=im_h or actual_w != im_w:
logging.warning('image dims hw {} {} dont match json {}'.format(actual_h,actual_w,im_h,im_w))
return
if label_dir is None:
img_parent = Utils.parent_dir(os.path.dirname(img_filename))
img_diralone = os.path.dirname(img_filename).split('/')[-1]
label_diralone = img_diralone+'labels'
# label_dir= os.path.join(img_parent,label_diralone)
label_dir = os.path.dirname(img_filename) #keep labels and imgs in same dir, yolo is apparently ok with that
print('using label dir {}'.format(label_dir))
Utils.ensure_dir(label_dir)
# label_dir = os.path.join(img_parent,label_ext)
logging.debug('yolo img parent {} labeldir {} imgalone {} lblalone {} '.format(img_parent,label_dir,img_diralone,label_diralone))
label_name = os.path.basename(img_filename).replace('.png','.txt').replace('.jpg','.txt').replace('.jpeg','.txt')
if label_name[-4:]!='.txt':
logging.warning('did not replace image suffix of {} with .txt'.format(img_filename))
return
label_path = os.path.join(label_dir,label_name)
print('writing label to '+str(label_path))
with open(label_path,'w') as fp:
for annotation in annotations:
bb_xywh = annotation['x_y_w_h']
bb_yolo = imutils.xywh_to_yolo(bb_xywh,(im_h,im_w))
object = annotation['label']
if not object in constants.vietnam_to_hls_map:
                logging.warning('{} not found in constants.vietnam_to_hls_map'.format(object))
raw_input('ret to cont')
continue
tg_object = constants.vietnam_to_hls_map[object]
class_number = classes.index(tg_object)
logging.debug('wxh {} bbxywh {} bbyolo {}\norigobj {} tgobj {} ind {}'.format((im_w,im_h),bb_xywh,bb_yolo,object,tg_object,class_number))
line = str(class_number)+' '+str(bb_yolo[0])+' '+str(bb_yolo[1])+' '+str(bb_yolo[2])+' '+str(bb_yolo[3])+'\n'
fp.write(line)
if visual_output:
img_arr = imutils.bb_with_text(img_arr,bb_xywh,tg_object)
if visual_output:
cv2.imshow('image',img_arr)
cv2.waitKey(0)
cv2.destroyAllWindows()
fp.close()
if yolo_trainfile is None:
return
with open(yolo_trainfile,'a') as fp2:
fp2.write(file_path+'\n')
fp2.close()
def vietnam_dir_to_yolo(dir,visual_output=False):
json_files = [os.path.join(dir,f) for f in os.listdir(dir) if '.json' in f]
    yolo_trainfile = os.path.join(dir,'filelist.txt')
Utils.ensure_file(yolo_trainfile)
print('{} .json files in {}'.format(len(json_files),dir))
label_dir = dir
for json_file in json_files:
json_vietnam_to_yolo(json_file,yolo_trainfile=yolo_trainfile,label_dir=label_dir,visual_output=visual_output)
create_nn_imagelsts.split_to_trainfile_and_testfile(yolo_trainfile)
return yolo_trainfile
def read_many_yolo_bbs(imagedir='/data/jeremy/image_dbs/hls/data.vision.ee.ethz.ch/left/',labeldir=None,img_filter='.png'):
if labeldir is None:
labeldir = os.path.join(Utils.parent_dir(imagedir),'labels')
imgfiles = [f for f in os.listdir(imagedir) if img_filter in f]
imgfiles = sorted(imgfiles)
print('found {} files in {}, label dir {}'.format(len(imgfiles),imagedir,labeldir))
for f in imgfiles:
bb_path = os.path.join(labeldir,f).replace(img_filter,'.txt')
if not os.path.isfile(bb_path):
print('{} not found '.format(bb_path))
continue
image_path = os.path.join(imagedir,f)
read_yolo_bbs(bb_path,image_path)
def read_pascal_xml_write_yolo(dir='/media/jeremy/9FBD-1B00/hls_potential/voc2007/VOCdevkit/VOC2007',annotation_folder='Annotations',img_folder='JPEGImages',
annotation_filter='.xml'):
'''
nondestructive - if there are already label files these get added to not overwritten
:param dir:
:param annotation_folder:
:param img_folder:
:param annotation_filter:
:return:
'''
# classes = [ 'person','hat','backpack','bag','person_wearing_red_shirt','person_wearing_blue_shirt',
# 'car','bus','truck','unattended_bag', 'bicycle', 'motorbike']
classes = constants.hls_yolo_categories
annotation_dir = os.path.join(dir,annotation_folder)
img_dir = os.path.join(dir,img_folder)
annotation_files = [os.path.join(annotation_dir,f) for f in os.listdir(annotation_dir) if annotation_filter in f]
listfilename = os.path.join(dir,'filelist.txt')
list_file = open(listfilename, 'w')
for annotation_file in annotation_files:
success = convert_pascal_xml_annotation(annotation_file,classes)
if success:
print('found relevant class(es)')
filenumber = os.path.basename(annotation_file).replace('.xml','')
jpgpath = os.path.join(img_dir,str(filenumber)+'.jpg')
list_file.write(jpgpath+'\n')
def convert_pascal_xml_annotation(in_file,classes,labeldir=None):
filenumber = os.path.basename(in_file).replace('.xml','')
# in_file = open('VOCdevkit/VOC%s/Annotations/%s.xml'%(year, image_id))
if labeldir==None:
parent_dir = Utils.parent_dir(os.path.dirname(in_file))
labeldir = os.path.join(parent_dir,'labels')
Utils.ensure_dir(labeldir)
out_filename = os.path.join(labeldir, filenumber+'.txt')
print('in {} out {}'.format(in_file,out_filename))
tree=ET.parse(in_file)
root = tree.getroot()
size = root.find('size')
w = int(size.find('width').text)
h = int(size.find('height').text)
success=False
for obj in root.iter('object'):
difficult = obj.find('difficult').text
cls = obj.find('name').text
if cls not in classes or int(difficult)==1:
continue
cls_id = classes.index(cls)
xmlbox = obj.find('bndbox')
b = (float(xmlbox.find('xmin').text), float(xmlbox.find('xmax').text), float(xmlbox.find('ymin').text), float(xmlbox.find('ymax').text))
bb = convert_x1x2y1y2_to_yolo((w,h), b)
out_file = open(out_filename, 'a+')
os.chmod(out_filename, 0o666)
out_file.write(str(cls_id) + " " + " ".join([str(round(a,4)) for a in bb]) + '\n')
# os.chmod(out_filename, 0o777)
success = True
return(success)
def read_pascal_txt_write_yolo(dir='/media/jeremy/9FBD-1B00/hls_potential/voc2005_1/',
annotation_folder='all_relevant_annotations',img_folder='all_relevant_images',
annotation_filter='.txt',image_filter='.png',yolo_annotation_dir='labels'):
'''
nondestructive - if there are already label files these get added to not overwritten
:param dir:
:param annotation_folder:
:param img_folder:
:param annotation_filter:
:return:
'''
# classes = [ 'person','hat','backpack','bag','person_wearing_red_shirt','person_wearing_blue_shirt',
# 'car','bus','truck','unattended_bag', 'bicycle', 'motorbike']
classes = constants.hls_yolo_categories
annotation_dir = os.path.join(dir,annotation_folder)
img_dir = os.path.join(dir,img_folder)
annotation_files = [os.path.join(annotation_dir,f) for f in os.listdir(annotation_dir) if annotation_filter in f]
listfilename = os.path.join(dir,'filelist.txt')
list_file = open(listfilename, 'w')
yolo_annotation_path = os.path.join(dir,yolo_annotation_dir)
Utils.ensure_dir(yolo_annotation_path)
for annotation_file in annotation_files:
out_filename=os.path.join(yolo_annotation_path,os.path.basename(annotation_file))
print('outfile'+out_filename)
success = convert_pascal_txt_annotation(annotation_file,classes,out_filename)
if success:
print('found relevant class(es)')
filename = os.path.basename(annotation_file).replace(annotation_filter,'')
img_dir = os.path.join(dir,img_folder)
imgpath = os.path.join(img_dir,str(filename)+image_filter)
list_file.write(imgpath+'\n')
def convert_pascal_txt_annotation(in_file,classes,out_filename):
print('in {} out {}'.format(in_file,out_filename))
with open(in_file,'r') as fp:
lines = fp.readlines()
for i in range(len(lines)):
if 'Image filename' in lines[i]:
imfile=lines[i].split()[3]
print('imfile:'+imfile)
# path = Utils.parent_dir(os.path.basename(in_file))
# if path.split('/')[-1] != 'Annotations':
# path = Utils.parent_dir(path)
# print('path to annotation:'+str(path))
# img_path = os.path.join(path,imfile)
# print('path to img:'+str(img_path))
# img_arr = cv2.imread(img_path)
if 'Image size' in lines[i]:
nums = re.findall('\d+', lines[i])
print(lines[i])
print('nums'+str(nums))
w = int(nums[0])
h = int(nums[1])
print('h {} w {}'.format(h,w))
if '# Details' in lines[i] :
object = lines[i].split()[5].replace('(','').replace(')','').replace('"','')
nums = re.findall('\d+', lines[i+2])
print('obj {} nums {}'.format(object,nums))
success=False
cls_id = tg_class_from_pascal_class(object,classes)
if cls_id is not None:
print('class index '+str(cls_id)+' '+classes[cls_id])
success=True
if not success:
print('NO RELEVANT CLASS FOUND')
continue
b = (int(nums[1]), int(nums[3]), int(nums[2]), int(nums[4])) #file has xmin ymin xmax ymax
print('bb_x1x2y1y2:'+str(b))
bb = convert_x1x2y1y2_to_yolo((w,h), b)
print('bb_yolo'+str(bb))
if os.path.exists(out_filename):
append_write = 'a' # append if already exists
else:
append_write = 'w' # make a new file if not
out_file = open(out_filename, append_write)
# os.chmod(out_filename, 0o666) #
out_file.write(str(cls_id) + " " + " ".join([str(round(a,4)) for a in bb]) + '\n')
# os.chmod(out_filename, 0o777)
success = True
return(success)
def tgdict_to_api_dict(tgdict):
'''
convert a tgdict in format
{ "dimensions_h_w_c": [360,640,3], "filename": "/data/olympics/olympics/9908661.jpg",
"annotations": [
{
"bbox_xywh": [89, 118, 64,44 ],
"object": "car"
} ... ] }
to an api dict (returned by our api ) in format
{"data": [{"confidence": 0.366, "object": "car", "bbox": [394, 49, 486, 82]}, {"confidence": 0.2606, "object": "car", "bbox": [0, 116, 571, 462]},
where bbox is [xmin,ymin,xmax,ymax] aka [x1,y1,x2,y2]
:param tgdict:
:return:
'''
apidict={}
apidict['data'] = []
for annotation in tgdict['annotations']:
bb=annotation['bbox_xywh']
object=annotation['object']
api_entry={}
api_entry['confidence']=None #tgdict doesnt have this, generally its a gt so its 100%
api_entry['object']=object
api_entry['bbox']=[bb[0],bb[1],bb[0]+bb[2],bb[1]+bb[3]] #api bbox is [xmin,ymin,xmax,ymax] aka [x1,y1,x2,y2]
apidict['data'].append(api_entry)
return apidict
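# --- illustrative sketch (added), not used by the rest of the module ---
# A concrete run of tgdict_to_api_dict on made-up values, showing how bbox_xywh
# [x, y, w, h] turns into the api bbox [x1, y1, x2, y2].
def _example_tgdict_to_api_dict():
    tgdict = {'dimensions_h_w_c': [360, 640, 3],
              'filename': 'example.jpg',
              'annotations': [{'bbox_xywh': [89, 118, 64, 44], 'object': 'car'}]}
    apidict = tgdict_to_api_dict(tgdict)
    # expected: {'data': [{'confidence': None, 'object': 'car', 'bbox': [89, 118, 153, 162]}]}
    return apidict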
def tg_class_from_pascal_class(pascal_class,tg_classes):
#hls_yolo_categories = [ 'person','hat','backpack','bag','person_wearing_red_shirt','person_wearing_blue_shirt',
# 'car','bus','truck','unattended_bag', 'bicycle', 'motorbike']
conversions = {'bike':'bicycle',
'motorcycle':'motorbike'} #things that have names different than tg names
#(forced to do this since e.g. bike and bicycle are both used in VOC)
for tg_class in tg_classes:
if tg_class in pascal_class:
tg_ind = tg_classes.index(tg_class)
return tg_ind
for pascal,tg in conversions.iteritems():
if pascal in pascal_class:
tg_ind = tg_classes.index(tg)
return tg_ind
return None
def json_to_yolo(jsonfile,split_to_test_and_train=True):
''' input- json arr of dicts in 'tg format' which is like this
{'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h]}],
    output : for yolo - https://pjreddie.com/darknet/yolo/ looking like
<object-class> <x> <y> <width> <height>
where x,y,width,height are percentages...
it looks like yolo makes an assumption abt where images and label files are, namely in parallel dirs named [whatever]images and [whatever]labels:
e.g. JPEGImages labels
and a train.txt file pointing to just the images - the label files are same names with .txt instead of .jpg
:param img_path:
:param bb_xywh:
:param class_number:
:param destination_dir:
:return:
'''
print('converting json annotations in '+jsonfile+' to yolo')
trainfile = 'yolo_train.txt'
with open(jsonfile,'r') as fp:
annotation_list = json.load(fp)
for tg_dict in annotation_list:
tgdict_to_yolo(tg_dict,yolo_trainfile=trainfile)
create_nn_imagelsts.split_to_trainfile_and_testfile(trainfile)
def autti_txt_to_yolo(autti_txt='/media/jeremy/9FBD-1B00/image_dbs/hls/object-dataset/labels.csv'):
#to deal with driving file from autti
# wget http://bit.ly/udacity-annotations-autti
all_annotations = txt_to_tgdict(txtfile=autti_txt,image_dir=None,parsemethod=parse_autti)
for tg_dict in all_annotations:
tgdict_to_yolo(tg_dict)
json_name = autti_txt.replace('.csv','.json')
inspect_json(json_name)
def udacity_csv_to_yolo(udacity_csv='/media/jeremy/9FBD-1B00/image_dbs/hls/object-detection-crowdai/labels.csv'):
# to deal with driving file from udacity -
# wget http://bit.ly/udacity-annoations-crowdai
all_annotations = csv_to_tgdict(udacity_csv=udacity_csv,parsemethod=parse_udacity)
for tg_dict in all_annotations:
tgdict_to_yolo(tg_dict)
json_name = udacity_csv.replace('.csv','.json')
inspect_json(json_name)
def parse_udacity(row):
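    # note (added): the mapping below reads xmax from row['ymin'] and ymin from row['xmax'].
    # this looks deliberate - the crowdai/udacity csv headers are reportedly mislabeled -
    # so the apparent swap is left unchanged rather than 'corrected'.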
xmin=int(row['xmin'])
xmax=int(row['ymin'])
ymin=int(row['xmax'])
ymax=int(row['ymax'])
frame=row['Frame'] #aka filename
label=row['Label']
label=label.lower()
preview_url=row['Preview URL']
tg_object=convert_udacity_label_to_tg(label)
if tg_object is None:
#label didnt get xlated so its something we dont care about e.g streetlight
print('object {} is not of interest'.format(label))
return xmin,xmax,ymin,ymax,frame,tg_object
def parse_autti(row,delimiter=' '):
#these parse guys should also have the translator (whatever classes into tg classes
#autti looks like this
# 178019968680240537.jpg 888 498 910 532 0 "trafficLight" "Red"
# 1478019969186707568.jpg 404 560 540 650 0 "car"
elements = row.split(delimiter)
filename=elements[0]
xmin=int(elements[1])
ymin=int(elements[2])
xmax=int(elements[3])
ymax=int(elements[4])
#something i'm ignoring in row[5]
label=elements[6].replace('"','').replace("'","").replace('\n','').replace('\t','')
label=label.lower()
assert(xmin<xmax)
assert(ymin<ymax)
tg_object=convert_udacity_label_to_tg(label)
if tg_object is None:
#label didnt get xlated so its something we dont care about e.g streetlight
print('object {} is not of interest'.format(label))
return xmin,xmax,ymin,ymax,filename,tg_object
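# --- illustrative sketch (added), not used by the rest of the module ---
# Feeds parse_autti one of the sample rows quoted in its docstring; 'car' passes through
# convert_udacity_label_to_tg (defined further down) unchanged. Note the return order is
# xmin, xmax, ymin, ymax.
def _example_parse_autti_row():
    row = '1478019969186707568.jpg 404 560 540 650 0 "car"\n'
    xmin, xmax, ymin, ymax, filename, label = parse_autti(row)
    # expected: xmin 404, xmax 540, ymin 560, ymax 650, label 'car'
    return xmin, xmax, ymin, ymax, filename, label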
def convert_kyle(dir='/home/jeremy/Dropbox/tg/hls_tagging/person_wearing_backpack/annotations',filter='.txt'):
'''
run yolo on a dir having gt from kyle or elsewhere, get yolo and compare
:param dir:
:return:
'''
    gts = [os.path.join(dir,f) for f in os.listdir(dir) if filter in f]
for gt_file in gts:
yolodict = read_various_training_formats.kyle_dicts_to_yolo()
def kyle_dicts_to_yolo(dir='/data/jeremy/image_dbs/hls/kyle/person_wearing_hat/annotations_hat',visual_output=True):
'''
convert from kyles mac itunes-app generated dict which looks like
{ "objects" : [
{
"label" : "person",
"x_y_w_h" : [
29.75364,
16.1669,
161.5282,
236.6785 ] },
{ "label" : "hat",
"x_y_w_h" : [
58.17136,
16.62691,
83.0643,
59.15696 ] } ],
"image_path" : "\/Users\/kylegiddens\/Desktop\/ELBIT\/person_wearing_hat\/images1.jpg",
"image_w_h" : [
202,
250 ] }
to tgformat (while at it write to json) which looks like
[ {
"dimensions_h_w_c": [360,640,3],
"filename": "/data/olympics/olympics/9908661.jpg"
"annotations": [
{
"bbox_xywh": [89, 118, 64,44 ],
"object": "car"
}
], }, ...
and use tgdict_to_yolo(tg_dict,label_dir=None,classes=constants.hls_yolo_categories)
to finally write yolo trainfiles
:param jsonfile:
:return:
'''
jsonfiles = [os.path.join(dir,f) for f in os.listdir(dir) if '.json' in f]
all_tgdicts = []
images_dir = Utils.parent_dir(dir)
for jsonfile in jsonfiles:
with open(jsonfile,'r') as fp:
kyledict = json.load(fp)
print(kyledict)
tgdict = {}
basefile = os.path.basename(kyledict['image_path'])
tgdict['filename'] = os.path.join(images_dir,basefile)
print('path {} base {} new {}'.format(kyledict['image_path'],basefile,tgdict['filename']))
img_arr=cv2.imread(tgdict['filename'])
        if img_arr is None:
            print('COULDNT GET IMAGE '+tgdict['filename'])
            continue
# tgdict['dimensions_h_w_c']=kyledict['image_w_h']
# tgdict['dimensions_h_w_c'].append(3) #add 3 chans to tgdict
tgdict['dimensions_h_w_c'] = img_arr.shape
print('tg dims {} kyle dims {}'.format(tgdict['dimensions_h_w_c'],kyledict['image_w_h']))
tgdict['annotations']=[]
for kyle_object in kyledict['objects']:
tg_annotation_dict={}
tg_annotation_dict['object']=kyle_object['label']
tg_annotation_dict['bbox_xywh']=[int(round(x)) for x in kyle_object['x_y_w_h']]
tgdict['annotations'].append(tg_annotation_dict)
if visual_output:
imutils.bb_with_text(img_arr,tg_annotation_dict['bbox_xywh'],tg_annotation_dict['object'])
print(tgdict)
if visual_output:
cv2.imshow('bboxes',img_arr)
cv2.waitKey(0)
all_tgdicts.append(tgdict)
tgdict_to_yolo(tgdict,label_dir=None,classes=constants.hls_yolo_categories)
json_out = os.path.join(images_dir,'annotations.json')
with open(json_out,'w') as fp:
json.dump(all_tgdicts,fp,indent=4)
fp.close()
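# --- illustrative sketch (added), not used by the rest of the module ---
# The only numeric step in the kyle -> tg conversion above: the float x_y_w_h values
# are rounded to integer pixel bbox_xywh. Example values are taken from the docstring.
def _example_kyle_bbox_to_tg(x_y_w_h=(29.75364, 16.1669, 161.5282, 236.6785)):
    return [int(round(v)) for v in x_y_w_h]   # -> [30, 16, 162, 237]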
def csv_to_tgdict(udacity_csv='/media/jeremy/9FBD-1B00/image_dbs/hls/object-dataset/labels.csv',image_dir=None,classes=constants.hls_yolo_categories,visual_output=False,manual_verification=False,jsonfile=None,parsemethod=parse_udacity,delimiter='\t',readmode='r'):
'''
    read udacity csv to grab files here
https://github.com/udacity/self-driving-car/tree/master/annotations
pedestrians, cars, trucks (and trafficlights in second one)
udacity file looks like:
xmin,ymin,xmax,ymax,Frame,Label,Preview URL
785,533,905,644,1479498371963069978.jpg,Car,http://crowdai.com/images/Wwj-gorOCisE7uxA/visualize
create the 'usual' tg dict for bb's , also write to json while we're at it
[ {
"dimensions_h_w_c": [360,640,3],
"filename": "/data/olympics/olympics/9908661.jpg"
"annotations": [
{
"bbox_xywh": [89, 118, 64,44 ],
"object": "car"
}
], }, ...
:param udacity_csv:
:param label_dir:
:param classes:
:return:
'''
    #todo this can be combined with the txt_to_tgdict probably, maybe using csv.reader instead of csv.DictReader
# spamreader = csv.reader(csvfile, delimiter=' ', quotechar='|')
#... for row in spamreader:
#... print ', '.join(row)
all_annotations = []
if image_dir is None:
image_dir = os.path.dirname(udacity_csv)
print('opening udacity csv file {} '.format(udacity_csv))
# with open(udacity_csv, newline='') as file:
with open(udacity_csv,readmode) as file:
# with open('eggs.csv', newline='') as csvfile:
## reader = csv.DictReader(file,delimiter=delimiter, quotechar='|')
reader = csv.DictReader(file)
n_rows = 0
max_annotations=10**10
for row in reader:
n_rows += 1
print('row'+str(row))
            try:
                xmin,xmax,ymin,ymax,filename,label=parsemethod(row)
                print('file {} xmin {} ymin {} xmax {} ymax {} object {}'.format(filename,xmin,ymin,xmax,ymax,label))
                if label is None:
                    continue   #label didnt translate to a category of interest
            except:
                print('trouble getting row '+str(row))
                continue
try:
assert(xmax>xmin)
assert(ymax>ymin)
except:
print('problem with order of x/y min/max')
print('xmin {} ymin {} xmax {} ymax {} '.format(xmin,ymin,xmax,ymax))
xmint=min(xmin,xmax)
xmax=max(xmin,xmax)
xmin=xmint
ymint=min(ymin,ymax)
ymax=max(ymin,ymax)
ymin=ymint
bb = [xmin,ymin,xmax-xmin,ymax-ymin] #xywh
if image_dir is not None:
full_name = os.path.join(image_dir,filename)
else:
full_name = filename
im = cv2.imread(full_name)
if im is None:
print('couldnt open '+full_name)
continue
im_h,im_w=im.shape[0:2]
annotation_dict = {}
annotation_dict['filename']=full_name
annotation_dict['annotations']=[]
annotation_dict['dimensions_h_w_c'] = im.shape
#check if file has already been seen and a dict started, if so use that instead
file_already_in_json = False
#this is prob a stupid slow way to check
for a in all_annotations:
if a['filename'] == full_name:
annotation_dict=a
file_already_in_json = True
break
# print('im_w {} im_h {} bb {} label {}'.format(im_w,im_h,bb,label))
object_dict={}
object_dict['bbox_xywh'] = bb
object_dict['object']=label
if visual_output or manual_verification:
im = imutils.bb_with_text(im,bb,label)
magnify = 1
im = cv2.resize(im,(int(magnify*im_w),int(magnify*im_h)))
cv2.imshow('full',im)
if not manual_verification:
cv2.waitKey(5)
else:
print('(a)ccept , any other key to not accept')
k=cv2.waitKey(0)
if k == ord('a'):
annotation_dict['annotations'].append(object_dict)
else:
continue #dont add bb to list, go to next csv line
if not manual_verification:
annotation_dict['annotations'].append(object_dict)
# print('annotation dict:'+str(annotation_dict))
if not file_already_in_json: #add new file to all_annotations
all_annotations.append(annotation_dict)
else: #update current annotation with new bb
for a in all_annotations:
if a['filename'] == full_name:
a=annotation_dict
# print('annotation dict:'+str(annotation_dict))
print('# files:'+str(len(all_annotations)))
if len(all_annotations)>max_annotations:
break # for debugging, these files are ginormous
# raw_input('ret to cont')
if jsonfile == None:
jsonfile = udacity_csv.replace('.csv','.json')
with open(jsonfile,'w') as fp:
json.dump(all_annotations,fp,indent=4)
fp.close()
return all_annotations
def txt_to_tgdict(txtfile='/media/jeremy/9FBD-1B00/image_dbs/hls/object-dataset/labels.csv',image_dir=None,classes=constants.hls_yolo_categories,visual_output=False,manual_verification=False,jsonfile=None,parsemethod=parse_autti,wait=1):
'''
    read a udacity/autti style annotation txt to grab files here
https://github.com/udacity/self-driving-car/tree/master/annotations
pedestrians, cars, trucks (and trafficlights in second one)
udacity file looks like:
xmin,ymin,xmax,ymax,Frame,Label,Preview URL
785,533,905,644,1479498371963069978.jpg,Car,http://crowdai.com/images/Wwj-gorOCisE7uxA/visualize
create the 'usual' tg dict for bb's , also write to json while we're at it
[ {
"dimensions_h_w_c": [360,640,3],
"filename": "/data/olympics/olympics/9908661.jpg"
"annotations": [
{
"bbox_xywh": [89, 118, 64,44 ],
"object": "car"
}
], }, ...
:param udacity_csv:
:param label_dir:
:param classes:
:return:
'''
all_annotations = []
if image_dir is None:
image_dir = os.path.dirname(txtfile)
print('opening udacity csv file {} '.format(txtfile))
with open(txtfile, "r") as file:
lines = file.readlines()
for row in lines:
# print(row)
try:
xmin,xmax,ymin,ymax,filename,label=parsemethod(row)
print('file {} xmin {} ymin {} xmax {} ymax {} object {}'.format(filename,xmin,ymin,xmax,ymax,label))
if label is None:
continue
except:
print('trouble getting row '+str(row))
continue
try:
assert(xmax>xmin)
assert(ymax>ymin)
except:
print('problem with order of x/y min/max')
print('xmin {} ymin {} xmax {} ymax {} '.format(xmin,ymin,xmax,ymax))
xmint=min(xmin,xmax)
xmax=max(xmin,xmax)
xmin=xmint
ymint=min(ymin,ymax)
ymax=max(ymin,ymax)
ymin=ymint
if image_dir is not None:
full_name = os.path.join(image_dir,filename)
else:
full_name = filename
im = cv2.imread(full_name)
if im is None:
print('couldnt open '+full_name)
continue
im_h,im_w=im.shape[0:2]
annotation_dict = {}
bb = [xmin,ymin,xmax-xmin,ymax-ymin] #xywh
annotation_dict['filename']=full_name
annotation_dict['annotations']=[]
annotation_dict['dimensions_h_w_c'] = im.shape
#check if file has already been seen and a dict started, if so use that instead
file_already_in_json = False
#this is prob a stupid slow way to check
for a in all_annotations:
if a['filename'] == full_name:
annotation_dict=a
file_already_in_json = True
break
object_dict={}
object_dict['bbox_xywh'] = bb
object_dict['object']=label
if visual_output or manual_verification:
im = imutils.bb_with_text(im,bb,label)
magnify = 1
im = cv2.resize(im,(int(magnify*im_w),int(magnify*im_h)))
cv2.imshow('full',im)
if not manual_verification:
cv2.waitKey(wait)
else:
print('(a)ccept , any other key to not accept')
k=cv2.waitKey(0)
if k == ord('a'):
annotation_dict['annotations'].append(object_dict)
else:
continue #dont add bb to list, go to next csv line
if not manual_verification:
annotation_dict['annotations'].append(object_dict)
# print('annotation dict:'+str(annotation_dict))
if not file_already_in_json: #add new file to all_annotations
all_annotations.append(annotation_dict)
else: #update current annotation with new bb
for a in all_annotations:
if a['filename'] == full_name:
a=annotation_dict
# print('annotation dict:'+str(annotation_dict))
print('# files:'+str(len(all_annotations)))
# raw_input('ret to cont')
if jsonfile == None:
jsonfile = txtfile.replace('.csv','.json').replace('.txt','.json')
with open(jsonfile,'w') as fp:
json.dump(all_annotations,fp,indent=4)
fp.close()
return all_annotations
def convert_udacity_label_to_tg(udacity_label):
# hls_yolo_categories = ['person','person_wearing_hat','person_wearing_backpack','person_holding_bag',
# 'man_with_red_shirt','man_with_blue_shirt',
# 'car','van','truck','unattended_bag']
#udacity: Car Truck Pedestrian
conversions = {'pedestrian':'person',
'car':'car',
'truck':'truck'}
if not udacity_label in conversions:
print('!!!!!!!!!! did not find {} in conversions from udacity to tg cats !!!!!!!!'.format(udacity_label))
# raw_input('!!')
return(None)
tg_description = conversions[udacity_label]
return(tg_description)
def convert_x1x2y1y2_to_yolo(size, box):
dw = 1./(size[0])
dh = 1./(size[1])
x = (box[0] + box[1])/2.0 - 1
y = (box[2] + box[3])/2.0 - 1
w = box[1] - box[0]
h = box[3] - box[2]
x = x*dw
w = w*dw
y = y*dh
h = h*dh
return (x,y,w,h)
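# --- illustrative sketch (added), not used by the rest of the module ---
# Worked example of convert_x1x2y1y2_to_yolo: size is (w, h) and box is (xmin, xmax, ymin, ymax),
# matching how the pascal readers above call it. Numbers are made up; the -1 offset mirrors
# darknet's voc_label.py.
def _example_convert_x1x2y1y2_to_yolo():
    size = (640, 360)            # (w, h)
    box = (100, 300, 50, 250)    # (xmin, xmax, ymin, ymax)
    x, y, w, h = convert_x1x2y1y2_to_yolo(size, box)
    # x = ((100+300)/2 - 1)/640 ~= 0.3109   y = ((50+250)/2 - 1)/360 ~= 0.4139
    # w = (300-100)/640 = 0.3125            h = (250-50)/360 ~= 0.5556
    return x, y, w, h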
def convert_deepfashion_helper(args):
    #callers (Pool.map and the single-process loop in read_and_convert_deepfashion_bbfile) pass a single tuple, so unpack it here
    line,labelfile,dir_to_catlist,visual_output,pardir = args
    global frequencies
if not '.jpg' in line:
return #first and second lines are metadata
with open(labelfile,'a+') as fp2:
image_name,x1,y1,x2,y2 = line.split()
x1=int(x1)
x2=int(x2)
y1=int(y1)
y2=int(y2)
# print('file {} x1 {} y1 {} x2 {} y2 {}'.format(image_name,x1,y2,x2,y2))
image_dir = Utils.parent_dir(image_name)
image_dir = image_dir.split('/')[-1]
tgcat = create_nn_imagelsts.deepfashion_folder_to_cat(dir_to_catlist,image_dir)
if tgcat is None:
print('got no tg cat fr '+str(image_dir))
return
if not tgcat in constants.trendi_to_pixlevel_v3_map:
print('didnt get cat for {} {}'.format(tgcat,line))
return
# if not(tgcat is 'lower_cover_long_items' or tgcat is 'lower_cover_short_items' or tgcat is 'bag' or tgcat is 'belt'):
# return
pixlevel_v3_cat = constants.trendi_to_pixlevel_v3_map[tgcat]
pixlevel_v3_index = constants.pixlevel_categories_v3.index(pixlevel_v3_cat)
frequencies[pixlevel_v3_index]+=1
print('freq '+str(frequencies))
print('tgcat {} v3cat {} index {}'.format(tgcat,pixlevel_v3_cat,pixlevel_v3_index))
image_path = os.path.join(pardir,image_name)
img_arr=cv2.imread(image_path)
mask,img_arr2 = grabcut_bb(img_arr,[x1,y1,x2,y2])
# make new img with extraneous removed
if(visual_output):
cv2.imshow('after gc',img_arr2)
# cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
cv2.imshow('orig',img_arr)
cv2.waitKey(0)
mask = np.where((mask!=0),1,0).astype('uint8') * pixlevel_v3_index #mask should be from (0,1) but just in case...
skin_index = constants.pixlevel_categories_v3.index('skin')
skin_mask = kassper.skin_detection_fast(img_arr) * skin_index
mask2 = np.where(skin_mask!=0,skin_mask,mask)
overlap = np.bitwise_and(skin_mask,mask)
mask3 = np.where(overlap!=0,mask,mask2)
prefer_skin=False
if prefer_skin:
#take skin instead of gc in case of overlap
mask = mask2
else:
#take gc instaeda of skin in case of overlap
mask=mask3
# if(visual_output):
#
# imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3)
# imutils.show_mask_with_labels(mask2,constants.pixlevel_categories_v3)
# imutils.show_mask_with_labels(mask3,constants.pixlevel_categories_v3)
# imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3)
# #
# cv2.imshow('mask1',mask)
# # cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
# cv2.imshow('mask2',mask2)
# cv2.imshow('mask3',mask3)
# cv2.imshow('overlap',overlap)
#
# cv2.waitKey(0)
gc_img_name = image_path.replace('.jpg','_gc.jpg')
print('writing img to '+str(gc_img_name))
res = cv2.imwrite(gc_img_name,img_arr2)
if not res:
logging.warning('bad save result '+str(res)+' for '+str(gc_img_name))
maskname = image_path.replace('.jpg','.png')
print('writing mask to '+str(maskname))
res = cv2.imwrite(maskname,mask)
if not res:
logging.warning('bad save result '+str(res)+' for '+str(maskname))
# img_arr2=np.where(skin_mask!=0[:,:,np.newaxis],img_arr,img_arr2)
if(visual_output):
# cv2.imshow('gc',img_arr2)
# # cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
# cv2.imshow('orig',img_arr)
# cv2.waitKey(0)
# imutils.count_values(mask,constants.pixlevel_categories_v3)
imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3,original_image=gc_img_name,visual_output=True)
line = gc_img_name+' '+maskname+'\n'
Utils.ensure_file(labelfile)
fp2.write(line)
fp2.close()
# img_arr=remove_irrelevant_parts_of_image(img_arr,[x1,y1,x2,y2],pixlevel_v3_cat)
# imutils.show_mask_with_labels(maskname,constants.pixlevel_categories_v3,original_image=image_path,visual_output=False)
def catalog_image_to_pixlevel_mask_nobb(dir,visual_output=False, filter='.jpg',cats=constants.ultimate_21,forced_cat=None,label_dir=None):
'''
take catalog images (usu with uniform bgnd and large figure ), gc the fg , implant on some background (maybe noise) and generate mask
do all this without bb, assume figure is middle of image
:param dir:
:param visual_output:
:param filter:
:return:
'''
files = [os.path.join(dir,f) for f in os.listdir(dir) if filter in f]
    if forced_cat:
        tgcat = forced_cat
    else:
        tgcat = None
        for cat in cats:
            if cat in dir:
                print('cat {} appears in dir {} so this appears to be a dir of {}'.format(cat,dir,cat))
                tgcat = cat
        if tgcat is None:
            print('could not determine a category from dir name {} and no forced_cat was given, returning'.format(dir))
            return
    print('using category {}'.format(tgcat))
pixlevel_v3_cat = constants.trendi_to_pixlevel_v3_map[tgcat]
pixlevel_v3_index = constants.pixlevel_categories_v3.index(pixlevel_v3_cat)
frequencies[pixlevel_v3_index]+=1
print('freq '+str(frequencies))
print('tgcat {} v3cat {} index {}'.format(tgcat,pixlevel_v3_cat,pixlevel_v3_index))
for file in files:
img_arr=cv2.imread(file)
mask,img_arr2 = grabcut_no_bb(img_arr)
# make new img with extraneous removed
if(visual_output):
cv2.imshow('after gc',img_arr2)
# cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
cv2.imshow('orig',img_arr)
cv2.waitKey(0)
mask = np.where((mask!=0),1,0).astype('uint8') * pixlevel_v3_index #mask should be from (0,1) but just in case...
skin_index = constants.pixlevel_categories_v3.index('skin')
skin_mask = kassper.skin_detection_fast(img_arr) * skin_index
mask2 = np.where(skin_mask!=0,skin_mask,mask)
overlap = np.bitwise_and(skin_mask,mask)
mask3 = np.where(overlap!=0,mask,mask2)
prefer_skin=False
if prefer_skin:
#take skin instead of gc in case of overlap
mask = mask2
else:
            #take gc instead of skin in case of overlap
mask=mask3
if(visual_output):
imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3)
imutils.show_mask_with_labels(mask2,constants.pixlevel_categories_v3)
imutils.show_mask_with_labels(mask3,constants.pixlevel_categories_v3)
imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3)
#
cv2.imshow('mask1',mask)
# cv2.rectangle(img_arr,(x1,y1),(x2,y2),color=[100,255,100],thickness=2)
cv2.imshow('mask2',mask2)
cv2.imshow('mask3',mask3)
cv2.imshow('overlap',overlap)
cv2.waitKey(0)
        gc_img_name = file.replace('.jpg','_gc.jpg')   #use the current file - image_path is not defined in this function
print('writing img to '+str(gc_img_name))
res = cv2.imwrite(gc_img_name,img_arr2)
if not res:
logging.warning('bad save result '+str(res)+' for '+str(gc_img_name))
        maskname = file.replace('.jpg','.png')
print('writing mask to '+str(maskname))
res = cv2.imwrite(maskname,mask)
if not res:
logging.warning('bad save result '+str(res)+' for '+str(maskname))
# img_arr2=np.where(skin_mask!=0[:,:,np.newaxis],img_arr,img_arr2)
if(visual_output):
cv2.imshow('gc',img_arr2)
            # cv2.rectangle(...) removed here - there is no bb (x1,y1,x2,y2) in this no-bb variant
cv2.imshow('orig',img_arr)
cv2.waitKey(0)
imutils.count_values(mask,constants.pixlevel_categories_v3)
imutils.show_mask_with_labels(mask,constants.pixlevel_categories_v3,original_image=gc_img_name,visual_output=True)
        #note: the original wrote to an undefined labelfile/fp2 here; append the (image, mask) pair
        #to an assumed list file under label_dir (or the image dir) instead
        labelfile = os.path.join(label_dir if label_dir is not None else dir,'pixlabels.txt')
        Utils.ensure_file(labelfile)
        with open(labelfile,'a') as fp2:
            fp2.write(gc_img_name+' '+maskname+'\n')
def read_and_convert_deepfashion_bbfile(bbfile='/data/jeremy/image_dbs/deep_fashion/category_and_attribute_prediction/Anno/list_bbox.txt',
labelfile='/data/jeremy/image_dbs/deep_fashion/category_and_attribute_prediction/df_pixlabels.txt',
filefilter='250x250.jpg',visual_output=False,
multiprocess_it=True):
'''
first 2 lines of file are count and description, then data (imgpath x1 y1 x2 y2) - looks like
289222
image_name x_1 y_1 x_2 y_2
img/Sheer_Pleated-Front_Blouse/img_00000001.jpg 072 079 232 273
convert the parent dir to a hydra cat using ready function
convert hydra to pixlevel v3
final freqs from deepfashion for pixlevel_categories_v3 were:
freq [0, 7412, 30, 0, 6575, 4159, 1765, 3110, 0, 2, 0, 0, 0, 0, 0, 0]
black out irrelevant areas (lower part for top cats, top part for lower cats, nothing for whole body or anything else
:param bbfile:
:return:
'''
global frequencies
dir = Utils.parent_dir(bbfile)
pardir = Utils.parent_dir(dir)
print('pardir '+str(pardir))
with open(bbfile,'r') as fp:
lines = fp.readlines()
    print('getting deepfashion category translations')
dir_to_catlist = create_nn_imagelsts.deepfashion_to_tg_hydra()
print(dir_to_catlist[0])
print('{} lines in bbfile'.format(len(lines)))
if multiprocess_it:
        n=12 #number of worker processes - could use multiprocessing.cpu_count() here instead
p=Pool(processes=n)
# p.map(convert_deepfashion_helper,((line,fp2,labelfile,dir_to_catlist,visual_output,pardir ) for line in lines))
# p.map(convert_deepfashion_helper,zip(lines,repeat(fp2),repeat(labelfile),repeat(dir_to_catlist),repeat(visual_output),repeat(pardir) ))
        for i in range(len(lines)//n):   #note: any remainder lines beyond a multiple of n are skipped
print('doing nagla {}'.format(i))
# print('freq '+str(frequencies))
# p.map(convert_deepfashion_helper,(lines[i*n+j],fp2,labelfile,dir_to_catlist,visual_output,pardir ))
nagla = []
for j in range(n):
nagla.append((lines[i*n+j],labelfile,dir_to_catlist,visual_output,pardir ))
# print('nagla len {} index {}'.format(len(nagla),i*n+j))
p.map(convert_deepfashion_helper,nagla)
# p.close()
# p.join()
# p.map(convert_deepfashion_helper,(lines[i*n+j],fp2,labelfile,dir_to_catlist,visual_output,pardir ))
# p.map(convert_deepfashion_helper,(lines[i*n+j],fp2,labelfile,dir_to_catlist,visual_output,pardir ))
else:
for line in lines:
convert_deepfashion_helper((line,labelfile,dir_to_catlist,visual_output,pardir))
def count_deepfashion_bbfile(bbfile='/data/jeremy/image_dbs/deep_fashion/category_and_attribute_prediction/Anno/list_bbox.txt',
labelfile='/data/jeremy/image_dbs/deep_fashion/category_and_attribute_prediction/df_pixlabels.txt',
filefilter='250x250.jpg',visual_output=False,
multiprocess_it=True):
'''
first lines of file looks like
289222
image_name x_1 y_1 x_2 y_2
img/Sheer_Pleated-Front_Blouse/img_00000001.jpg 072 079 232 273
convert the parent dir to a hydra cat using ready function
convert hydra to pixlevel v3
black out irrelevant areas (lower part for top cats, top part for lower cats, nothing for whole body or anything else
:param bbfile:
:return:
'''
global frequencies
dir = Utils.parent_dir(bbfile)
pardir = Utils.parent_dir(dir)
print('pardir '+str(pardir))
with open(bbfile,'r') as fp:
lines = fp.readlines()
    print('getting deepfashion category translations')
dir_to_catlist = create_nn_imagelsts.deepfashion_to_tg_hydra()
print(dir_to_catlist[0])
print('{} lines in bbfile'.format(len(lines)))
for line in lines:
if not '.jpg' in line:
continue #first and second lines are metadata
image_name,x1,y1,x2,y2 = line.split()
# print('file {} x1 {} y1 {} x2 {} y2 {}'.format(image_name,x1,y2,x2,y2))
image_dir = Utils.parent_dir(image_name)
image_dir = image_dir.split('/')[-1]
tgcat = create_nn_imagelsts.deepfashion_folder_to_cat(dir_to_catlist,image_dir)
if tgcat is None:
print('got no tg cat fr '+str(image_name))
continue
if not tgcat in constants.trendi_to_pixlevel_v3_map:
print('didnt get cat for {} {}'.format(tgcat,line))
raw_input('ret to cont')
continue
pixlevel_v3_cat = constants.trendi_to_pixlevel_v3_map[tgcat]
pixlevel_v3_index = constants.pixlevel_categories_v3.index(pixlevel_v3_cat)
frequencies[pixlevel_v3_index]+=1
print('freq '+str(frequencies))
print('tgcat {} v3cat {} index {}'.format(tgcat,pixlevel_v3_cat,pixlevel_v3_index))
def remove_irrelevant_parts_of_image(img_arr,bb_x1y1x2y2,pixlevel_v3_cat):
'''
this is for training a pixlevel v3 net with single bb per image
so we need to remove the stuff that wasnt bounded , ie anything outside box
except - for upper_cover and upper_under, can keep top , kill anything below lower bb bound
for lower_cover_long/short , kill anything above upper bb bound
:param img_arr:
:param pixlevel_v3_cat:
:return:
'''
upper_frac = 0.1 #kill this many pixels above lower bb bound too
lower_frac = 0.1 #kill this many below upper bound
side_frac = 0.05
upper_margin=int(upper_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
lower_margin=int(lower_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
side_margin= int(side_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
fillval = 255
swatch = img_arr[0:20,0:20,:]
fillval = np.mean(swatch,axis=(0,1))
fillval = np.array(fillval,dtype=np.uint8)
print('fillval:'+str(fillval))
h,w = img_arr.shape[0:2]
if pixlevel_v3_cat=='upper_cover_items' or pixlevel_v3_cat == 'upper_under_items':
top=0
bottom=max(0,bb_x1y1x2y2[3]-upper_margin)
left = max(0,bb_x1y1x2y2[0]-side_margin)
right = min(w,bb_x1y1x2y2[2]+side_margin)
img2=copy.copy(img_arr)
img2[:,:]=fillval
img2[top:bottom,left:right,:]=img_arr[top:bottom,left:right,:]
img_arr = img2
#
# bottom=bb_x1y1x2y2[3]-upper_margin
# left = bb_x1y1x2y2[0]
# right = bb_x1y1x2y2[2]
# img_arr[bottom:,left:right]=fillval
elif pixlevel_v3_cat=='lower_cover_long_items' or pixlevel_v3_cat == 'lower_cover_short_items':
top=min(h,bb_x1y1x2y2[1]+lower_margin)
left = max(0,bb_x1y1x2y2[0]-side_margin)
right = min(w,bb_x1y1x2y2[2]+side_margin)
img2=copy.copy(img_arr)
img2[:,:]=fillval
img2[top:,left:right,:]=img_arr[top:,left:right,:]##
img_arr = img2#
raw_input('ret to cont')
#
# top=bb_x1y1x2y2[1]+lower_margin
# left = bb_x1y1x2y2[0]
# right = bb_x1y1x2y2[2]
# img_arr[0:top,left:right,:]=fillval
elif pixlevel_v3_cat=='whole_body_items':
pass
else:
top=min(h,bb_x1y1x2y2[1]+lower_margin)
bottom=max(0,bb_x1y1x2y2[3]-upper_margin)
left = max(0,bb_x1y1x2y2[0]-side_margin)
right = min(w,bb_x1y1x2y2[2]+side_margin)
img2=copy.copy(img_arr)
img2[:,:,:]=fillval
img2[top:bottom,left:right,:]=img_arr[top:bottom,left:right,:]
img_arr = img2
raw_input('ret to cont')
return img_arr
def fadeout(img_arr, bb_x1y1x2y2,gc_img):
    '''
    take img, gc img, and bb, add background but fade it out outside of bb
    (note: appears unfinished - only the region above the bb gets a ramp)
    :param img_arr:
    :param bb_x1y1x2y2:
    :param gc_img:
    :return:
    '''
    fadeout = np.zeros_like(img_arr)
    fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[2]] = 1.0  #keep everything inside the bb
    fadeout[0:bb_x1y1x2y2[1],:]=np.arange(start=0,stop=1,step=1.0/bb_x1y1x2y2[1])
def grabcut_bb(img_arr,bb_x1y1x2y2,visual_output=False,clothing_type=None):
'''
grabcut with subsection of bb as fg, outer border of image bg, prbg to bb, prfg from bb to subsection
then kill anything outside of bb
    also anything thats utter white or black should get prbgd
return mask and gc image
:param img_arr:
:param bb_x1y1x2y2:
:return:
'''
orig_arr = copy.copy(img_arr)
labels = ['bg','fg','prbg','prfg'] #this is the order of cv2 values cv2.BG etc
bgdmodel = np.zeros((1, 65), np.float64)
fgdmodel = np.zeros((1, 65), np.float64)
mask = np.zeros(img_arr.shape[:2], np.uint8)
h,w = img_arr.shape[0:2]
#start with everything bg
mask[:,:] = cv2.GC_BGD
#big box (except for outer margin ) is pr_bg
pr_bg_frac = 0.05
pr_bg_margin_ud= int(pr_bg_frac*(h))
pr_bg_margin_lr= int(pr_bg_frac*(w))
mask[pr_bg_margin_ud:h-pr_bg_margin_ud,pr_bg_margin_lr:w-pr_bg_margin_lr] = cv2.GC_PR_BGD
    #prevent masks from adding together by doing boolean or
nprbgd = np.sum(mask==cv2.GC_PR_BGD)
print('after bigbox '+str(nprbgd))
# cv2.imwrite('perimeter.jpg',img_arr)
# imutils.count_values(mask,labels=labels)
# imutils.show_mask_with_labels(mask,labels,visual_output=True)
#everything in bb+margin is pr_fgd
pr_fg_frac = 0.0
pr_bg_margin_ud= int(pr_bg_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
pr_bg_margin_lr= int(pr_bg_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
mask[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[2]] = cv2.GC_PR_FGD
# print('after middlebox '+str(nprbgd))
# imutils.count_values(mask,labels)
# imutils.show_mask_with_labels(mask,labels,visual_output=True)
    #everything in a small box within the bb is fg (unless upper cover, in which case it is only probable fg -
    #it may be a coat over a shirt with the shirt visible)
center_frac=0.1
side_frac = 0.1
side_margin= int(side_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
upper_margin=int(center_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
lower_margin=int(center_frac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
center_y=(bb_x1y1x2y2[1]+bb_x1y1x2y2[3])/2
center_x=(bb_x1y1x2y2[0]+bb_x1y1x2y2[2])/2
top=max(0,center_y-upper_margin)
bottom=min(h,center_y+lower_margin)
left = max(0,center_x-side_margin)
right = min(w,center_x+side_margin)
print('fg box t {} b {} l {} r {}'.format(top,bottom,left,right))
if top>bottom:
temp=top
top=bottom
bottom=temp
if clothing_type == 'upper_cover':
mask[top:bottom,left:right] = cv2.GC_PR_FGD
else:
mask[top:bottom,left:right] = cv2.GC_FGD
# print('after innerbox ')
# imutils.count_values(mask,labels)
# imutils.show_mask_with_labels(mask,['bg','fg','prbg','prfg'],visual_output=True)
# print('unqies '+str(np.unique(mask)))
#add white and black vals as pr bgd
whitevals = cv2.inRange(img_arr,np.array([254,254,254]),np.array([255,255,255]))
mask[np.array(whitevals)!=0]=cv2.GC_PR_BGD
#fmi this could also be done with whitevals= (img_arr==[255,255,255]).all(-1))
blackvals = cv2.inRange(img_arr,np.array([0,0,0]),np.array([1,1,1]))
mask[np.array(blackvals)!=0]=cv2.GC_PR_BGD
nprbgd = np.sum(mask==cv2.GC_PR_BGD)
# print('after blackwhite ')
# imutils.count_values(mask,labels)
# imutils.show_mask_with_labels(mask,labels,visual_output=True)
logging.debug('imgarr shape b4r gc '+str(img_arr.shape))
rect = (bb_x1y1x2y2[0],bb_x1y1x2y2[1],bb_x1y1x2y2[2],bb_x1y1x2y2[3])
try:
#TODO - try more than 1 grabcut call in itr
itr = 1
cv2.grabCut(img=img_arr,mask=mask, rect=rect,bgdModel= bgdmodel,fgdModel= fgdmodel,iterCount= itr, mode=cv2.GC_INIT_WITH_MASK)
    except:
        print('grabcut exception ')
        return np.zeros(img_arr.shape[:2],np.uint8),img_arr   #return an empty mask too so callers expecting (mask,img) dont break
    #kill anything not in gc
mask2 = np.where((mask==2)|(mask==0),0,1).astype('uint8') ##0 and 2 are bgd and pr_bgd
#kill anything out of bb (except head)
# mask2[:bb_x1y1x2y2[1],0:w]=0 #top
mask2[bb_x1y1x2y2[3]:,0:w]=0 #bottom
mask2[0:h,0:bb_x1y1x2y2[0]]=0 #left
mask2[0:h,bb_x1y1x2y2[2]:w]=0 #right
img_arr = img_arr*mask2[:,:,np.newaxis]
fadeout = np.zeros([h,w],dtype=np.float )
fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[2]]=1.0
# fadeout[0:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[2]]=1.0
fadefrac = 0.1
fade_dist_ud = int(fadefrac*(bb_x1y1x2y2[3]-bb_x1y1x2y2[1]))
fade_dist_rl = int(fadefrac*(bb_x1y1x2y2[2]-bb_x1y1x2y2[0]))
fadevec = np.arange(start=0,stop=1,step=1.0/fade_dist_ud)
fademat = np.tile(fadevec,(bb_x1y1x2y2[2]-bb_x1y1x2y2[0],1))
fademat=fademat.transpose()
fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[1]+fade_dist_ud,bb_x1y1x2y2[0]:bb_x1y1x2y2[2]]=fademat #top
fadeout[bb_x1y1x2y2[3]-fade_dist_ud:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[2]]=(1-fademat) #bottom
fadevec = np.arange(start=0,stop=1,step=1.0/fade_dist_rl)
fademat = np.tile(fadevec,(bb_x1y1x2y2[3]-bb_x1y1x2y2[1],1))
fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[0]+fade_dist_rl]=fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]:bb_x1y1x2y2[0]+fade_dist_rl]*fademat
#np.maximum(fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]-fade_dist_rl:bb_x1y1x2y2[0]],fademat)
fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[2]-fade_dist_rl:bb_x1y1x2y2[2]]= fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[2]-fade_dist_rl:bb_x1y1x2y2[2]] * (1-fademat)
#=np.maximum(fadeout[bb_x1y1x2y2[1]:bb_x1y1x2y2[3],bb_x1y1x2y2[0]-fade_dist_rl:bb_x1y1x2y2[0]],(1-fademat))
skin_index = constants.pixlevel_categories_v3.index('skin')
skin_mask = kassper.skin_detection_fast(orig_arr) * 255
if visual_output:
cv2.imshow('skin',skin_mask)
cv2.waitKey(0)
fadeout = np.where(skin_mask!=0,skin_mask,fadeout)
# mask2 = np.where(skin_mask!=0,constants.pixlevel_categories_v3.index('skin'),mask2)
# cv2.imshow('fade',fadeout)
# cv2.waitKey(0)
# mask2[:bb_x1y1x2y2[1],0:w]=0 #top
# mask2[bb_x1y1x2y2[3]:,0:w]=0 #bottom
# mask2[0:h,0:bb_x1y1x2y2[0]]=0 #left
# mask2[0:h,bb_x1y1x2y2[2]:w]=0 #right
# img_arr = img_arr*mask2[:,:,np.newaxis]
#can use img_arr (after gc) here instead of orig_arr
dofade=False
if dofade:
img_arr = (orig_arr*fadeout[:,:,np.newaxis]).astype('uint8')
# cv2.imshow('after orig*fadeout',img_arr)
img_arr = np.where(skin_mask[:,:,np.newaxis]!=0,orig_arr,img_arr)
# cv2.imshow('after skin add',img_arr)
# cv2.waitKey(0)
# negmask = np.where(mask2==0,1,0).astype('uint8')
# imutils.show_mask_with_labels(negmask,['0','1','2','3'])
# # fadeout = fadeout/255.0 #this was defined as float so its ok
fillval = np.mean(orig_arr[0:20,0:20],axis=(0,1))
print('fillval '+str(fillval))
bgnd_arr = np.zeros_like(orig_arr).astype('uint8')
bgnd_arr[:,:]=fillval
# bgnd_arr = np.where(fadeout!=0,(fadeout[:,:,np.newaxis]*bgnd_arr),bgnd_arr) #+orig_arr*(fadeout[:,:,np.newaxis]).astype('uint8')
img_arr = np.where(img_arr==0,bgnd_arr,img_arr)
# cv2.imshow('bgnd arr',bgnd_arr)
# cv2.waitKey(0)
if(visual_output):
# plt.imshow(img),plt.colorbar(),plt.show()
cv2.imshow('after gc',img_arr)
cv2.waitKey(0)
logging.debug('imgarr shape after gc '+str(img_arr.shape))
return mask2,img_arr
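# --- illustrative sketch (added), not used by the rest of the module ---
# The fade ramp grabcut_bb builds along the bb edges, shown on a toy box: a linear ramp
# from 0 to 1 over fade_dist rows, tiled across the box width, so the blend toward the
# background strengthens near the bb border. Sizes here are made up.
def _example_fade_ramp(box_w=10, fade_dist=5):
    fadevec = np.arange(start=0, stop=1, step=1.0 / fade_dist)   # [0. , 0.2, 0.4, 0.6, 0.8]
    fademat = np.tile(fadevec, (box_w, 1)).transpose()           # shape (fade_dist, box_w)
    return fademat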
def dir_of_catalog_images_to_pixlevel(catalog_images_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress',
swatch_bgnds_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/tg/backgrounds/textures/kept',
person_bgnds_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/tg/backgrounds/street_scenes/kept',
destination_img_dir = '/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress_images',
destination_label_dir = '/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress_labels',
manual_oversight=False):
files = [os.path.join(catalog_images_dir,f) for f in os.listdir(catalog_images_dir)]
human_bgnds = [os.path.join(person_bgnds_dir,f) for f in os.listdir(person_bgnds_dir)]
inhuman_bgnds = [os.path.join(swatch_bgnds_dir,f) for f in os.listdir(swatch_bgnds_dir)]
dress_index = constants.pixlevel_categories_v3.index('whole_body_items')
Utils.ensure_dir(destination_img_dir)
Utils.ensure_dir(destination_label_dir)
n=0
n_tot = len(files)
for f in files:
n=n+1
print('doing {}/{} {}'.format(n,n_tot,f))
img_arr = cv2.imread(f)
if img_arr is None:
print('got none for {}'.format(f))
continue
human_bgnd = Utils.get_cv2_img_array(random.choice(human_bgnds))
inhuman_bgnd = Utils.get_cv2_img_array(random.choice(inhuman_bgnds))
logging.debug('sizes: {} human bgnd {} inbgnd {}'.format(img_arr.shape,human_bgnd.shape,inhuman_bgnd.shape))
dest_imagesize=(300,300) #chosen to get figures to fit into bgnd - bgnd resized, figure not (margin added instead)
human_bgnd = cv2.resize(human_bgnd,dest_imagesize) #dont worry about warping just fill all image
inhuman_bgnd = cv2.resize(inhuman_bgnd,dest_imagesize)
img_arr = imutils.resize_by_adding_border(img_arr,output_size=dest_imagesize)
mask,img = image_to_pixlevel_no_bb(img_arr,clothing_indices=[dress_index],human_bgd = human_bgnd,inhuman_bgnd = inhuman_bgnd,visual_output=False)
save = True
if manual_oversight:
imutils.show_mask_with_labels(mask,labels=constants.pixlevel_categories_v3,original_image=img,visual_output=True)
k=cv2.waitKey(0)
print('k='+str(k))
if save:
dest_imgname = os.path.join(destination_img_dir,os.path.basename(f))
cv2.imwrite(dest_imgname,img)
dest_lblname = os.path.join(destination_label_dir,os.path.basename(f)).replace('.jpg','.png')
cv2.imwrite(dest_lblname,mask)
print('wrote img to {} and label to {}'.format(dest_imgname,dest_lblname))
def image_to_pixlevel_no_bb(img_arr,clothing_indices,visual_output=True,labels=constants.pixlevel_categories_v3,human_bgd=None,inhuman_bgnd=None):
'''
grabcut with subsection of bb as fg, outer border of image bg, prbg to bb, prfg from bb to subsection
then kill anything outside of bb
    also anything thats utter white or black should get prbgd
return mask and gc image
:param img_arr:
    :param clothing_indices: first is top, second is bottom, or the only entry is wholebody
:return:
'''
orig_arr = copy.copy(img_arr)
gc_mask_labels = ['bg','fg','prbg','prfg'] #this is the order of cv2 values cv2.BG etc
bgdmodel = np.zeros((1, 65), np.float64)
fgdmodel = np.zeros((1, 65), np.float64)
gc_mask = np.zeros(img_arr.shape[:2], np.uint8) #gc_mask for gc with prFg etc
mask= np.zeros(img_arr.shape[:2],np.uint8) #mask(also a gc_mask) with item numbers
h,w = img_arr.shape[0:2]
#start with everything pr_bg
gc_mask[:,:] = cv2.GC_PR_BGD
#outermost box is _bg
bg_frac = 0.05
bg_margin_ud= int(bg_frac*(h))
bg_margin_lr= int(bg_frac*(w))
gc_mask[0:bg_margin_ud,:] = cv2.GC_BGD
gc_mask[h-bg_margin_ud:h,:] = cv2.GC_BGD
gc_mask[:,0:bg_margin_lr] = cv2.GC_BGD
gc_mask[:,w-bg_margin_lr:w] = cv2.GC_BGD
if visual_output:
imutils.show_mask_with_labels(gc_mask,labels,visual_output=True,original_image=img_arr)
    #prevent gc_masks from adding together by doing boolean or
nprbgd = np.sum(gc_mask==cv2.GC_PR_BGD)
logging.debug('after bigbox '+str(nprbgd))
#see if theres a face 1-501510371 refno
ff_cascade = background_removal.find_face_cascade(img_arr, max_num_of_faces=10)
likely_fg_bb = None
face = None
if ff_cascade['are_faces'] :
faces = ff_cascade['faces']
if faces == []:
print('ffascade reported faces but gave none')
else:
face = background_removal.choose_faces(img_arr,faces,1)[0]
print('got a face: {}'.format(face))
extra_height=8#as measured in faces
extra_width=3
head_extra = face[2]/1.5
likely_fg_bb = [face[0]+face[2]/2-face[2]*extra_width/2,face[1]-head_extra,face[2]*extra_width,face[3]*extra_height]
if likely_fg_bb is None: #assume middle of image
top_margin=.10 #as measured in % of image height
bottom_margin=0.1
left_margin= 0.3
right_margin= 0.3
likely_fg_bb = [int(left_margin*w),int(top_margin*h),w*(1-(left_margin+right_margin)),h*(1-(top_margin+bottom_margin))]
logging.debug('pre-check likely fg bb:{} h {} w {} shape {} '.format(likely_fg_bb,h,w,img_arr.shape))
#make sure nothing out of bounds
likely_fg_bb=[max(likely_fg_bb[0],0),max(likely_fg_bb[1],0),max(likely_fg_bb[2],0),max(likely_fg_bb[3],0)]
likely_fg_bb=[min(likely_fg_bb[0],w),min(likely_fg_bb[1],h),min(likely_fg_bb[2],w-likely_fg_bb[0]),min(likely_fg_bb[3],h-likely_fg_bb[1])]
likely_fg_bb=[int(likely_fg_bb[0]),int(likely_fg_bb[1]),int(likely_fg_bb[2]),int(likely_fg_bb[3])]
logging.debug('likely fg bb:{}'.format(likely_fg_bb))
gc_mask[likely_fg_bb[1]:likely_fg_bb[1]+likely_fg_bb[3],likely_fg_bb[0]:likely_fg_bb[0]+likely_fg_bb[2]] = cv2.GC_PR_FGD
# print('after face/margins ')
# imutils.count_values(gc_mask,labels)
# imutils.show_mask_with_labels(gc_mask,gc_mask_labels,visual_output=True,original_image=img_arr)
# if clothing_type == 'upper_cover':
# gc_mask[top:bottom,left:right] = cv2.GC_PR_FGD
# else:
# gc_mask[top:bottom,left:right] = cv2.GC_FGD
logging.debug('after mainbox b4 blackwhite ')
# imutils.count_values(gc_mask,gc_mask_labels)
#add white and black vals as pr bgd
white_tolerance = 5 #anything from 255-this to 255 is called white bgnd
black_tolerance = 5 #anything from 0 to this is called black gbgnd
whitevals = cv2.inRange(img_arr,np.array([255-white_tolerance,255-white_tolerance,255-white_tolerance]),np.array([255,255,255]))
gc_mask[np.array(whitevals)!=0]=cv2.GC_PR_BGD
#fmi this could also be done with whitevals= (img_arr==[255,255,255]).all(-1))
blackvals = cv2.inRange(img_arr,np.array([0,0,0]),np.array([black_tolerance,black_tolerance,black_tolerance]))
gc_mask[np.array(blackvals)!=0]=cv2.GC_PR_BGD
# print('after blackwhite w {} b {}'.format(np.count_nonzero(whitevals),np.count_nonzero(blackvals)))
# imutils.count_values(gc_mask,gc_mask_labels)
# imutils.show_mask_with_labels(gc_mask,gc_mask_labels,visual_output=True,original_image=img_arr)
logging.debug('imgarr shape b4r gc '+str(img_arr.shape))
rect = (0,0,1,1)
try:
#TODO - try more than 1 grabcut call in itr
itr = 1
cv2.grabCut(img=img_arr,mask=gc_mask, rect=rect,bgdModel= bgdmodel,fgdModel= fgdmodel,iterCount= itr, mode=cv2.GC_INIT_WITH_MASK)
    except:
        print('grabcut exception '+str( sys.exc_info()[0]))
        print(sys.exc_info())
        print(sys.exc_info()[1])
        return np.zeros(img_arr.shape[:2],np.uint8),img_arr   #return an empty mask too so callers expecting (mask,img) dont break
gc_mask2 = np.where((gc_mask==2)|(gc_mask==0),0,1).astype('uint8') ##0 and 2 are bgd and pr_bgd
#kill anything out of bb (except head)
# gc_mask2[:bb_x1y1x2y2[1],0:w]=0 #top
# gc_mask2[bb_x1y1x2y2[3]:,0:w]=0 #bottom
# gc_mask2[0:h,0:bb_x1y1x2y2[0]]=0 #left
# gc_mask2[0:h,bb_x1y1x2y2[2]:w]=0 #right
img_arr = img_arr*gc_mask2[:,:,np.newaxis]
if visual_output:
cv2.imshow('right after gc',img_arr)
cv2.waitKey(0)
skin_index = constants.pixlevel_categories_v3.index('skin')
skin_tolerance = 1.0
if face is not None:
# skin_mask = kassper.skin_detection_fast(orig_arr) * 255 #sdfdsf
skin_mask = kassper.skin_detection_fast(orig_arr,face=face,tol=skin_tolerance) * 255
else:
skin_mask = kassper.skin_detection_fast(orig_arr,tol=skin_tolerance) * 255
# if visual_output:
# cv2.imshow('skin',skin_mask)
# cv2.waitKey(0)
#erode skin to eliminate 1x1 edges detected as skin
kernel = np.ones((2,2),np.uint8)
skin_mask = cv2.erode(skin_mask,kernel,iterations = 1)
skin_mask = cv2.dilate(skin_mask,kernel,iterations = 1)
if visual_output:
cv2.imshow('skin after erode/dilate',skin_mask)
cv2.waitKey(0)
gc_mask = np.where(skin_mask!=0,cv2.GC_FGD,gc_mask)
if visual_output:
imutils.show_mask_with_labels(gc_mask,gc_mask_labels,visual_output=True,original_image=img_arr)
img_arr = np.where(skin_mask[:,:,np.newaxis]!=0,orig_arr,img_arr)
#take out white black aftewr gc too since gc sometimes includes these
#add white and black vals as pr bgd
    white_tolerance = 5 #anything from 255-this to 255 is called white bgnd
    black_tolerance = 5 #anything from 0 to this is called black bgnd
whitevals = cv2.inRange(img_arr,np.array([255-white_tolerance,255-white_tolerance,255-white_tolerance]),np.array([255,255,255]))
#fmi this could also be done with whitevals= (img_arr==[255,255,255]).all(-1))
blackvals = cv2.inRange(img_arr,np.array([0,0,0]),np.array([black_tolerance,black_tolerance,black_tolerance]))
img_arr = np.where(whitevals[:,:,np.newaxis]!=0 ,0,img_arr)
img_arr = np.where(blackvals[:,:,np.newaxis]!=0 ,0,img_arr)
if visual_output:
cv2.imshow('img after skin',img_arr)
cv2.waitKey(0)
#get rid of outermost pixels , they seem to wind up white a lot of time
kernel = np.ones((1,1),np.uint8)
    current_nonzero = np.where(img_arr!=0,1,0)[:,:,0].astype(dtype=np.uint8) #maybe there's a better way but this works and is easy to remember - 1st chan of nonzeros arr
logging.debug('n before erode:{} mask {} size {}'.format(np.count_nonzero(img_arr),np.count_nonzero(current_nonzero),current_nonzero.shape))
current_nonzero = cv2.erode(current_nonzero,kernel,iterations = 1)
    img_arr = np.multiply(current_nonzero[:,:,np.newaxis],img_arr) #should really look into the behaviour of newaxis but believe this is kosher
logging.debug('n after erode:{} mask {}'.format(np.count_nonzero(img_arr),np.count_nonzero(current_nonzero)))
if visual_output:
cv2.imshow('after getting rid of outermost',img_arr)
cv2.waitKey(0)
#now create mask from img_arr (exploiting gc action)
#either two part or one part
if len(clothing_indices)==2:
if face:
n_heads_for_ysplit = 3
y_split = face[1]+face[3]*n_heads_for_ysplit
mask[0:y_split,:] = clothing_indices[0] * np.array([img_arr!=0])[0:y_split,:]
mask[y_split:,:] = clothing_indices[1] * np.array([img_arr!=0])[y_split:,:]
#prob try gc on this would be better
elif len(clothing_indices) == 1 :
mask = np.where(img_arr!=0, clothing_indices[0],0)
else:
print('3 parts not dealt with ')
#remove anything above bottom of head box
if face:
extra_decapitation = int(face[3]*0.2)
mask[0:face[1]+face[3]+extra_decapitation,:] = 0
if visual_output:
imutils.show_mask_with_labels(mask,gc_mask_labels,original_image=img_arr)
#label skin
skin_index = labels.index('skin')
mask[skin_mask!=0] = skin_index
bgnd_arr = None
if human_bgd is not None: #street scenes or the like for people
if face:
print('doing bgnd human due to face')
human_bgd = Utils.get_cv2_img_array(human_bgd)
bgnd_arr = imutils.resize_keep_aspect(human_bgd,output_size=(img_arr.shape[0],img_arr.shape[1]))
elif inhuman_bgnd is not None: #brick wall or table or the like for clothing items alone
print('doing bgnd inhuman due to no face')
inhuman_bgnd = Utils.get_cv2_img_array(inhuman_bgnd)
bgnd_arr = imutils.resize_keep_aspect(inhuman_bgnd,output_size=(img_arr.shape[0],img_arr.shape[1]))
elif inhuman_bgnd is not None: #brick wall or table or the like for clothing items alone
print('doing bgnd inhuman due to no human')
inhuman_bgnd = Utils.get_cv2_img_array(inhuman_bgnd)
bgnd_arr = imutils.resize_keep_aspect(inhuman_bgnd,output_size=(img_arr.shape[0],img_arr.shape[1]))
    if bgnd_arr is None:
print('doing bgnd static')
fillval = np.mean(orig_arr[0:20,0:20],axis=(0,1))
print('fillval '+str(fillval))
bgnd_arr = np.zeros_like(orig_arr).astype('uint8')
bgnd_arr[:,:]=fillval
# bgnd_arr = np.where(fadeout!=0,(fadeout[:,:,np.newaxis]*bgnd_arr),bgnd_arr) #+orig_arr*(fadeout[:,:,np.newaxis]).astype('uint8')
img_arr = np.where(img_arr==0,bgnd_arr,img_arr) #to give bgnd, can also fill in
# cv2.imshow('bgnd arr',bgnd_arr)
# cv2.waitKey(0)
if(visual_output):
# plt.imshow(img),plt.colorbar(),plt.show()
imutils.show_mask_with_labels(mask,labels,original_image=img_arr,visual_output=True)
cv2.imshow('after gc',img_arr)
cv2.waitKey(0)
logging.debug('imgarr shape after gc '+str(img_arr.shape))
return mask,img_arr
def inspect_yolo_annotations(dir='/media/jeremy/9FBD-1B00/data/image_dbs/hls/',
yolo_annotation_folder=None,
annotation_filter='.txt',image_filter='.jpg',manual_verification=True,verified_folder='verified_labels'):
'''
todo - this should call inspect_yolo_annotation to save duplicate code
the yolo annotations are like
object1_class bb0 bb1 bb2 bb3
object2_class bb0 bb1 bb2 bb3
where the bbs are x_center,y_center,width,height in percentages of image size
:param dir:
:param yolo_annotation_folder:
:param img_folder:
:param annotation_filter:
:param image_filter:
:param manual_verification:
:param verified_folder:
:return:
'''
#https://www.youtube.com/watch?v=c-vhrv-1Ctg jinjer
if yolo_annotation_folder is None:
annotation_dir = dir
else:
annotation_dir = yolo_annotation_folder
verified_dir = os.path.join(dir,verified_folder)
Utils.ensure_dir(verified_dir)
img_dir = dir
if annotation_filter:
annotation_files = [os.path.join(annotation_dir,f) for f in os.listdir(annotation_dir) if annotation_filter in f]
else:
annotation_files = [os.path.join(annotation_dir,f) for f in os.listdir(annotation_dir)]
classes = constants.hls_yolo_categories
print('inspecting {} yolo annotations in {}'.format(len(annotation_files),annotation_dir))
for f in annotation_files:
print('trying '+f)
annotation_base = os.path.basename(f)
if image_filter:
imgfile = annotation_base.replace(annotation_filter,image_filter)
img_path = os.path.join(img_dir,imgfile)
else:
imgfile = annotation_base[:-4]+'.jpg'
img_path = os.path.join(img_dir,imgfile)
if not os.path.exists(img_path): #yecch , this shouldnt actually get used but still
imgfile = annotation_base[:-4]+'.png'
img_path = os.path.join(img_dir,imgfile)
img_arr = cv2.imread(img_path)
if img_arr is None:
            print('could not get '+img_path)
continue
h,w = img_arr.shape[0:2]
with open(f,'r') as fp:
lines = fp.readlines()
for line in lines:
if line.strip() == '':
print('empty line')
continue
print('got line:'+line.strip('\n'))
print('for image {} dims h{}Xw{}:'.format(imgfile,h,w))
if line.strip()[0]=='#':
print('commented line')
continue
object_class,bb0,bb1,bb2,bb3 = line.split()
bb_xywh = imutils.yolo_to_xywh([float(bb0),float(bb1),float(bb2),float(bb3)],(h,w))
classname = classes[int(object_class)]
print('class {} bb_xywh {}'.format(classname,bb_xywh))
img_arr = imutils.bb_with_text(img_arr,bb_xywh,classname)
cv2.imshow('img',img_arr)
fp.close()
print('(a)ccept, any other key to not accept '+str(f))
k=cv2.waitKey(0)
if manual_verification:
if k == ord('a'):
base = os.path.basename(f)
verified_path = os.path.join(verified_dir,base)
                print('accepting image, writing to '+str(verified_path))
with open(verified_path,'w') as fp2:
fp2.writelines(lines)
else:
print('not accepting image')
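# Illustrative sketch (an assumption, not part of the original module): the yolo
# label format described in the docstring above stores x_center, y_center, width
# and height as fractions of the image size.  imutils.yolo_to_xywh is assumed to
# perform roughly the conversion below; this hypothetical helper only documents it.
def _yolo_to_xywh_sketch(yolo_bb, image_hw):
    # yolo_bb = [x_center_frac, y_center_frac, width_frac, height_frac]
    x_c, y_c, w_frac, h_frac = yolo_bb
    img_h, img_w = image_hw
    bb_w = w_frac * img_w
    bb_h = h_frac * img_h
    x = x_c * img_w - bb_w / 2.0
    y = y_c * img_h - bb_h / 2.0
    return [int(round(x)), int(round(y)), int(round(bb_w)), int(round(bb_h))]
# e.g. _yolo_to_xywh_sketch([0.5, 0.5, 0.2, 0.4], (480, 640)) -> [256, 144, 128, 192]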
def get_yolo_annotation(img_path,yolo_annotation_folder = None,visual_output=False):
'''
get yolo annotations for a given image (assume parallel .txt file in same folder or in yolo_annotation_folder)
return in 'api form' namely
{'data': [{'confidence': None, 'object': 'bag', 'bbox': [454, 306, 512, 360]},...,]}
:param img_path:
:param yolo_annotation_folder:
:return:
'''
img_dir = os.path.dirname(img_path)
if yolo_annotation_folder is None:
# yolo_annotation_folder = img_dir+'labels'
yolo_annotation_folder = img_dir
yolo_annotation_basename = os.path.basename(img_path).replace('.jpg','.txt').replace('.png','.txt').replace('.jpeg','.txt')
yolo_annotation_file = os.path.join(yolo_annotation_folder,yolo_annotation_basename)
api_annotations = inspect_yolo_annotation(yolo_annotation_file,img_path,visual_output=visual_output)
return api_annotations
def inspect_yolo_trainingfile(trainingfile,yolo_annotation_folder=None,filter=None,replace_this=None,with_this=None,randomize=True):
'''
read the trainingfile that yolo reads (list of image files, labels in parallel dirs)
'''
print('inspecting trainingfile {}'.format(trainingfile))
if not os.path.exists(trainingfile):
print('os thinks {} does not exist'.format(trainingfile))
else:
print('{} exists'.format(trainingfile))
with open(trainingfile,'r') as fp:
lines = fp.readlines()
fp.close()
if randomize:
random.shuffle(lines)
if lines is None or lines == []:
print('got nothin from {}'.format(trainingfile))
return None
print('{} files in {}'.format(len(lines),trainingfile))
for line in lines:
if filter and not filter in line:
logging.warning('no {} in {} '.format(filter,line))
continue
logging.debug('line:'+str(line))
img_path = line.strip('\n')
if replace_this:
img_path=img_path.replace(replace_this,with_this)
img_dir = os.path.dirname(img_path)
if yolo_annotation_folder is None:
# yolo_annotation_folder = img_dir+'labels'
yolo_annotation_folder = img_dir
yolo_annotation_basename = os.path.basename(img_path).replace('.jpg','.txt').replace('.png','.txt').replace('.jpeg','.txt')
yolo_annotation_file = os.path.join(yolo_annotation_folder,yolo_annotation_basename)
inspect_yolo_annotation(yolo_annotation_file,img_path)
def inspect_yolo_annotation(annotation_file,img_file,visual_output=True,classes = constants.hls_yolo_categories):
'''
get yolo annotations for a given image (assume parallel .txt file in same folder or in yolo_annotation_folder)
return in 'api form' namely
{'data': [{'confidence': None, 'object': 'bag', 'bbox': [454, 306, 512, 360]},...,]}
:param annotation_file:
:param img_file:
:param visual_output:
:return:
'''
print('inspecting yolo annotation {} img {}'.format(annotation_file,img_file))
# classes = constants.hls_yolo_categories
    if not os.path.exists(annotation_file):
        logging.warning('annotations file {} does not exist'.format(annotation_file))
        return None
    img_arr = cv2.imread(img_file)
    if img_arr is None:
        logging.warning('could not read image file {}'.format(img_file))
        return None
h,w = img_arr.shape[0:2]
bbs=[]
annotations = {'data':[]}
with open(annotation_file,'r') as fp:
lines = fp.readlines()
for line in lines:
if line.strip() == '':
print('empty line')
continue
print('got line:'+line)
if line.strip()[0]=='#':
print('commented line')
continue
object_class,bb0,bb1,bb2,bb3 = line.split()
bb_xywh = imutils.yolo_to_xywh([float(bb0),float(bb1),float(bb2),float(bb3)],(h,w))
bbs.append(bb_xywh)
classname = classes[int(object_class)]
print('class {} bb_xywh {} yolo {} h{} w{}'.format(classname,bb_xywh,[bb0,bb1,bb2,bb3],h,w))
annotation_dict = {'confidence':1.0,'object':classname,'bbox':bb_xywh}
annotations['data'].append(annotation_dict)
imutils.bb_with_text(img_arr,bb_xywh,classname)
if visual_output:
cv2.imshow('yolo_inspector',img_arr)
cv2.waitKey(0)
return(annotations)
def apply_color_map(image_array, labels):
color_array = np.zeros((image_array.shape[0], image_array.shape[1], 3), dtype=np.uint8)
for label_id, label in enumerate(labels):
# set all pixels with the current label to the color of the current label
color_array[image_array == label_id] = label["color"]
return color_array
def mapillary_people_only(dir='/data/jeremy/image_dbs/hls/mapillary/',visual_output=False):
# a nice example
os.chdir(dir)
# read in config file
with open('config.json') as config_file:
config = json.load(config_file)
# in this example we are only interested in the labels
labels = config['labels']
# print labels
print("There are {} labels in the config file".format(len(labels)))
for label_id, label in enumerate(labels):
print("{:>30} ({:2d}): {:<50} has instances: {}".format(label["readable"], label_id, label["name"], label["instances"]))
#for converting the humans , labels of interest are
# Person (19): human--person
# Bicyclist (20): human--rider--bicyclist
# Motorcyclist (21): human--rider--motorcyclist
# Other Rider (22): human--rider--other-rider
label_id_person=19
label_id_bicyclist=20
label_id_motorcyclist=21
label_id_other_rider=22
# set up paths for every image
keys = [f[:-4] for f in os.listdir('training/images/')]
for key in keys:
image_path = "training/images/{}.jpg".format(key)
label_path = "training/labels/{}.png".format(key)
instance_path = "training/instances/{}.png".format(key)
# load images
base_image = Image.open(image_path)
label_image = Image.open(label_path)
instance_image = Image.open(instance_path)
# convert labeled data to numpy arrays for better handling
label_array = np.array(label_image)
instance_array = np.array(instance_image, dtype=np.uint16)
# now we split the instance_array into labels and instance ids
instance_label_array = np.array(instance_array / 256, dtype=np.uint8)
instance_ids_array = np.array(instance_array % 256, dtype=np.uint8)
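        # i.e. each 16-bit instance value encodes label_id * 256 + instance_id,
        # so e.g. (assumed value) 4885 decodes to label 19 (person), instance id 21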
unique_labels = np.unique(label_array)
if not(label_id_person in unique_labels or label_id_bicyclist in unique_labels or label_id_motorcyclist in unique_labels or label_id_other_rider in unique_labels):
print('no person in this image')
continue
people_only_array = np.zeros((label_array.shape[0], label_array.shape[1]), dtype=np.uint8)
people_only_array[label_array == label_id_person] = 1 #labels here are people=1, everything else=0
people_only_array[label_array == label_id_bicyclist] = 1
people_only_array[label_array == label_id_motorcyclist] = 1
people_only_array[label_array == label_id_other_rider] = 1
label_array=people_only_array
# for visualization, we apply the colors stored in the config
colored_label_array = apply_color_map(label_array, labels)
colored_instance_label_array = apply_color_map(instance_label_array, labels)
if visual_output:
# plot the result
fig, ax = plt.subplots(nrows=2, ncols=2, figsize=(20,15))
ax[0][0].imshow(base_image)
ax[0][0].get_xaxis().set_visible(False)
ax[0][0].get_yaxis().set_visible(False)
ax[0][0].set_title("Base image")
ax[0][1].imshow(colored_label_array)
ax[0][1].get_xaxis().set_visible(False)
ax[0][1].get_yaxis().set_visible(False)
ax[0][1].set_title("Labels")
ax[1][0].imshow(instance_ids_array)
ax[1][0].get_xaxis().set_visible(False)
ax[1][0].get_yaxis().set_visible(False)
ax[1][0].set_title("Instance IDs")
ax[1][1].imshow(colored_instance_label_array)
ax[1][1].get_xaxis().set_visible(False)
ax[1][1].get_yaxis().set_visible(False)
ax[1][1].set_title("Labels from instance file (identical to labels above)")
# raw_input('ret to cont')
plt.show()
time.sleep(0.1)
plt.close()
# fig.savefig('MVD_plot.png')
def show_annotations_xywh(bb_xywh,img_arr):
classes = constants.hls_yolo_categories
if img_arr is None:
logging.warning('got no image')
return
h,w = img_arr.shape[0:2]
    for bb in bb_xywh:
        print('bb {} h{} w{}'.format(bb,h,w))
        cv2.rectangle(img_arr,(bb[0],bb[1]),(bb[0]+bb[2],bb[1]+bb[3]),color=[100,255,100],thickness=2)
        img_arr[bb[1]:bb[1]+20,bb[0]:bb[0]+bb[2]]=img_arr[bb[1]:bb[1]+20,bb[0]:bb[0]+bb[2]]/2+[100,50,100]
    cv2.imshow('out',img_arr)
    cv2.waitKey(0)
def inspect_tg_dict(d,visual_output=True,check_img_existence=True):
'''
read file like:
[{'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h],'sId':104}],
{'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h],'sId',105} ,...]
:param jsonfile:
:return:
'''
filename = d['filename']
annotations = d['annotations']
n_bbs = len(annotations)
print('filename {} with {} annotations'.format(filename,n_bbs))
if 'dimensions_h_w_c' in d:
dims = d['dimensions_h_w_c']
print('dimensions {}'.format(dims))
if check_img_existence:
if not os.path.exists(filename):
logging.warning('could not find '+filename+' WARNING')
if visual_output:
img_arr = cv2.imread(filename)
if img_arr is None:
logging.warning('could not read '+filename+' WARNING')
for annotation in annotations:
object = annotation['object']
bb_xywh = annotation['bbox_xywh']
if visual_output:
imutils.bb_with_text(img_arr,bb_xywh,object)
if visual_output:
cv2.imshow('out',img_arr)
cv2.waitKey(0)
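# Illustrative example entry (values assumed, not taken from any real annotation
# file) of the list-of-dicts format that inspect_tg_dict above and inspect_json
# below both describe:
_EXAMPLE_TG_DICT_ENTRY = {
    'filename': 'image423.jpg',
    'dimensions_h_w_c': [480, 640, 3],
    'annotations': [
        {'object': 'person', 'bbox_xywh': [100, 50, 80, 200]},
        {'object': 'person', 'bbox_xywh': [300, 60, 90, 210], 'sId': 104},
    ],
}
# e.g. inspect_tg_dict(_EXAMPLE_TG_DICT_ENTRY, visual_output=False, check_img_existence=False)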
def inspect_json(jsonfile='rio.json',visual_output=False,check_img_existence=True,movie=False):
'''
read file like:
[{'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h],'sId':104}],
{'filename':'image423.jpg','annotations':[{'object':'person','bbox_xywh':[x,y,w,h]},{'object':'person','bbox_xywh':[x,y,w,h],'sId',105} ,...]
:param jsonfile:
:return:
'''
#todo add visual inspect here
object_counts = {}
print('inspecting json annotations in '+jsonfile)
with open(jsonfile,'r') as fp:
annotation_list = json.load(fp)
# Define the codec and create VideoWriter object
if movie:
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('output.mp4',fourcc, 20.0, (640,480))
# out = cv2.VideoWriter('output.avi',fourcc, 20.0, (640,480))
for d in annotation_list:
# print d
filename = d['filename']
annotations = d['annotations']
sid = None
if 'sId' in d:
sid = d['sId']
n_bbs = len(annotations)
print('file {}\n{} annotations {}\nsid {}'.format(filename,n_bbs,annotations,sid))
if check_img_existence:
if not os.path.exists(filename):
                print('WARNING could not find '+filename+' WARNING')
if visual_output:
img_arr = cv2.imread(filename)
if img_arr is None:
                print('WARNING could not read '+filename+' WARNING')
for annotation in annotations:
object = annotation['object']
bb_xywh = annotation['bbox_xywh']
if visual_output:
imutils.bb_with_text(img_arr,bb_xywh,object)
if not object in object_counts:
object_counts[object] = 1
else:
object_counts[object] = object_counts[object] + 1
if visual_output:
cv2.imshow('out',img_arr)
cv2.waitKey(0)
if movie:
out.write(img_arr)
print('n annotated files {}'.format(len(annotation_list)))
print('bb counts by category {}'.format(object_counts))
if visual_output:
cv2.destroyAllWindows()
if movie:
out.release()
def augment_yolo_bbs(yolo_annotation_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/hls/object-detection-crowdailabels',
img_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/hls/object-detection-crowdai',
annotation_filter='.txt',image_filter='.jpg'):
# bbs,img_arr = inspect_yolo_annotation(lblname,img_filename)
#https://www.youtube.com/watch?v=c-vhrv-1Ctg jinjer
annotation_dir = yolo_annotation_dir
annotation_files = [os.path.join(annotation_dir,f) for f in os.listdir(annotation_dir) if annotation_filter in f]
classes = constants.hls_yolo_categories
print('augmenting yolo annotations in '+annotation_dir)
for f in annotation_files:
print('trying '+f)
annotation_base = os.path.basename(f)
imgfile = annotation_base.replace(annotation_filter,image_filter)
img_path = os.path.join(img_dir,imgfile)
img_arr = cv2.imread(img_path)
if img_arr is None:
            print('could not get '+img_path)
continue
h,w = img_arr.shape[0:2]
bb_list_xywh=[]
with open(f,'r') as fp:
lines = fp.readlines()
for line in lines:
if line.strip() == '':
print('empty line')
continue
print('got line:'+line)
if line.strip()[0]=='#':
print('commented line')
continue
object_class,bb0,bb1,bb2,bb3 = line.split()
bb_xywh = imutils.yolo_to_xywh([float(bb0),float(bb1),float(bb2),float(bb3)],(h,w))
bb_list_xywh.append(bb_xywh)
classname = classes[int(object_class)]
print('class {} bb_xywh {}'.format(classname,bb_xywh))
cv2.rectangle(img_arr,(bb_xywh[0],bb_xywh[1]),(bb_xywh[0]+bb_xywh[2],bb_xywh[1]+bb_xywh[3]),color=[100,255,100],thickness=2)
img_arr[bb_xywh[1]:bb_xywh[1]+20,bb_xywh[0]:bb_xywh[0]+bb_xywh[2]]=img_arr[bb_xywh[1]:bb_xywh[1]+20,bb_xywh[0]:bb_xywh[0]+bb_xywh[2]]/2+[100,50,100]
cv2.putText(img_arr,classname,(bb_xywh[0]+5,bb_xywh[1]+20),cv2.FONT_HERSHEY_PLAIN, 1, [255,0,255])
cv2.imshow('img',img_arr)
fp.close()
print('any key to not continue '+str(f))
k=cv2.waitKey(0)
#test augs
img_arr,bb_list = augment_images.transform_image_and_bbs(img_arr,bb_list_xywh)
show_annotations_xywh(bb_list,img_arr)
if __name__ == "__main__":
dir = '/data/jeremy/image_dbs/hls/insecam/07.05.2015_cameras_01-73'
trainfile=vietnam_dir_to_yolo(dir)
inspect_yolo_trainingfile(trainfile)
# dir_of_catalog_images_to_pixlevel(manual_oversight=False)
# dir_of_catalog_images_to_pixlevel(catalog_images_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress',
# swatch_bgnds_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/tg/backgrounds/textures/kept',
# person_bgnds_dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/tg/backgrounds/street_scenes/kept',
# destination_img_dir = '/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress_images',
# destination_label_dir = '/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/mongo/amazon_us_female/dress_labels',
# manual_oversight=False)
#mapillary_people_only(visual_output=True)
# kitti_to_tgdict()
#
# augment_yolo_bbs()
# inspect_yolo_annotation('/home/jeremy/projects/core/images/female1_yololabels.txt',
# '/home/jeremy/projects/core/images/female1.jpg')
#
#
# # inspect_yolo_annotations(dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/hls/VOCdevkit/VOC2005_1',
# # yolo_annotation_folder='labels',img_folder='images',manual_verification=False)
# #
# inspect_yolo_annotations(dir='/media/jeremy/9FBD-1B00/data/jeremy/image_dbs/hls/VOCdevkit/',
# yolo_annotation_folder='annotations_2007-2012',img_folder='images_2007-2012',manual_verification=False)
# read_and_convert_deepfashion_bbfile(multiprocess_it=False,visual_output=True)
# bbfile = '/data/olympics/olympics_augmentedlabels/10031828_augmented.txt'
# imgfile = '/data/olympics/olympics_augmented/10031828_augmented.jpg'
# d = yolo_to_tgdict(bbfile,img_file=None,visual_output=True)
# print('tgdict: '+str(d))
# apidict = tgdict_to_api_dict(d)
# print('apidict:'+str(apidict))
# inspect_yolo_annotations(dir='/data/jeremy/image_dbs/hls/kyle/',yolo_annotation_folder='/data/jeremy/image_dbs/hls/kyle/person_wearing_hatlabels/',img_folder='/data/jeremy/image_dbs/hls/kyle/person_wearing_hat',manual_verification=True)
# txt_to_tgdict()
```
#### File: ml-support-code-in-python/nn_utils/sysutils.py
```python
from __future__ import print_function
import multiprocessing
__author__ = 'liorsabag and jeremyrutman'
import csv
import gzip
import json
import requests
import logging
import os
import numpy as np
import math
import cv2
import re
import string
import sys
import hashlib
logging.basicConfig(level=logging.WARNING)
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36'
#db = constants.db
def format_filename(s):
"""Take a string and return a valid filename constructed from the string.
Uses a whitelist approach: any characters not present in valid_chars are
removed. Also spaces are replaced with underscores.
Note: this method may produce invalid filenames such as ``, `.` or `..`
When I use this method I prepend a date string like '2009_01_15_19_46_32_'
and append a file extension like '.txt', so I avoid the potential of using
an invalid filename.
"""
valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
filename = ''.join(c for c in s if c in valid_chars)
filename = filename.replace(' ', '_') # I don't like spaces in filenames.
return filename
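# Example of the whitelist behaviour described above (illustrative, values assumed):
#   format_filename('2009_01_15 report: v2?.txt') -> '2009_01_15_report_v2.txt'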
class GZipCSVReader:
def __init__(self, filename):
self.gzfile = gzip.open(filename)
self.reader = csv.DictReader(self.gzfile)
def next(self):
return self.reader.next()
def close(self):
self.gzfile.close()
def __iter__(self):
return self.reader.__iter__()
class npAwareJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.ndarray) and obj.ndim == 1:
return obj.tolist()
return json.JSONEncoder.default(self, obj)
class ThreadSafeCounter(object):
def __init__(self):
self.val = multiprocessing.Value('i', 0)
def increment(self, n=1):
with self.val.get_lock():
self.val.value += n
@property
def value(self):
return self.val.value
###########################
### OS stuff_
###########################
def safely_close(fh):
fh.flush()
os.fsync(fh.fileno()) # this and f.flush were needed since after file close, file wasn't immediately available.
fh.close()
def ensure_dir(f):
'''
:param f: file or directory name
:return: no return val, creates dir if it doesnt exist
'''
if f is None:
logging.warning('cannot create directory for None!')
return
logging.debug('f:' + f)
# d = os.path.dirname(f)
if not os.path.exists(f):
# print('d:'+str(d))
os.makedirs(f)
def ensure_file(fname):
if not os.path.exists(fname):
fhandle = open(fname, 'a')
try:
os.utime(fname, None)
finally:
fhandle.close()
def immediate_subdirs(dir):
'''
returns paths of immediate subdirectories of dir (not recursive)
'''
return filter(os.path.isdir, [os.path.join(dir, f) for f in os.listdir(dir)])
def files_in_directory(dir):
'''
returns paths of files in directory (not recursive)
'''
try:
list = filter(os.path.isfile, [os.path.join(dir, f) for f in os.listdir(dir)])
return list
except OSError:
logging.error('no such directory ' + dir)
def purge(dir, pattern):
for f in os.listdir(dir):
if re.search(pattern, f):
os.remove(os.path.join(dir, f))
def depth_of_subdir_of_calling_function():
'''
this finds the depth of subdirectory in which the caller resides
:return:
'''
path = os.getcwd()
# print('path:'+str(path))
p2 = path.split('trendi_guru_modules')
# print('path split on trendigurumodules:'+str(p2))
    if len(p2) < 2:
        print('not in trendi_guru_modules')
        return 0  # bail out here, otherwise p2[1] below raises IndexError
    secondhalf = p2[1]
# print('secondhalf:'+str(secondhalf))
cur = secondhalf.split('/')
# print('cur:'+str(cur))
if len(cur) > 1:
in_subdir_of_trendi_guru_modules = True
return len(cur) - 1
def get_images_list(dir_url):
paths_list = files_in_directory(dir_url)
images_list = []
# i=0
for url in paths_list:
images_list.append(get_cv2_img_array(url))
# cv2.imshow('1', images_list[i])
# cv2.waitKey(0)
# i += 1
return images_list
def show_parse(filename=None, img_array=None):
if filename is not None:
img_array = cv2.imread(filename)
if img_array is not None:
# minVal, maxVal, minLoc, maxLoc = cv2.minMaxLoc(img_array)
maxVal = np.amax(img_array)
scaled = np.multiply(img_array, int(255 / maxVal))
        dest = cv2.applyColorMap(scaled, cv2.COLORMAP_RAINBOW)
        cv2.imshow("dest", dest)
        cv2.waitKey(0)
        return dest
    logging.warning('got None as image array from:' + str(filename))
def shorten_url_googl(long_url):
url = "https://www.googleapis.com/urlshortener/v1/url"
querystring = {"key": "<KEY>"}
payload = json.dumps({"longUrl": long_url})
headers = {'content-type': 'application/json'}
response = requests.request("POST", url, data=payload, headers=headers, params=querystring)
return response.json().get("id") or long_url
def shorten_url_bitly(long_url):
url = "https://api-ssl.bitly.com/v3/shorten"
querystring = {"access_token": "1b131dcc7af91f1fa7f481ab7c20da0f658acff9",
"longUrl": long_url,
"format": "txt"}
response = requests.request("GET", url, params=querystring)
return response.text.rstrip()
def get_files_from_dir_and_subdirs(path=None):
'''
this returns the full paths of every file in and under the given dir
:param path:
:return: list of full paths of files (not dirs)
'''
if path is None:
path = os.getcwd()
# print('basepath:' + path)
# raw_input('enter to continue')
file_list = []
done_paths = []
for paths, dirs, files in os.walk(path):
if paths not in done_paths:
count = paths.count('/')
if files:
for ele1 in files:
# raw_input('enter to continue')
# print('---------' * (count), ele1)
full_name = os.path.join(path, ele1)
# print('filename:' + str(full_name))
file_list.append(full_name)
print('{0} files found in dir {1}'.format(len(files),path))
if dirs:
for ele2 in dirs:
# print('dir:'+str( ele2))
abs_path = os.path.join(paths, ele2)
# recursively calling the direct function on each directory
more_files = get_files_from_dir_and_subdirs(path=abs_path)
# adding the paths to the list that got traversed
done_paths.append(abs_path)
for file_n in more_files:
file_list.append(file_n)
return(file_list)
def remove_duplicate_files(dir):
'''
remove dupe files from dir - warning this deletes files
:param dir:
:return: number of dupes removed
'''
files = [f for f in os.listdir(dir) if os.path.isfile(os.path.join(dir, f))]
print('n files:'+str(len(files)))
hashes = []
dupe_count = 0
for a_file in files:
fullname = os.path.join(dir,a_file)
# img_arr = cv2.imread(fullname)
with open(fullname,'r') as f:
logging.debug('current file:'+fullname)
contents = f.read()
if contents is not None:
m = hashlib.md5()
m.update(contents)
current_hash = m.hexdigest()
logging.debug('image hash:' + current_hash + ' for ' + a_file)
dupe_flag = False
for a_previous_hash in hashes:
if current_hash == a_previous_hash:
fullpath = os.path.join(dir,a_file)
print('going to remove '+str(fullpath))
os.remove(fullpath)
dupe_flag = True
dupe_count = dupe_count + 1
break
if not dupe_flag:
hashes.append(current_hash)
print(fullname+' not a dupe')
print('found {} dupes'.format(dupe_count))
# testing git pull on pp2
def git_pull(**kwargs):
import subprocess
path = os.path.abspath(__file__)
module_directory = os.path.dirname(path)
print("Git_pull pulling to: " + module_directory)
try:
result = subprocess.check_output('git -C {dir} pull'.format(dir=module_directory), shell=True)
    except subprocess.CalledProcessError as e:
# logging.warning("git_pull failed with exception: {0}\ngit output:{1}".format(e)) #needs the second field
logging.warning("git_pull failed with exception: {0}".format(e))
return
def map_function_on_dir(func,dirname,**arglist):
'''
takes a function that has a filename as first arg and maps it onto files in dirname
:param func: function to map
:param dirname: dir of files to do function on
:param arglist: args to func
:return:
'''
logging.debug('applying function {} to files in directory {} with arguments {}'.format(func,dirname,str(arglist)))
only_files = [f for f in os.listdir(dirname) if os.path.isfile(os.path.join(dirname, f))]
for a_file in only_files:
fullpath = os.path.join(dirname,a_file)
func(fullpath,arglist)
def map_function_on_dir_of_dirs(func,dir_of_dirs,**arglist):
'''
takes a function that has a filename as first arg and maps it onto files in directory of directories
:param func: function to map
:param dir_of_dirs: dir of dirs to do function on
:param arglist: args to func
:return:
'''
logging.debug('applying function {} to files in directories under directory {} with arguments {}'.format(func,dir_of_dirs,str(arglist)))
only_dirs = [dir for dir in os.listdir(dir_of_dirs) if os.path.isdir(os.path.join(dir_of_dirs,dir))]
for a_dir in only_dirs:
fullpath = os.path.join(dir_of_dirs,a_dir)
map_function_on_dir(func,fullpath,**arglist)
def parent_dir(path):
'''
returns parent of file or dir pointed to by path
:param path:
:return: parent
'''
return os.path.abspath(os.path.join(path, os.pardir))
############################
### math stuff
############################
def precision(true_pos=0, false_pos=0):
if true_pos + false_pos:
precision = float(true_pos) / (true_pos + false_pos)
else:
precision = 0
return precision
def recall(true_pos=0, false_neg=0):
if true_pos + false_neg:
recall = float(true_pos) / (true_pos + false_neg)
else:
recall = 0
return recall
def error_of_fraction(numerator, numerator_stdev, denominator, denominator_stdev):
"""
this gives the error on fraction numerator/denominator assuming no covariance
:param numerator:
:param numerator_stdev:
:param denominator:
:param denominator_stdev:
:return:
"""
n = float(numerator)
d = float(denominator)
n_e = float(numerator_stdev)
d_e = float(denominator_stdev)
if n == 0 or d == 0:
print('caught div by zero in error_of_fraction, n=' + str(n) + ' d=' + str(d))
return (-1.0)
fraction_error = abs(n / d) * math.sqrt((n_e / n) ** 2 + (d_e / d) ** 2)
return fraction_error
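# Propagation formula used above, assuming zero covariance between numerator and
# denominator:
#   sigma_f = |n/d| * sqrt((sigma_n/n)**2 + (sigma_d/d)**2)
# Illustrative check (values assumed):
#   error_of_fraction(10, 1, 100, 5) -> 0.1 * sqrt(0.1**2 + 0.05**2) ~= 0.0112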
def lines_in_file(filename):
line_count = 0
try:
with open(filename, 'r') as fp:
for line in fp:
line_count = line_count + 1
except EnvironmentError: # parent of IOError, OSError *and* WindowsError where available
logging.error('oops. an environment error. take cover!!! ' + str(sys.exc_info()[0]))
return line_count
return line_count
def isnumber(str):
num_format = re.compile("^[1-9][0-9]*\.?[0-9]*")
isnumber = re.match(num_format, str)
if isnumber:
return True
else:
return False
def kick_fp_out():
fp = 'people.0.items.0.similar_results.0.fingerprint'
idx = 0
for doc in db.images.find({fp: {'$exists': 1}}):
print("started")
idx += 1
for person in doc['people']:
for item in person['items']:
for result in item['similar_results']:
if 'fingerprint' in result.keys():
result.pop('fingerprint')
db.images.replace_one({'_id': doc['_id']}, doc)
print("did {0} docs".format(idx))
print("{0} docs modified".format(idx))
def data_url_to_cv2_img(url):
nparr = np.fromstring(url.split(',')[1].decode('base64'), np.uint8)
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
return img
def flatten_list(l):
flatlist = []
for sublist in l:
if isinstance(sublist,list):
flattened = flatten_list(sublist)
# print('flattened sublist:'+str(flattened))
flatlist = flatlist + flattened
else:
flatlist.append(sublist)
return flatlist
def give_me_a_list_of_synonyms(category,synonym_lists=None):
'''
this takes a synonymlist of list and returns the relevant list if any
synonym list is
# synonymous_cats =[ ['womens_swimwear_nonbikini','women\'s swimwear'],
# ['leggings','stockings'] ... ]
:param category:
:return:
'''
for l in synonym_lists:
if category in l:
return l
return [category] #no list available so return just the category itself
if __name__ == '__main__':
print('starting')
# show_all_bbs_in_db()
# fix_all_bbs_in_db()
# step_thru_db(use_visual_output=True)
# http://glamradar.com/wp-content/uploads/2013/01/Chiffon-Maxi-Skirts-celebs-500x500.jpg
show_parse(
'/home/jeremy/<EMAIL>/TrendiGuru/techdev/trendi_guru_modules/paperdoll/glamradar.com_wp-content_uploads_2013_01_Chiffon-Maxi-Skirts-celebs-500x500.png')
```
|
{
"source": "jeremy-rutman/prjxray",
"score": 2
}
|
#### File: 005-tilegrid/iob/top.py
```python
import os
import random
random.seed(int(os.getenv("SEED"), 16))
from prjxray import util
from prjxray import verilog
def gen_iobs():
'''
IOB33S: main IOB of a diff pair
IOB33M: secondary IOB of a diff pair
IOB33: not a diff pair. Relatively rare (at least in ROI...2 of them?)
Focus on IOB33S to start
'''
for _tile_name, site_name, site_type in util.get_roi().gen_sites(
# ['IOB33', 'IOB33S']):
# FIXME: special cases on IOB33
['IOB33S']):
yield site_name, site_type
def write_params(ports):
pinstr = ''
for site, (name, dir_, cell) in sorted(ports.items(), key=lambda x: x[1]):
# pinstr += 'set_property -dict "PACKAGE_PIN %s IOSTANDARD LVCMOS33" [get_ports %s]' % (packpin, port)
pinstr += '%s,%s,%s,%s\n' % (site, name, dir_, cell)
open('params.csv', 'w').write(pinstr)
def run():
# All possible values
iosites = {}
for site_name, site_type in gen_iobs():
iosites[site_name] = site_type
# Assigned in this design
ports = {}
DIN_N = 0
DOUT_N = 0
def remain_sites():
return set(iosites.keys()) - set(ports.keys())
def rand_site():
'''Get a random, unused site'''
return random.choice(list(remain_sites()))
def assign_i(site, name):
nonlocal DIN_N
assert site not in ports
cell = "di_bufs[%u].ibuf" % DIN_N
DIN_N += 1
ports[site] = (name, 'input', cell)
def assign_o(site, name):
nonlocal DOUT_N
assert site not in ports
cell = "do_bufs[%u].obuf" % DOUT_N
DOUT_N += 1
ports[site] = (name, 'output', cell)
# Assign at least one di and one do
assign_i(rand_site(), 'di[0]')
assign_o(rand_site(), 'do[0]')
# Now assign the rest randomly
while len(remain_sites()):
if random.randint(0, 1):
assign_i(rand_site(), 'di[%u]' % DIN_N)
else:
assign_o(rand_site(), 'do[%u]' % DOUT_N)
write_params(ports)
print(
'''
`define N_DI %u
`define N_DO %u
module top(input wire [`N_DI-1:0] di, output wire [`N_DO-1:0] do);
genvar i;
//Instantiate BUFs so we can LOC them
wire [`N_DI-1:0] di_buf;
generate
for (i = 0; i < `N_DI; i = i+1) begin:di_bufs
IBUF ibuf(.I(di[i]), .O(di_buf[i]));
end
endgenerate
wire [`N_DO-1:0] do_unbuf;
generate
for (i = 0; i < `N_DO; i = i+1) begin:do_bufs
OBUF obuf(.I(do_unbuf[i]), .O(do[i]));
end
endgenerate
roi roi(.di(di_buf), .do(do_unbuf));
endmodule
//Arbitrary terminate into LUTs
module roi(input wire [`N_DI-1:0] di, output wire [`N_DO-1:0] do);
genvar i;
generate
for (i = 0; i < `N_DI; i = i+1) begin:dis
(* KEEP, DONT_TOUCH *)
LUT6 #(
.INIT(64'h8000_0000_0000_0001)
) lut (
.I0(di[i]),
.I1(di[i]),
.I2(di[i]),
.I3(di[i]),
.I4(di[i]),
.I5(di[i]),
.O());
end
endgenerate
generate
for (i = 0; i < `N_DO; i = i+1) begin:dos
(* KEEP, DONT_TOUCH *)
LUT6 #(
.INIT(64'h8000_0000_0000_0001)
) lut (
.I0(),
.I1(),
.I2(),
.I3(),
.I4(),
.I5(),
.O(do[i]));
end
endgenerate
endmodule
''' % (DIN_N, DOUT_N))
if __name__ == '__main__':
run()
```
#### File: minitest/test_zero/process.py
```python
import sys
import os
import time
import json
def run_types(tilej, verbose=False):
def process(etype):
# dict[model] = set((tile, wire/pip))
zeros = {}
print('Processing %s' % etype)
# Index delay models by type, recording where they occured
for tilek, tilev in tilej['tiles'].items():
for ename, emodel in tilev[etype].items():
if emodel.find('ZERO') >= 0:
zeros.setdefault(emodel, set()).add((tilek, ename))
# Print out delay model instances
print('%s ZERO types: %u, %s' % (etype, len(zeros), zeros.keys()))
print(
'%s ZERO instances: %u' %
(etype, sum([len(x) for x in zeros.values()])))
for model in sorted(zeros.keys()):
modelv = zeros[model]
print('Model: %s' % model)
for tile_name, element_name in sorted(list(modelv)):
print(' %s: %s' % (tile_name, element_name))
process('wires')
print('')
process('pips')
def run_prefix(tilej, verbose=False):
def process(etype):
prefixes = set()
print('Processing %s' % etype)
# Index delay models by type, recording where they occured
for tilek, tilev in tilej['tiles'].items():
for ename, emodel in tilev[etype].items():
prefix = emodel.split('_')[0]
prefixes.add(prefix)
print('%s prefixes: %u' % (etype, len(prefixes)))
for prefix in sorted(prefixes):
print(' %s' % prefix)
process('wires')
print('')
process('pips')
def run(fnin, verbose=False):
tilej = json.load((open(fnin, 'r')))
run_types(tilej)
print('')
print('')
run_prefix(tilej)
def main():
import argparse
parser = argparse.ArgumentParser(description='Solve timing solution')
parser.add_argument(
'fnin',
default="../timgrid/build/timgrid-s.json",
nargs='?',
help='input timgrid JSON')
args = parser.parse_args()
run(args.fnin, verbose=False)
if __name__ == '__main__':
main()
```
#### File: fuzzers/007-timing/rref.py
```python
from timfuz import Benchmark, Ar_di2np, loadc_Ads_b, index_names, A_ds2np, simplify_rows, OrderedSet
import numpy as np
import glob
import math
import json
import sympy
from collections import OrderedDict
from fractions import Fraction
def rm_zero_cols(Ads, verbose=True):
removed = OrderedSet()
print('Removing ZERO elements')
for row_ds in Ads:
for k in set(row_ds.keys()):
if k in removed:
del row_ds[k]
elif k.find('ZERO') >= 0:
del row_ds[k]
removed.add(k)
if verbose:
print(' Removing %s' % k)
return removed
def fracr_quick(r):
return [Fraction(numerator=int(x), denominator=1) for x in r]
def fracm_quick(m):
'''Convert integer matrix to Fraction matrix'''
t = type(m[0][0])
print('fracm_quick type: %s' % t)
return [fracr_quick(r) for r in m]
class State(object):
def __init__(self, Ads, zero_names=[]):
self.Ads = Ads
self.names = index_names(self.Ads)
# known zero delay elements
self.zero_names = OrderedSet(zero_names)
# active names in rows
# includes sub variables, excludes variables that have been substituted out
self.base_names = OrderedSet(self.names)
#self.names = OrderedSet(self.base_names)
self.names = set(self.base_names)
# List of variable substitutions
# k => dict of v:n entries that it came from
self.subs = OrderedDict()
self.verbose = True
def print_stats(self):
print("Stats")
print(" Substitutions: %u" % len(self.subs))
if self.subs:
print(
" Largest: %u" % max([len(x) for x in self.subs.values()]))
print(" Rows: %u" % len(self.Ads))
print(
" Cols (in): %u" % (len(self.base_names) + len(self.zero_names)))
print(" Cols (preprocessed): %u" % len(self.base_names))
print(" ZERO names: %u" % len(self.zero_names))
print(" Cols (out): %u" % len(self.names))
print(" Solvable vars: %u" % len(self.names & self.base_names))
assert len(self.names) >= len(self.subs)
@staticmethod
def load(fn_ins, simplify=False, corner=None, rm_zero=False):
zero_names = OrderedSet()
Ads, b = loadc_Ads_b(fn_ins, corner=corner)
if rm_zero:
zero_names = rm_zero_cols(Ads)
if simplify:
print('Simplifying corner %s' % (corner, ))
Ads, b = simplify_rows(Ads, b, remove_zd=False, corner=corner)
return State(Ads, zero_names=zero_names)
def write_state(state, fout):
j = {
'names':
OrderedDict([(x, None) for x in state.names]),
'zero_names':
sorted(list(state.zero_names)),
'base_names':
sorted(list(state.base_names)),
'subs':
OrderedDict([(name, values) for name, values in state.subs.items()]),
'pivots':
state.pivots,
}
json.dump(j, fout, sort_keys=True, indent=4, separators=(',', ': '))
def row_np2ds(rownp, names):
ret = OrderedDict()
assert len(rownp) == len(names), (len(rownp), len(names))
for namei, name in enumerate(names):
v = rownp[namei]
if v:
ret[name] = v
return ret
def row_sym2dsf(rowsym, names):
'''Convert a sympy row into a dictionary of keys to (numerator, denominator) tuples'''
from sympy import fraction
ret = OrderedDict()
assert len(rowsym) == len(names), (len(rowsym), len(names))
for namei, name in enumerate(names):
v = rowsym[namei]
if v:
(num, den) = fraction(v)
ret[name] = (int(num), int(den))
return ret
def state_rref(state, verbose=False):
print('Converting rows to integer keys')
names, Anp = A_ds2np(state.Ads)
print('np: %u rows x %u cols' % (len(Anp), len(Anp[0])))
mnp = Anp
print('Matrix: %u rows x %u cols' % (len(mnp), len(mnp[0])))
print('Converting np to sympy matrix')
mfrac = fracm_quick(mnp)
# doesn't seem to change anything
#msym = sympy.MutableSparseMatrix(mfrac)
msym = sympy.Matrix(mfrac)
# internal encoding has significnat performance implications
#assert type(msym[0]) is sympy.Integer
if verbose:
print('names')
print(names)
print('Matrix')
sympy.pprint(msym)
print('Making rref')
rref, pivots = msym.rref(normalize_last=False)
if verbose:
print('Pivots')
sympy.pprint(pivots)
print('rref')
sympy.pprint(rref)
state.pivots = OrderedDict()
def row_solved(rowsym, row_pivot):
for ci, c in enumerate(rowsym):
if ci == row_pivot:
continue
if c != 0:
return False
return True
#rrefnp = np.array(rref).astype(np.float64)
#print('Computing groups w/ rref %u row x %u col' % (len(rrefnp), len(rrefnp[0])))
#print(rrefnp)
# rows that have a single 1 are okay
# anything else requires substitution (unless all 0)
# pivots may be fewer than the rows
# remaining rows should be 0s
for row_i, row_pivot in enumerate(pivots):
rowsym = rref.row(row_i)
# yipee! nothign to report
if row_solved(rowsym, row_pivot):
continue
# a grouping
group_name = "GRP_%u" % row_i
rowdsf = row_sym2dsf(rowsym, names)
state.subs[group_name] = rowdsf
# Add the new variables
state.names.add(group_name)
# Remove substituted variables
# Note: variables may appear multiple times
state.names.difference_update(OrderedSet(rowdsf.keys()))
pivot_name = names[row_pivot]
state.pivots[group_name] = pivot_name
if verbose:
print("%s (%s): %s" % (group_name, pivot_name, rowdsf))
return state
def run(fnout, fn_ins, simplify=False, corner=None, rm_zero=False, verbose=0):
print('Loading data')
assert len(fn_ins) > 0
state = State.load(
fn_ins, simplify=simplify, corner=corner, rm_zero=rm_zero)
state_rref(state, verbose=verbose)
state.print_stats()
if fnout:
with open(fnout, 'w') as fout:
write_state(state, fout)
def main():
import argparse
parser = argparse.ArgumentParser(
description=
'Compute reduced row echelon (RREF) to form sub.json (variable groups)'
)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument('--simplify', action='store_true', help='')
parser.add_argument('--corner', default="slow_max", help='')
parser.add_argument(
'--rm-zero', action='store_true', help='Remove ZERO elements')
parser.add_argument(
'--speed-json',
default='build_speed/speed.json',
help='Provides speed index to name translation')
parser.add_argument('--out', help='Output sub.json substitution result')
parser.add_argument('fns_in', nargs='*', help='timing4i.csv input files')
args = parser.parse_args()
bench = Benchmark()
fns_in = args.fns_in
if not fns_in:
fns_in = glob.glob('specimen_*/timing4i.csv')
try:
run(
fnout=args.out,
fn_ins=fns_in,
simplify=args.simplify,
corner=args.corner,
rm_zero=args.rm_zero,
verbose=args.verbose)
finally:
print('Exiting after %s' % bench)
if __name__ == '__main__':
main()
```
#### File: fuzzers/007-timing/solve_linprog.py
```python
import scipy.optimize as optimize
from timfuz import Benchmark, load_sub, A_ub_np2d, acorner2csv, corner_s2i
import numpy as np
import glob
import json
import math
import sys
import os
import time
import timfuz_solve
def run_corner(
Anp, b, names, corner, verbose=False, opts={}, meta={}, outfn=None):
if len(Anp) == 0:
print('WARNING: zero equations')
if outfn:
timfuz_solve.solve_save(outfn, [], [], corner)
return
maxcorner = {
'slow_max': True,
'slow_min': False,
'fast_max': True,
'fast_min': False,
}[corner]
# Given timing scores for above delays (-ps)
assert type(Anp[0]) is np.ndarray, type(Anp[0])
assert type(b) is np.ndarray, type(b)
#check_feasible(Anp, b)
'''
Be mindful of signs
t1, t2: total delay contants
d1, d2..: variables to solve for
Max corner intuitive form:
d1 + d2 + d4 >= t1
d2 + d3 >= t2
But need it in compliant form:
-d1 + -d2 + -d4 <= -t1
-d2 + -d3 <= -t2
Minimize delay elements
Min corner intuitive form:
d1 + d2 + d4 <= t1
d2 + d3 <= t2
Maximize delay elements
'''
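    # Tiny worked example of the sign handling above (illustrative only):
    # for the max corner with two delay variables and one measurement d1 + d2 >= 10,
    # the row [1, 1] with b = 10 is scaled to A_ub row [-1, -1] with b_ub = -10,
    # i.e. -d1 - d2 <= -10, which is the form scipy.optimize.linprog accepts.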
rows = len(Anp)
cols = len(Anp[0])
if maxcorner:
print('maxcorner => scaling to solution form...')
b_ub = -1.0 * b
#A_ub = -1.0 * Anp
A_ub = [-1.0 * x for x in Anp]
else:
print('mincorner => no scaling required')
b_ub = b
A_ub = Anp
print('Creating misc constants...')
# Minimization function scalars
# Treat all logic elements as equally important
if maxcorner:
# Best result are min delays
c = [1 for _i in range(len(names))]
else:
# Best result are max delays
c = [-1 for _i in range(len(names))]
# Delays cannot be negative
# (this is also the default constraint)
#bounds = [(0, None) for _i in range(len(names))]
# Also you can provide one to apply to all
bounds = (0, None)
# Seems to take about rows + 3 iterations
# Give some margin
#maxiter = int(1.1 * rows + 100)
#maxiter = max(1000, int(1000 * rows + 1000))
# Most of the time I want it to just keep going unless I ^C it
maxiter = 1000000
if verbose >= 2:
print('b_ub', b)
print('Unique delay elements: %d' % len(names))
print(' # delay minimization weights: %d' % len(c))
print(' # delay constraints: %d' % len(bounds))
print('Input paths')
print(' # timing scores: %d' % len(b))
print(' Rows: %d' % rows)
tlast = [time.time()]
iters = [0]
printn = [0]
def callback(xk, **kwargs):
iters[0] = kwargs['nit']
if time.time() - tlast[0] > 1.0:
sys.stdout.write('I:%d ' % kwargs['nit'])
tlast[0] = time.time()
printn[0] += 1
if printn[0] % 10 == 0:
sys.stdout.write('\n')
sys.stdout.flush()
print('')
# Now find smallest values for delay constants
# Due to input bounds (ex: column limit), some delay elements may get eliminated entirely
# https://docs.scipy.org/doc/scipy-0.18.1/reference/generated/scipy.optimize.linprog.html
print('Running linprog w/ %d r, %d c (%d name)' % (rows, cols, len(names)))
res = optimize.linprog(
c,
A_ub=A_ub,
b_ub=b_ub,
bounds=bounds,
callback=callback,
options={
"disp": True,
'maxiter': maxiter,
'bland': True,
'tol': 1e-6,
})
nonzeros = 0
print('Ran %d iters' % iters[0])
if res.success:
print('Result sample (%d elements)' % (len(res.x)))
plim = 3
for xi, (name, x) in enumerate(zip(names, res.x)):
nonzero = x >= 0.001
if nonzero:
nonzeros += 1
#if nonzero and (verbose >= 1 or xi > 30):
if nonzero and (verbose or (
(nonzeros < 100 or nonzeros % 20 == 0) and nonzeros <= plim)):
print(' % 4u % -80s % 10.1f' % (xi, name, x))
print('Delay on %d / %d' % (nonzeros, len(res.x)))
if outfn:
timfuz_solve.solve_save(
outfn, res.x, names, corner, verbose=verbose)
def main():
import argparse
parser = argparse.ArgumentParser(
description=
'Solve timing solution using linear programming inequalities')
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument('--massage', action='store_true', help='')
parser.add_argument(
'--bounds-csv', help='Previous solve result starting point')
parser.add_argument(
'--sub-json', help='Group substitutions to make fully ranked')
parser.add_argument('--corner', required=True, default="slow_max", help='')
parser.add_argument(
'--out', default=None, help='output timing delay .json')
parser.add_argument('fns_in', nargs='+', help='timing4i.csv input files')
args = parser.parse_args()
# Store options in dict to ease passing through functions
bench = Benchmark()
fns_in = args.fns_in
if not fns_in:
fns_in = glob.glob('specimen_*/timing4i.csv')
sub_json = None
if args.sub_json:
sub_json = load_sub(args.sub_json)
try:
timfuz_solve.run(
run_corner=run_corner,
sub_json=sub_json,
bounds_csv=args.bounds_csv,
fns_in=fns_in,
corner=args.corner,
massage=args.massage,
outfn=args.out,
verbose=args.verbose)
finally:
print('Exiting after %s' % bench)
if __name__ == '__main__':
main()
```
#### File: fuzzers/007-timing/timfuz_solve.py
```python
from timfuz import simplify_rows, loadc_Ads_b, index_names, A_ds2np, run_sub_json, print_eqns, Ads2bounds, instances, SimplifiedToZero, allow_zero_eqns, corner_s2i, acorner2csv
from timfuz_massage import massage_equations
import numpy as np
import sys
import math
def check_feasible(A_ub, b_ub):
'''
Put large timing constants into the equations
See if that would solve it
Its having trouble giving me solutions as this gets bigger
Make a terrible baseline guess to confirm we aren't doing something bad
'''
sys.stdout.write('Check feasible ')
sys.stdout.flush()
rows = len(b_ub)
cols = len(A_ub[0])
progress = max(1, rows / 100)
'''
Delays should be in order of ns, so a 10 ns delay should be way above what anything should be
Series can have several hundred delay elements
Max delay in ballpark
'''
xs = [1e9 for _i in range(cols)]
# FIXME: use the correct np function to do this for me
# Verify bounds
#b_res = np.matmul(A_ub, xs)
#print(type(A_ub), type(xs)
#A_ub = np.array(A_ub)
#xs = np.array(xs)
#b_res = np.matmul(A_ub, xs)
def my_mul(A_ub, xs):
#print('cols', cols
#print('rows', rows
ret = [None] * rows
for row in range(rows):
this = 0
for col in range(cols):
this += A_ub[row][col] * xs[col]
ret[row] = this
return ret
b_res = my_mul(A_ub, xs)
# Verify bound was respected
for rowi, (this_b, this_b_ub) in enumerate(zip(b_res, b_ub)):
if rowi % progress == 0:
sys.stdout.write('.')
sys.stdout.flush()
if this_b >= this_b_ub or this_b > 0:
print(
'% 4d Want res % 10.1f <= % 10.1f <= 0' %
(rowi, this_b, this_b_ub))
raise Exception("Bad ")
print(' done')
def filter_bounds(Ads, b, bounds, corner):
'''
Given min variable delays, remove rows that won't constrain solution
Ex for max corner:
Given bounds:
a >= 10
b >= 1
c >= 0
Given equations:
a + b >= 10
a + c >= 100
The first equation is already satisfied
However, the second needs either an increase in a or an increase in c '''
if 'max' in corner:
# Keep delays possibly larger than current bound
def keep(row_b, est):
return row_b > est
T_UNK = 0
elif 'min' in corner:
# Keep delays possibly smaller than current bound
def keep(row_b, est):
return row_b < est
T_UNK = 1e9
else:
assert 0
ret_Ads = []
ret_b = []
unknowns = set()
for row_ds, row_b in zip(Ads, b):
# some variables get estimated at 0
def getvar(k):
#return bounds.get(k, T_UNK)
ret = bounds.get(k, None)
if ret is not None:
return ret
unknowns.add(k)
return T_UNK
est = sum([getvar(k) * v for k, v in row_ds.items()])
# will this row potentially constrain us more?
if keep(row_b, est):
ret_Ads.append(row_ds)
ret_b.append(row_b)
if len(unknowns):
        print('WARNING: encountered %u undefined bounds' % len(unknowns))
return ret_Ads, ret_b
def solve_save(outfn, xvals, names, corner, save_zero=True, verbose=False):
# ballpark minimum actual observed delay is around 7 (carry chain)
# anything less than one is probably a solver artifact
delta = 0.5
corneri = corner_s2i[corner]
roundf = {
'fast_max': math.ceil,
'fast_min': math.floor,
'slow_max': math.ceil,
'slow_min': math.floor,
}[corner]
print('Writing results')
zeros = 0
with open(outfn, 'w') as fout:
# write as one variable per line
# this natively forms a bound if fed into linprog solver
fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n')
for xval, name in zip(xvals, names):
row_ico = 1
if xval < delta:
if verbose:
print('WARNING: near 0 delay on %s: %0.6f' % (name, xval))
zeros += 1
if not save_zero:
continue
items = [str(row_ico), acorner2csv(roundf(xval), corneri)]
items.append('%u %s' % (1, name))
fout.write(','.join(items) + '\n')
nonzeros = len(names) - zeros
print(
'Wrote: %u / %u constrained delays, %u zeros' %
(nonzeros, len(names), zeros))
# max only...min corner seems to like 0
# see https://github.com/SymbiFlow/prjxray/issues/136
if 'max' in corner:
assert nonzeros, 'Failed to estimate delay'
def run(
fns_in,
corner,
run_corner,
sub_json=None,
bounds_csv=None,
dedup=True,
massage=False,
outfn=None,
verbose=False,
**kwargs):
print('Loading data')
Ads, b = loadc_Ads_b(fns_in, corner)
# Remove duplicate rows
# is this necessary?
# maybe better to just add them into the matrix directly
if dedup:
oldn = len(Ads)
iold = instances(Ads)
Ads, b = simplify_rows(Ads, b, corner=corner)
print('Simplify %u => %u rows' % (oldn, len(Ads)))
print('Simplify %u => %u instances' % (iold, instances(Ads)))
if sub_json:
print('Sub: %u rows' % len(Ads))
iold = instances(Ads)
names_old = index_names(Ads)
run_sub_json(Ads, sub_json, verbose=verbose)
names = index_names(Ads)
print("Sub: %u => %u names" % (len(names_old), len(names)))
print('Sub: %u => %u instances' % (iold, instances(Ads)))
else:
names = index_names(Ads)
'''
Substitution .csv
Special .csv containing one variable per line
Used primarily for multiple optimization passes, such as different algorithms or additional constraints
'''
if bounds_csv:
Ads2, b2 = loadc_Ads_b([bounds_csv], corner)
bounds = Ads2bounds(Ads2, b2)
assert len(bounds), 'Failed to load bounds'
rows_old = len(Ads)
Ads, b = filter_bounds(Ads, b, bounds, corner)
print(
'Filter bounds: %s => %s + %s rows' %
(rows_old, len(Ads), len(Ads2)))
Ads = Ads + Ads2
b = b + b2
assert len(Ads) or allow_zero_eqns()
assert len(Ads) == len(b), 'Ads, b length mismatch'
if verbose:
print
print_eqns(Ads, b, verbose=verbose)
#print
#col_dist(A_ubd, 'final', names)
if massage:
try:
Ads, b = massage_equations(Ads, b, corner=corner)
except SimplifiedToZero:
if not allow_zero_eqns():
raise
print('WARNING: simplified to zero equations')
Ads = []
b = []
print('Converting to numpy...')
names, Anp = A_ds2np(Ads)
run_corner(
Anp,
np.asarray(b),
names,
corner,
outfn=outfn,
verbose=verbose,
**kwargs)
```
#### File: 007-timing/timgrid/tile_txt2json.py
```python
import sys
import os
import time
import json
SI_NONE = 0xFFFF
def load_speed_json(f):
j = json.load(f)
# Index speed indexes to names
speed_i2s = {}
for k, v in j['speed_model'].items():
i = v['speed_index']
if i != SI_NONE:
speed_i2s[i] = k
return j, speed_i2s
def gen_tiles(fnin, speed_i2s):
for l in open(fnin):
# lappend items pip $name $speed_index
# puts $fp "$type $tile $grid_x $grid_y $items"
parts = l.strip().split()
tile_type, tile_name, grid_x, grid_y = parts[0:4]
grid_x, grid_y = int(grid_x), int(grid_y)
tuples = parts[4:]
assert len(tuples) % 3 == 0
pips = {}
wires = {}
for i in range(0, len(tuples), 3):
ttype, name, speed_index = tuples[i:i + 3]
name_local = name.split('/')[1]
{
'pip': pips,
'wire': wires,
}[ttype][name_local] = speed_i2s[int(speed_index)]
yield (tile_type, tile_name, grid_x, grid_y, pips, wires)
def run(fnin, fnout, speed_json_fn, verbose=False):
speedj, speed_i2s = load_speed_json(open(speed_json_fn, 'r'))
tiles = {}
for tile in gen_tiles(fnin, speed_i2s):
(tile_type, tile_name, grid_x, grid_y, pips, wires) = tile
this_dat = {'pips': pips, 'wires': wires}
if tile_type not in tiles:
tiles[tile_type] = this_dat
else:
if tiles[tile_type] != this_dat:
print(tile_name, tile_type)
print(this_dat)
print(tiles[tile_type])
assert 0
j = {'tiles': tiles}
json.dump(
j, open(fnout, 'w'), sort_keys=True, indent=4, separators=(',', ': '))
def main():
import argparse
parser = argparse.ArgumentParser(description='Solve timing solution')
parser.add_argument(
'--speed-json',
default='../../speed/build/speed.json',
help='Provides speed index to name translation')
parser.add_argument('fnin', default=None, help='input tcl output .txt')
parser.add_argument('fnout', default=None, help='output .json')
args = parser.parse_args()
run(args.fnin, args.fnout, speed_json_fn=args.speed_json, verbose=False)
if __name__ == '__main__':
main()
```
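The dump format consumed by `gen_tiles` is easiest to see on a concrete line. The sketch below is illustrative only — the tile, wire and pip names and the speed indices are invented — and it repeats the triplet-grouping logic inline rather than importing the script:
```python
# Hypothetical dump line: "<type> <tile> <grid_x> <grid_y>" followed by
# (item_type, name, speed_index) triplets, as emitted by the tcl exporter.
line = "CLBLM_R CLBLM_R_X3Y80 42 17 wire CLBLM_R_X3Y80/CLBLM_M_D6 952 pip CLBLM_R_X3Y80/CLBLM_R.FOO 7"
speed_i2s = {952: "C_CLBLM_M_D6", 7: "R_PIP_FOO"}  # fabricated speed_index -> name map
parts = line.split()
tile_type, tile_name, grid_x, grid_y = parts[0:4]
tuples = parts[4:]
assert len(tuples) % 3 == 0
pips, wires = {}, {}
for i in range(0, len(tuples), 3):
    ttype, name, speed_index = tuples[i:i + 3]
    name_local = name.split('/')[1]
    {'pip': pips, 'wire': wires}[ttype][name_local] = speed_i2s[int(speed_index)]
print(wires)  # {'CLBLM_M_D6': 'C_CLBLM_M_D6'}
print(pips)   # {'CLBLM_R.FOO': 'R_PIP_FOO'}
```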
#### File: fuzzers/007-timing/timing_txt2json.py
```python
from timfuz import Benchmark, A_di2ds
import glob
import math
import json
import sys
from collections import OrderedDict
# Speed index: some sort of special value
SI_NONE = 0xFFFF
def parse_pip(s, speed_i2s):
# Entries like
# CLK_BUFG_REBUF_X60Y117/CLK_BUFG_REBUF.CLK_BUFG_REBUF_R_CK_GCLK0_BOT<<->>CLK_BUFG_REBUF_R_CK_GCLK0_TOP
# Convert to (site, type, pip_junction, pip)
pipstr, speed_index = s.split(':')
speed_index = int(speed_index)
site, instance = pipstr.split('/')
#type, pip_junction, pip = others.split('.')
#return (site, type, pip_junction, pip)
return site, instance, speed_i2s[int(speed_index)]
def parse_pips(pips, speed_i2s):
if not pips:
return []
return [parse_pip(pip, speed_i2s) for pip in pips.split('|')]
def parse_wire(s, speed_i2s):
# CLBLM_R_X3Y80/CLBLM_M_D6:952
wirestr, speed_index = s.split(':')
site, instance = wirestr.split('/')
return site, instance, speed_i2s[int(speed_index)]
def parse_wires(wires, speed_i2s):
if not wires:
return []
return [parse_wire(wire, speed_i2s) for wire in wires.split('|')]
def gen_timing4(fn, speed_i2s):
f = open(fn, 'r')
header_want = "linetype,net,src_site,src_site_type,src_site_pin,src_bel,src_bel_pin,dst_site,dst_site_type,dst_site_pin,dst_bel,dst_bel_pin,ico,fast_max,fast_min,slow_max,slow_min,pips,wires"
ncols = len(header_want.split(','))
# src_bel dst_bel ico fast_max fast_min slow_max slow_min pips
header_got = f.readline().strip()
if header_got != header_want:
raise Exception("Unexpected columns")
rets = 0
# XXX: there were malformed lines, but think they are fixed now?
bads = 0
net_lines = 0
for l in f:
def group_line():
ncols = len('lintype,ico,delays'.split(','))
assert len(parts) == ncols
_lintype, ico, delays = parts
return int(ico), int(delays)
def net_line():
assert len(parts) == ncols, "Expected %u parts, got %u" % (
ncols, len(parts))
_lintype, net, src_site, src_site_type, src_site_pin, src_bel, src_bel_pin, dst_site, dst_site_type, dst_site_pin, dst_bel, dst_bel_pin, ico, fast_max, fast_min, slow_max, slow_min, pips, wires = parts
def filt_passthru_lut(bel_pins):
'''
Ex: SLICE_X11Y110/A6LUT/A6 SLICE_X11Y110/AFF/D
'''
parts = bel_pins.split()
if len(parts) == 1:
return parts[0]
else:
assert len(parts) == 2
                    # the LUT should always go first?
bel_pin_lut, bel_pin_dst = parts
assert '6LUT' in bel_pin_lut
return bel_pin_dst
return {
'net': net,
'src': {
'site': src_site,
'site_type': src_site_type,
'site_pin': src_site_pin,
'bel': src_bel,
'bel_pin': src_bel_pin,
},
'dst': {
'site': dst_site,
'site_type': dst_site_type,
'site_pin': dst_site_pin,
'bel': dst_bel,
'bel_pin': filt_passthru_lut(dst_bel_pin),
},
't': {
# ps
'fast_max': int(fast_max),
'fast_min': int(fast_min),
'slow_max': int(slow_max),
'slow_min': int(slow_min),
},
'ico': int(ico),
'pips': parse_pips(pips, speed_i2s),
'wires': parse_wires(wires, speed_i2s),
'line': l,
}
l = l.strip()
if not l:
continue
parts = l.split(',')
lintype = parts[0]
val = {
'NET': net_line,
'GROUP': group_line,
}[lintype]()
yield lintype, val
rets += 1
print(' load %s: %d bad, %d good lines' % (fn, bads, rets))
def gen_timing4n(fn, speed_i2s):
'''Only generate nets'''
for lintype, val in gen_timing4(fn, speed_i2s):
if lintype == 'NET':
yield val
def gen_timing4a(fn, speed_i2s):
'''
Like above, but aggregate ico + non-ico into single entries
Key these based on uniqueness of (src_bel, dst_bel)
ico 0 is followed by 1
They should probably even be in the same order
Maybe just assert that?
'''
entries = {}
timgen = gen_timing4(fn, speed_i2s)
rets = 0
while True:
def get_ico(exp_ico):
ret = []
try:
lintype, val = next(timgen)
except StopIteration:
return None
assert lintype == 'GROUP'
ico, delays = val
assert ico == exp_ico
for _ in range(delays):
lintype, val = next(timgen)
assert lintype == 'NET'
ret.append(val)
return ret
ico0s = get_ico(0)
if ico0s is None:
break
ico1s = get_ico(1)
# TODO: verify this is actually true
assert len(ico0s) == len(ico1s)
def same_path(l, r):
# if source and dest are the same, should be the same thing
return l['src']['bel_pin'] == r['src']['bel_pin'] and l['dst'][
'bel_pin'] == r['dst']['bel_pin']
for ico0, ico1 in zip(ico0s, ico1s):
# TODO: verify this is actually true
# otherwise move to more complex algorithm
assert same_path(ico0, ico1)
# aggregate timing info as (ic0, ic1) into ico0
ico0['t'] = (
ico0['t'],
ico1['t'],
)
yield ico0
rets += 1
print(' load %s: %u aggregated lines' % (fn, rets))
def load_speed_json(f):
j = json.load(f)
# Index speed indexes to names
speed_i2s = {}
for k, v in j['speed_model'].items():
i = v['speed_index']
if i != SI_NONE:
speed_i2s[i] = k
return j, speed_i2s
```
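Both `parse_wire` and `parse_pip` expect `name:speed_index` tokens, joined by `|` in the raw dump. A self-contained illustration of one wire token (the name and index are made up, and `speed_i2s` stands in for the map built by `load_speed_json`):
```python
speed_i2s = {952: "C_CLBLM_M_D6"}       # fabricated speed_index -> model name
token = "CLBLM_R_X3Y80/CLBLM_M_D6:952"  # one wire entry; pips use the same name:index form
wirestr, speed_index = token.split(':')
site, instance = wirestr.split('/')
print(site, instance, speed_i2s[int(speed_index)])
# CLBLM_R_X3Y80 CLBLM_M_D6 C_CLBLM_M_D6
```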
#### File: fuzzers/013-clbncy0/top.py
```python
import random
random.seed(0)
from prjxray import util
from prjxray import verilog
CLBN = 400
print('//Requested CLBs: %s' % str(CLBN))
def gen_slices():
for _tile_name, site_name, _site_type in util.get_roi().gen_sites(
['SLICEL', 'SLICEM']):
yield site_name
DIN_N = CLBN * 8
DOUT_N = CLBN * 8
lut_bels = ['A6LUT', 'B6LUT', 'C6LUT', 'D6LUT']
verilog.top_harness(DIN_N, DOUT_N)
f = open('params.csv', 'w')
f.write('module,loc,bel,n\n')
slices = gen_slices()
print(
'module roi(input clk, input [%d:0] din, output [%d:0] dout);' %
(DIN_N - 1, DOUT_N - 1))
for i in range(CLBN):
bel = ''
if random.randint(0, 1):
module = 'clb_NCY0_MX'
else:
module = 'clb_NCY0_O5'
n = random.randint(0, 3)
loc = next(slices)
bel = lut_bels[n]
print(' %s' % module)
print(' #(.LOC("%s"), .BEL("%s"), .N(%d))' % (loc, bel, n))
print(
' clb_%d (.clk(clk), .din(din[ %d +: 8]), .dout(dout[ %d +: 8]));'
% (i, 8 * i, 8 * i))
f.write('%s,%s,%s,%s\n' % (module, loc, bel, n))
f.close()
print(
'''endmodule
// ---------------------------------------------------------------------
''')
print(
'''
module clb_NCY0_MX (input clk, input [7:0] din, output [7:0] dout);
parameter LOC="SLICE_X16Y129_FIXME";
parameter BEL="A6LUT_FIXME";
parameter N=-1;
wire [3:0] o;
assign dout[0] = o[1];
wire o6, o5;
reg [3:0] s;
always @(*) begin
s = din[7:4];
s[N] = o6;
end
(* LOC=LOC, BEL=BEL, KEEP, DONT_TOUCH *)
LUT6_2 #(
.INIT(64'h8000_0000_0000_0001)
) lut (
.I0(din[0]),
.I1(din[1]),
.I2(din[2]),
.I3(din[3]),
.I4(din[4]),
.I5(din[5]),
.O5(o5),
.O6(o6));
(* LOC=LOC, KEEP, DONT_TOUCH *)
CARRY4 carry4(.O(o), .CO(), .DI(din[3:0]), .S(s), .CYINIT(1'b0), .CI());
endmodule
module clb_NCY0_O5 (input clk, input [7:0] din, output [7:0] dout);
parameter LOC="SLICE_X16Y129_FIXME";
parameter BEL="A6LUT_FIXME";
parameter N=-1;
wire [3:0] o;
assign dout[0] = o[1];
wire o6, o5;
reg [3:0] s;
reg [3:0] di;
always @(*) begin
s = din[7:4];
s[N] = o6;
di = {din[3:0]};
di[N] = o5;
end
(* LOC=LOC, BEL=BEL, KEEP, DONT_TOUCH *)
LUT6_2 #(
.INIT(64'h8000_0000_0000_0001)
) lut (
.I0(din[0]),
.I1(din[1]),
.I2(din[2]),
.I3(din[3]),
.I4(din[4]),
.I5(din[5]),
.O5(o5),
.O6(o6));
(* LOC=LOC, KEEP, DONT_TOUCH *)
CARRY4 carry4(.O(o), .CO(), .DI(di), .S(s), .CYINIT(1'b0), .CI());
endmodule
''')
```
#### File: prjxray/gridinfo/gridinfo-txt2json.py
```python
import sys
import json
import re
db_tiles = set()
db_tile_prop = dict()
db_tile_sites = dict()
db_sites = set()
db_site_prop = dict()
db_site_tile = dict()
db_site_bit = dict()
def add_tile(tile):
if tile not in db_tiles:
db_tiles.add(tile)
db_tile_prop[tile] = dict()
db_tile_sites[tile] = list()
def add_site(site):
if site not in db_sites:
db_sites.add(site)
db_site_prop[site] = dict()
with open("%s.txt" % sys.argv[1]) as f:
for line in f:
line = line.split()
if line[0] == "TILEPROP":
add_tile(line[1])
db_tile_prop[line[1]][line[2]] = " ".join(line[3:])
continue
if line[0] == "TILESITE":
add_tile(line[1])
add_site(line[2])
db_tile_sites[line[1]].append(line[2])
db_site_tile[line[2]] = line[1]
continue
if line[0] == "SITEPROP":
add_site(line[1])
db_site_prop[line[1]][line[2]] = " ".join(line[3:])
continue
if line[0] == "SLICEBIT":
db_site_bit[line[1]] = line[2]
continue
assert False
print("Number of tiles: %d" % len(db_tiles))
print("Number of sites: %d" % len(db_sites))
print("Number of sites with bit: %d" % len(db_site_bit))
database = dict()
loc_to_tile = dict()
database["device"] = sys.argv[2]
database["tiles"] = dict()
for tile in db_tiles:
entry = dict()
entry["props"] = db_tile_prop[tile]
entry["sites"] = db_tile_sites[tile]
database["tiles"][tile] = entry
col = int(db_tile_prop[tile]["COLUMN"])
row = int(db_tile_prop[tile]["ROW"])
loc_to_tile[(col, row)] = tile
database["sites"] = dict()
for site in db_sites:
entry = dict()
entry["props"] = db_site_prop[site]
entry["tile"] = db_site_tile[site]
database["sites"][site] = entry
for site, bit in db_site_bit.items():
bit = bit.split("_")
bit_type = int(bit[4][1:])
bit_half = int(bit[5][1:])
bit_row = int(bit[6][1:])
bit_col = int(bit[7][1:])
bit_word = int(bit[9][1:])
assert len(bit) == 11
for i in range(50):
m = re.match("(.*)Y([0-9]+)", site)
this_site = "%sY%d" % (m.group(1), int(m.group(2)) + i)
tile = db_site_tile[this_site]
word = bit_word + 2 * i
if word >= 50: word += 1
entry = dict()
entry["BASE_FRAMEID"] = "0x%08x" % (
(bit_type << 23) | (bit_half << 22) | (bit_row << 17) |
(bit_col << 7))
entry["FRAME_TYPE"] = bit_type
entry["FRAME_HALF"] = bit_half
entry["FRAME_ROW"] = bit_row
entry["FRAME_COLUMN"] = bit_col
entry["WORDS"] = [word, word + 1]
database["tiles"][tile]["cfgcol"] = entry
if database["tiles"][tile]["props"]["TILE_TYPE"] in ("CLBLL_L",
"CLBLM_L"):
col = int(db_tile_prop[tile]["COLUMN"])
row = int(db_tile_prop[tile]["ROW"])
right_tile = loc_to_tile[(col + 1, row)]
database["tiles"][right_tile]["cfgcol"] = entry
if database["tiles"][tile]["props"]["TILE_TYPE"] in ("CLBLL_R",
"CLBLM_R"):
col = int(db_tile_prop[tile]["COLUMN"])
row = int(db_tile_prop[tile]["ROW"])
left_tile = loc_to_tile[(col - 1, row)]
database["tiles"][left_tile]["cfgcol"] = entry
tile_cfgcol_count = 0
cfgcols = set()
for tile in db_tiles:
if "cfgcol" in database["tiles"][tile]:
cfgcols.add(database["tiles"][tile]["cfgcol"]["BASE_FRAMEID"])
tile_cfgcol_count += 1
print("Number of assigned columns: %d" % len(cfgcols))
print("Number of tiles with assigned column: %d" % tile_cfgcol_count)
with open("%s.json" % sys.argv[1], "w") as f:
print(json.dumps(database, sort_keys=True, indent="\t"), file=f)
```
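The `BASE_FRAMEID` written above is a fixed-width packing of the frame address fields. A small worked example with made-up field values (not taken from a real device):
```python
# Hypothetical field values, for illustration only.
bit_type, bit_half, bit_row, bit_col = 0, 1, 3, 26
base_frameid = (bit_type << 23) | (bit_half << 22) | (bit_row << 17) | (bit_col << 7)
print("0x%08x" % base_frameid)  # 0x00460d00
```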
#### File: prjxray/prjxray/grid.py
```python
from collections import namedtuple
import enum
from prjxray import segment_map
class BlockType(enum.Enum):
# Frames describing CLB features, interconnect, clocks and IOs.
CLB_IO_CLK = 'CLB_IO_CLK'
# Frames describing block RAM initialization.
BLOCK_RAM = 'BLOCK_RAM'
GridLoc = namedtuple('GridLoc', 'grid_x grid_y')
GridInfo = namedtuple('GridInfo', 'segment bits sites tile_type')
Bits = namedtuple('Bits', 'base_address frames offset words')
BitsInfo = namedtuple('BitsInfo', 'segment_type tile bits')
class Grid(object):
""" Object that represents grid for a given database.
Provides methods to inspect grid by name or location. Also provides mapping
of segment offsets for particular grid locations and their tile types.
"""
def __init__(self, tilegrid):
self.tilegrid = tilegrid
self.loc = {}
self.tileinfo = {}
# Map of segment name to tiles in that segment
self.segments = {}
        # Map of base_address -> (segment type, segment name)
        self.base_addresses = {}
for tile in self.tilegrid:
tileinfo = self.tilegrid[tile]
grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
assert grid_loc not in self.loc
self.loc[grid_loc] = tile
bits = {}
if 'segment' in tileinfo:
if tileinfo['segment'] not in self.segments:
self.segments[tileinfo['segment']] = []
self.segments[tileinfo['segment']].append(tile)
if 'bits' in tileinfo:
for k in tileinfo['bits']:
segment_type = BlockType(k)
base_address = int(tileinfo['bits'][k]['baseaddr'], 0)
bits[segment_type] = Bits(
base_address=base_address,
frames=tileinfo['bits'][k]['frames'],
offset=tileinfo['bits'][k]['offset'],
words=tileinfo['bits'][k]['words'],
)
self.tileinfo[tile] = GridInfo(
segment=tileinfo['segment'] if 'segment' in tileinfo else None,
bits=bits,
sites=tileinfo['sites'],
tile_type=tileinfo['type'],
)
x, y = zip(*self.loc.keys())
self._dims = (min(x), max(x), min(y), max(y))
def tiles(self):
""" Return list of tiles. """
return self.tileinfo.keys()
def tile_locations(self):
""" Return list of tile locations. """
return self.loc.keys()
def dims(self):
""" Returns (x_min, x_max, y_min, y_max) for given Grid. """
return self._dims
def is_populated(self, grid_loc):
return grid_loc in self.loc
def loc_of_tilename(self, tilename):
tileinfo = self.tilegrid[tilename]
return GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
def tilename_at_loc(self, grid_loc):
return self.loc[grid_loc]
def gridinfo_at_loc(self, grid_loc):
return self.tileinfo[self.loc[grid_loc]]
def gridinfo_at_tilename(self, tilename):
return self.tileinfo[tilename]
def iter_all_frames(self):
for tile, tileinfo in self.tileinfo.items():
for segment_type, bits in tileinfo.bits.items():
yield BitsInfo(
segment_type=segment_type,
tile=tile,
bits=bits,
)
def get_segment_map(self):
return segment_map.SegmentMap(self)
```
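A minimal usage sketch for `Grid`, assuming the `prjxray` package is importable and feeding it a hand-written two-tile grid instead of a real `tilegrid.json` (tile names, sites and the base address below are invented):
```python
from prjxray.grid import Grid
tilegrid = {
    'CLBLM_R_X3Y80': {
        'grid_x': 10, 'grid_y': 20,
        'type': 'CLBLM_R',
        'sites': {'SLICE_X11Y80': 'SLICEM'},
        'segment': 'SEG_CLBLM_R_X3Y80',
        'bits': {
            'CLB_IO_CLK': {'baseaddr': '0x00020500', 'frames': 36, 'offset': 0, 'words': 2},
        },
    },
    'INT_R_X3Y80': {
        'grid_x': 9, 'grid_y': 20,
        'type': 'INT_R',
        'sites': {},
    },
}
grid = Grid(tilegrid)
print(grid.dims())                      # (9, 10, 20, 20)
info = grid.gridinfo_at_tilename('CLBLM_R_X3Y80')
print(info.tile_type, list(info.bits))  # CLBLM_R [<BlockType.CLB_IO_CLK: 'CLB_IO_CLK'>]
```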
#### File: prjxray/utils/test_fasm2frames.py
```python
import fasm2frames
import unittest
from io import StringIO
import re
def frm2bits(txt):
'''
Convert output .frm file text to set of (frame addr, word #, bit index) tuples
'''
bits_out = set()
for l in txt.split('\n'):
l = l.strip()
if not l:
continue
# 0x00020500 0x00000000,0x00000000,0x00000000,...
addr, words = l.split(' ')
addr = int(addr, 0)
words = words.split(',')
assert (101 == len(words))
for wordi, word in enumerate(words):
word = int(word, 0)
for biti in range(32):
val = word & (1 << biti)
if val:
bits_out.add((addr, wordi, biti))
return bits_out
def bitread2bits(txt):
'''
Convert .bits text file (ie bitread output) to set of (frame addr, word #, bit index) tuples
'''
bits_ref = set()
for l in txt.split('\n'):
l = l.strip()
if not l:
continue
# bit_0002050b_004_14
m = re.match(r'bit_(.{8})_(.{3})_(.{2})', l)
addr = int(m.group(1), 16)
word = int(m.group(2), 10)
bit = int(m.group(3), 10)
bits_ref.add((addr, word, bit))
return bits_ref
class TestStringMethods(unittest.TestCase):
def test_lut(self):
'''Simple smoke test on just the LUTs'''
fout = StringIO()
fasm2frames.run(open('test_data/lut.fasm', 'r'), fout)
def bitread_frm_equals(self, frm_fn, bitread_fn):
fout = StringIO()
fasm2frames.run(open(frm_fn, 'r'), fout)
# Build a list of output used bits
bits_out = frm2bits(fout.getvalue())
# Build a list of reference used bits
bits_ref = bitread2bits(open(bitread_fn, 'r').read())
        # Now check for equivalence vs reference design
self.assertEquals(len(bits_ref), len(bits_out))
self.assertEquals(bits_ref, bits_out)
def test_lut_int(self):
self.bitread_frm_equals(
'test_data/lut_int.fasm', 'test_data/lut_int/design.bits')
def test_ff_int(self):
self.bitread_frm_equals(
'test_data/ff_int.fasm', 'test_data/ff_int/design.bits')
def test_ff_int_op1(self):
'''Omitted key set to '''
self.bitread_frm_equals(
'test_data/ff_int_op1.fasm', 'test_data/ff_int/design.bits')
# Same check as above, but isolated test case
def test_opkey_01_default(self):
'''Optional key with binary omitted value should produce valid result'''
fin = StringIO("CLBLM_L_X10Y102.SLICEM_X0.SRUSEDMUX")
fout = StringIO()
fasm2frames.run(fin, fout)
def test_opkey_01_1(self):
fin = StringIO("CLBLM_L_X10Y102.SLICEM_X0.SRUSEDMUX 1")
fout = StringIO()
fasm2frames.run(fin, fout)
def test_opkey_enum(self):
'''Optional key with enumerated value should produce syntax error'''
# CLBLM_L.SLICEM_X0.AMUX.O6 !30_06 !30_07 !30_08 30_11
fin = StringIO("CLBLM_L_X10Y102.SLICEM_X0.AMUX.O6")
fout = StringIO()
try:
fasm2frames.run(fin, fout)
self.fail("Expected syntax error")
except fasm2frames.FASMSyntaxError:
pass
def test_ff_int_0s(self):
'''Explicit 0 entries'''
self.bitread_frm_equals(
'test_data/ff_int_0s.fasm', 'test_data/ff_int/design.bits')
def test_badkey(self):
'''Bad key should throw syntax error'''
fin = StringIO("CLBLM_L_X10Y102.SLICEM_X0.SRUSEDMUX 2")
fout = StringIO()
try:
fasm2frames.run(fin, fout)
self.fail("Expected syntax error")
except fasm2frames.FASMSyntaxError:
pass
def test_dupkey(self):
'''Duplicate key should throw syntax error'''
fin = StringIO(
"""\
CLBLM_L_X10Y102.SLICEM_X0.SRUSEDMUX 0
CLBLM_L_X10Y102.SLICEM_X0.SRUSEDMUX 1
""")
fout = StringIO()
try:
fasm2frames.run(fin, fout)
self.fail("Expected syntax error")
except fasm2frames.FASMSyntaxError:
pass
def test_sparse(self):
        '''Verify sparse equivalent to normal encoding'''
frm_fn = 'test_data/lut_int.fasm'
fout_sparse = StringIO()
fasm2frames.run(open(frm_fn, 'r'), fout_sparse, sparse=True)
fout_sparse_txt = fout_sparse.getvalue()
bits_sparse = frm2bits(fout_sparse_txt)
fout_full = StringIO()
fasm2frames.run(open(frm_fn, 'r'), fout_full, sparse=False)
fout_full_txt = fout_full.getvalue()
bits_full = frm2bits(fout_full_txt)
        # Now check for equivalence vs reference design
self.assertEquals(len(bits_sparse), len(bits_full))
self.assertEquals(bits_sparse, bits_full)
        # Verify the full ROI description is much bigger than the sparse one
# It will still be decent size though since even sparse occupies all columns in that area
self.assertGreaterEqual(len(fout_full_txt), len(fout_sparse_txt) * 4)
if __name__ == '__main__':
unittest.main()
```
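The two helpers above parse different textual encodings of the same (frame address, word, bit) triples. A quick standalone illustration with fabricated values:
```python
import re
# One .frm-style line: a frame address followed by 101 comma-separated 32-bit words.
frm_line = '0x00020500 ' + ','.join(['0x00000000'] * 101)
addr, words = frm_line.split(' ')
print(int(addr, 0), len(words.split(',')))  # 132352 101
# One bitread-style line naming a single set bit: bit_<addr>_<word>_<bit>.
m = re.match(r'bit_(.{8})_(.{3})_(.{2})', 'bit_00020500_004_14')
print(int(m.group(1), 16), int(m.group(2), 10), int(m.group(3), 10))  # 132352 4 14
```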
|
{
"source": "jeremysalwen/open_spiel",
"score": 2
}
|
#### File: python/examples/leduc_nfsp.py
```python
import abc
import os
import random
from typing import List
import tensorflow.compat.v1 as tf
from absl import app
from absl import flags
from absl import logging
from open_spiel.python import policy, rl_agent
from open_spiel.python import rl_environment
from open_spiel.python.algorithms import exploitability
from open_spiel.python.algorithms import nfsp
FLAGS = flags.FLAGS
flags.DEFINE_string("game_name", "leduc_poker",
"Name of the game.")
flags.DEFINE_integer("num_players", 2,
"Number of players.")
flags.DEFINE_integer("num_train_episodes", int(20e6),
"Number of training episodes.")
flags.DEFINE_integer("eval_every", 10000,
"Episode frequency at which the agents are evaluated.")
flags.DEFINE_list("hidden_layers_sizes", [
128,
], "Number of hidden units in the avg-net and Q-net.")
flags.DEFINE_integer("replay_buffer_capacity", int(2e5),
"Size of the replay buffer.")
flags.DEFINE_integer("reservoir_buffer_capacity", int(2e6),
"Size of the reservoir buffer.")
flags.DEFINE_integer("min_buffer_size_to_learn", 1000,
"Number of samples in buffer before learning begins.")
flags.DEFINE_float("anticipatory_param", 0.1,
"Prob of using the rl best response as episode policy.")
flags.DEFINE_integer("batch_size", 128,
"Number of transitions to sample at each learning step.")
flags.DEFINE_integer("learn_every", 64,
"Number of steps between learning updates.")
flags.DEFINE_float("rl_learning_rate", 0.01,
"Learning rate for inner rl agent.")
flags.DEFINE_float("sl_learning_rate", 0.01,
"Learning rate for avg-policy sl network.")
flags.DEFINE_string("optimizer_str", "sgd",
"Optimizer, choose from 'adam', 'sgd'.")
flags.DEFINE_string("loss_str", "mse",
"Loss function, choose from 'mse', 'huber'.")
flags.DEFINE_integer("update_target_network_every", 19200,
"Number of steps between DQN target network updates.")
flags.DEFINE_float("discount_factor", 1.0,
"Discount factor for future rewards.")
flags.DEFINE_integer("epsilon_decay_duration", int(20e6),
"Number of game steps over which epsilon is decayed.")
flags.DEFINE_float("epsilon_start", 0.06,
"Starting exploration parameter.")
flags.DEFINE_float("epsilon_end", 0.001,
"Final exploration parameter.")
flags.DEFINE_enum("evaluation_metric", "nash_conv", ["exploitability", "nash_conv", "avg_return"],
"Choose from 'exploitability', 'nash_conv', 'avg_return'.")
flags.DEFINE_integer("evaluation_opponent_pool_size", 5,
"Only affects the 'avg_return' evaluation metric. How many past checkpoints to use as the pool of opponents.")
flags.DEFINE_enum("evaluation_opponent_pool", "stratified", ["recent", "random", "stratified"],
"Only affects the 'avg_return' evaluation metric. Determines which how to sample the pool of past opponents to use when evaluating average returns.")
flags.DEFINE_enum("evaluation_opponent_sampling", "independent", ["independent", "correlated", "perturbed"],
"Only affects the' avg_return' evaluation metric. Determines how to sample rosters of opponents from the pool of possible opponents."
"Options are 'indpendent' to sample each player independently, 'correlated' to copy entire rosters from a previous episode, "
"and 'perturbed' to use the current episode's roster with a single opponent copied from a previous episode.")
flags.DEFINE_integer("evaluation_num_samples", 1000,
"Only affects the 'avg_return' evaluation metric. How many episodes of play to sample for calculating the average return.")
flags.DEFINE_bool("use_checkpoints", True, "Save/load neural network weights.")
flags.DEFINE_string("checkpoint_dir", "/tmp/nfsp_test",
"Directory to save/load the agent.")
class NFSPPolicies(policy.Policy):
"""Joint policy to be evaluated."""
def __init__(self, env, nfsp_policies, mode):
game = env.game
player_ids = list(range(FLAGS.num_players))
super(NFSPPolicies, self).__init__(game, player_ids)
self._policies = nfsp_policies
self._mode = mode
self._obs = {
"info_state": [None] * FLAGS.num_players,
"legal_actions": [None] * FLAGS.num_players
}
def action_probabilities(self, state, player_id=None):
cur_player = state.current_player()
legal_actions = state.legal_actions(cur_player)
self._obs["current_player"] = cur_player
self._obs["info_state"][cur_player] = (
state.information_state_tensor(cur_player))
self._obs["legal_actions"][cur_player] = legal_actions
info_state = rl_environment.TimeStep(
observations=self._obs, rewards=None, discounts=None, step_type=None)
with self._policies[cur_player].temp_mode_as(self._mode):
p = self._policies[cur_player].step(info_state, is_evaluation=True).probs
prob_dict = {action: p[action] for action in legal_actions}
return prob_dict
def latest_checkpoint_dir():
return os.path.join(FLAGS.checkpoint_dir, "latest")
def checkpoint_dir(episode):
return os.path.join(FLAGS.checkpoint_dir, str(episode))
def list_saved_checkpoints():
return list(sorted(int(p) for p in os.listdir(FLAGS.checkpoint_dir) if p != "latest"))
def most_recent_checkpoints(n):
return list_saved_checkpoints()[-n + 1:-1]
def random_checkpoints(n):
checkpoints = list_saved_checkpoints()[:-1]
if n > len(checkpoints):
n = len(checkpoints)
return random.sample(checkpoints, n)
def stratified_checkpoints(n):
checkpoints = list_saved_checkpoints()[:-1]
if n > len(checkpoints):
n = len(checkpoints)
if n == 0:
return []
elif n == 1:
return [checkpoints[0]]
else:
return [checkpoints[int(i * (len(checkpoints) - 1) / (n - 1))] for i in range(n)]
class OpponentDistribution(metaclass=abc.ABCMeta):
"""
Represents a distribution of opponent agents, conditional on a single fixed agent.
"""
@abc.abstractmethod
def sample(self, fixed_agent: rl_agent.AbstractAgent, fixed_agent_player_id: int) -> List[rl_agent.AbstractAgent]:
pass
class IndependentOpponentDistribution(OpponentDistribution):
"""
A distribution where each agent is chosen independently from a uniform distribution.
"""
def __init__(self, agents: List[List[rl_agent.AbstractAgent]]):
"""
:param agents: List of agents for each player id. Note that player_id is the first dimension.
"""
self.agents = agents
def sample(self, fixed_agent: rl_agent.AbstractAgent, fixed_agent_player_id: int) -> List[rl_agent.AbstractAgent]:
selected_agents = [random.choice(choices) for choices in self.agents]
selected_agents[fixed_agent_player_id] = fixed_agent
return selected_agents
class CorrelatedOpponentDistribution(OpponentDistribution):
"""
A distribution where opponents are chosen uniformly from a list of N "rosters".
"""
def __init__(self, rosters: List[List[rl_agent.AbstractAgent]]):
"""
:param rosters: A list of agent rosters to randomly select from. Note that player_id is the second dimension.
"""
self.rosters = rosters
def sample(self, fixed_agent: rl_agent.AbstractAgent, fixed_agent_player_id: int) -> List[rl_agent.AbstractAgent]:
selected_agents = random.choice(self.rosters)
selected_agents[fixed_agent_player_id] = fixed_agent
return selected_agents
class PerturbedOpponentDistribution(OpponentDistribution):
"""
A distribution where there is a baseline roster of agents, and only a single agent is randomized as a deviation from
that roster.
"""
def __init__(self, default_roster: List[rl_agent.AbstractAgent],
                 other_agents: List[List[rl_agent.AbstractAgent]]):
"""
:param default_roster: The default set of agents, one per each player_id.
:param other_agents: The list of alternative agents, per player id. Note that player_id is the first dimension.
"""
self.default_roster = default_roster
self.other_agents = other_agents or [[] for _ in self.default_roster]
def sample(self, fixed_agent: rl_agent.AbstractAgent, fixed_agent_player_id: int) -> List[rl_agent.AbstractAgent]:
selected_agents = self.default_roster
selected_agents[fixed_agent_player_id] = fixed_agent
other_positions = [i for i in range(len(selected_agents)) if i != fixed_agent_player_id]
if other_positions:
position_to_perturb = random.choice(other_positions)
if self.other_agents[position_to_perturb]:
selected_agents[position_to_perturb] = random.choice(self.other_agents[position_to_perturb])
return selected_agents
def monte_carlo_returns(env: rl_environment.Environment, agents: List[rl_agent.AbstractAgent]) -> List[float]:
returns = [0.0 for _ in agents]
discounts = [1.0 for _ in agents]
time_step = env.reset()
while True:
if time_step.rewards:
returns = [R + r * d for (R, r, d) in zip(returns, time_step.rewards, discounts)]
discounts = time_step.discounts if time_step.discounts else [1.0 for _ in time_step.rewards]
if time_step.last():
break
player_id = time_step.observations["current_player"]
agent_output = agents[player_id].step(time_step, True)
action_list = [agent_output.action]
time_step = env.step(action_list)
for agent in agents:
agent.step(time_step)
return returns
def average_returns(env: rl_environment.Environment, agents_to_evaluate: List[rl_agent.AbstractAgent],
opponent_distribution: OpponentDistribution, n_samples: int) -> List[float]:
"""
:param env: Game environment
:param agents_to_evaluate: List of N agents to evaluate, one for each player in the game.
:param opponent_distribution: The distribution of opponents to evaluate the agents against.
:param n_samples: Number of games to play
:return: Length N array of average agent returns
"""
samples = [0.0 for _ in agents_to_evaluate]
for i in range(n_samples):
for player_id, agent in enumerate(agents_to_evaluate):
agents = opponent_distribution.sample(agent, player_id)
returns = monte_carlo_returns(env, agents)
samples[player_id] += returns[player_id]
return [s / n_samples for s in samples]
def evaluate_monte_carlo(env: rl_environment.Environment, latest_agents: List[rl_agent.AbstractAgent], nfsp_args,
opponent_episodes, n_samples, sampling_mode="independent") -> List[float]:
"""
:param env: Game environment
    :param latest_agents: The current agents to evaluate, one per player id.
:param nfsp_args: Args for constructing the NFSP agent. Must match those used to save the previous agents.
:param opponent_episodes: List of episode numbers to load agents from for use as opponents
:param n_samples: Number of games to sample.
    :param sampling_mode: How to construct rosters of agents for each game; options are "independent", "correlated", and "perturbed".
:return: List of average returns, per agent in latest_agents.
"""
opponent_agents = []
for episode in opponent_episodes:
episode_agents = []
for player_id in range(len(latest_agents)):
agent = nfsp.NFSP(player_id=player_id, **nfsp_args)
agent.restore(checkpoint_dir(episode))
episode_agents.append(agent)
opponent_agents.append(episode_agents)
if sampling_mode == "independent":
distribution = IndependentOpponentDistribution(list(zip(*opponent_agents, latest_agents)))
elif sampling_mode == "correlated":
distribution = CorrelatedOpponentDistribution(opponent_agents + [latest_agents])
elif sampling_mode == "perturbed":
distribution = PerturbedOpponentDistribution(latest_agents, list(zip(*opponent_agents)))
else:
raise ValueError("Invalid sampling_mode argument to evaluate_monte_carlo: " + sampling_mode)
return average_returns(env, latest_agents, distribution, n_samples)
def main(unused_argv):
logging.info("Loading %s", FLAGS.game_name)
game = FLAGS.game_name
env_configs = {}
if FLAGS.num_players:
env_configs["players"] = FLAGS.num_players
env = rl_environment.Environment(game, **env_configs)
num_players = env.num_players
info_state_size = env.observation_spec()["info_state"][0]
num_actions = env.action_spec()["num_actions"]
hidden_layers_sizes = [int(l) for l in FLAGS.hidden_layers_sizes]
kwargs = {
"replay_buffer_capacity": FLAGS.replay_buffer_capacity,
"reservoir_buffer_capacity": FLAGS.reservoir_buffer_capacity,
"min_buffer_size_to_learn": FLAGS.min_buffer_size_to_learn,
"anticipatory_param": FLAGS.anticipatory_param,
"batch_size": FLAGS.batch_size,
"learn_every": FLAGS.learn_every,
"rl_learning_rate": FLAGS.rl_learning_rate,
"sl_learning_rate": FLAGS.sl_learning_rate,
"optimizer_str": FLAGS.optimizer_str,
"loss_str": FLAGS.loss_str,
"update_target_network_every": FLAGS.update_target_network_every,
"discount_factor": FLAGS.discount_factor,
"epsilon_decay_duration": FLAGS.epsilon_decay_duration,
"epsilon_start": FLAGS.epsilon_start,
"epsilon_end": FLAGS.epsilon_end,
}
with tf.Session() as sess:
nfsp_args = {
"session": sess,
"state_representation_size": info_state_size,
"num_actions": num_actions,
"hidden_layers_sizes": hidden_layers_sizes,
**kwargs
}
# pylint: disable=g-complex-comprehension
agents = [
nfsp.NFSP(player_id=idx, **nfsp_args) for idx in range(num_players)
]
joint_avg_policy = NFSPPolicies(env, agents, nfsp.MODE.average_policy)
sess.run(tf.global_variables_initializer())
start_episode = 0
if FLAGS.use_checkpoints:
os.makedirs(FLAGS.checkpoint_dir, exist_ok=True)
for agent in agents:
if agent.has_checkpoint(latest_checkpoint_dir()):
agent.restore(latest_checkpoint_dir())
start_episode = list_saved_checkpoints()[-1]
for ep in range(start_episode, start_episode + FLAGS.num_train_episodes):
if (ep + 1) % FLAGS.eval_every == 0:
losses = [agent.loss for agent in agents]
logging.info("Losses: %s", losses)
if FLAGS.use_checkpoints:
for agent in agents:
agent.save(latest_checkpoint_dir())
agent.save(checkpoint_dir(ep))
if FLAGS.evaluation_metric == "exploitability":
# Avg exploitability is implemented only for 2 players constant-sum
# games, use nash_conv otherwise.
expl = exploitability.exploitability(env.game, joint_avg_policy)
logging.info("[%s] Exploitability AVG %s", ep + 1, expl)
elif FLAGS.evaluation_metric == "nash_conv":
nash_conv = exploitability.nash_conv(env.game, joint_avg_policy)
logging.info("[%s] NashConv %s", ep + 1, nash_conv)
elif FLAGS.evaluation_metric == "avg_return":
if FLAGS.evaluation_opponent_pool == "recent":
opponent_checkpoints = most_recent_checkpoints(FLAGS.evaluation_opponent_pool_size)
elif FLAGS.evaluation_opponent_pool == "random":
opponent_checkpoints = random_checkpoints(FLAGS.evaluation_opponent_pool_size)
elif FLAGS.evaluation_opponent_pool == "stratified":
opponent_checkpoints = stratified_checkpoints(FLAGS.evaluation_opponent_pool_size)
avg_return = evaluate_monte_carlo(env, agents, nfsp_args, opponent_checkpoints,
FLAGS.evaluation_num_samples, FLAGS.evaluation_opponent_sampling)
logging.info("[%s] AvgReturn %s", ep + 1, avg_return)
else:
raise ValueError(" ".join(("Invalid evaluation metric, choose from",
"'exploitability', 'nash_conv', 'avg_return.")))
logging.info("_____________________________________________")
time_step = env.reset()
while not time_step.last():
player_id = time_step.observations["current_player"]
agent_output = agents[player_id].step(time_step)
action_list = [agent_output.action]
time_step = env.step(action_list)
# Episode is over, step all agents with final info state.
for agent in agents:
agent.step(time_step)
if __name__ == "__main__":
app.run(main)
```
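The opponent-pool selection is easiest to see on plain lists. The sketch below mimics `stratified_checkpoints` on a hard-coded list of saved episode numbers (no checkpoint directory is read; the numbers are invented):
```python
def stratified(checkpoints, n):
    # Same index arithmetic as stratified_checkpoints, minus the directory listing.
    checkpoints = checkpoints[:-1]  # drop the latest checkpoint (the current agent)
    n = min(n, len(checkpoints))
    if n == 0:
        return []
    if n == 1:
        return [checkpoints[0]]
    return [checkpoints[int(i * (len(checkpoints) - 1) / (n - 1))] for i in range(n)]
saved = [9999, 19999, 29999, 39999, 49999, 59999]  # fabricated episode numbers
print(stratified(saved, 3))  # [9999, 29999, 49999]
```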
|
{
"source": "jeremysanders/dump_spex",
"score": 2
}
|
#### File: jeremysanders/dump_spex/spex_to_xspec.py
```python
import os
import subprocess
import os.path
import re
import numpy as np
from astropy.io import fits
###############################################################################
# Adjustable parameters
# output root (using spex version) for apec format filenames
# creates outroot_(line|coco).fits
outroot = 'spex'
##############
# temperature grid: uncomment line and comment other to select
# old APEC temperature grid
#temperatures = np.logspace(np.log10(0.0008617385), np.log10(86.17385), 51)
# new APEC temperature grid from 3.0.9+ (see http://atomdb.org/interpolation/)
temperatures = np.logspace(np.log10(0.0008617385), np.log10(86.17385), 201)
# denser gridding between useful 0.01 and 100 keV
#temperatures = np.logspace(np.log10(0.01), np.log10(100), 201)
# for quick testing
#temperatures = np.array([1,2,4,8])
##############
# energy range and stepping to sample continuum (log spacing used)
contminenergy = 0.05
contmaxenergy = 15.
contenergysteps = 1024
# energy range and stepping to sample pseudo-continuum (log spacing used)
pcontminenergy = 0.05
pcontmaxenergy = 15.
pcontenergysteps = 2048
# Limit for storing lines separately to save space. Lines lower than
# this flux (photon cm^3/s) are put into a pseudo-continuum rather
# than stored separately. The APEC default is 1e-20, but this
# produces many fewer lines using this for SPEX. Set to None to
# disable putting weak lines into a pseudo-continuum.
minepsilon = 1e-22
#minepsilon = None
# where to put output files (workdir by default)
tmpdir = os.environ.get('WORKDIR', 'workdir')
# end adjustable parameters
###############################################################################
# location of spex installation
try:
spexroot = os.environ['SPEX90']
except KeyError:
raise RuntimeError('Please set SPEX90 and initialize the SPEX environment')
# executable to use to run spex
spexexecutable = os.path.join(spexroot, 'bin/spex')
# for checking for numerical data
digits = set('0123456789')
# conversions
keV_K = 11.6048e6
keV_erg = 1.6022e-9
# convert from unit norm to cm3 in spex
norm_factor_cm3 = 1e58
# identify element names with element numbers in spex
elements = (
'H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O',
'F', 'Ne', 'Na', 'Mg', 'Al', 'Si', 'P',
'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti',
'V', 'Cr', 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn',
)
# create dict to convert element names into numbers
element_nums = {}
for num, element in enumerate(elements):
element_nums[element] = num+1
# these are the apec elements (non hydrogen)
# (note this distinction is historical, as apec used fewer elements)
apec_elements = elements[1:]
# with hydrogen
all_elements = elements
# roman numerals (which come out of spex)
roman_numerals = (
'I', 'II', 'III', 'IV', 'V', 'VI', 'VII',
'VIII', 'IX', 'X', 'XI', 'XII', 'XIII', 'XIV',
'XV', 'XVI', 'XVII', 'XVIII', 'XIX', 'XX',
'XXI', 'XXII', 'XXIII', 'XXIV', 'XXV', 'XXVI', 'XXVII',
'XXVIII', 'XXIX', 'XXX'
)
# dict to convert numerals to numbers
roman_to_number = {}
for num, numeral in enumerate(roman_numerals):
roman_to_number[numeral] = num+1
# abundance to use to get continuua of elements more exactly
continuum_mult = 1000.
class Line:
def __init__(self, element, ion, wavelength, epsilon, energy):
self.element = element
self.ion = ion
self.wavelength = wavelength
self.epsilon = epsilon
self.energy = energy
def deleteFile(f):
"""For debugging."""
os.unlink(f)
def cnvtNum(s):
"""Sometimes scientific notation values in spex output are expressed
without an 'e', e.g. 5.62495-100, so we need to convert by hand.
"""
try:
v = float(s)
except ValueError:
m = re.match('([0-9.]+)([+-][0-9]{3})', s)
if not m:
raise ValueError
else:
a, b = m.groups()
v = float(a)*10**float(b)
return v
def writeScriptElements(fobj, elements, val):
"""Write commands to set abundance to val."""
for e in elements:
print('par %02i val %f' % (element_nums[e], val), file=fobj)
def writeScriptLines(fobj, T):
"""Write script to output lines."""
# set temperature
print('par t val %e' % T, file=fobj)
# switch on all apec elements
writeScriptElements(fobj, apec_elements, 1)
# compute model
print('calc', file=fobj)
# dump out lines
outfile = os.path.join(tmpdir, 'tmp_lines_T%010f' % T)
print('ascdump set flux 0', file=fobj)
print('ascdump set range %g:%g unit kev' % (contminenergy, contmaxenergy), file=fobj)
print('ascdump file %s 1 1 line' % outfile, file=fobj)
writeScriptElements(fobj, apec_elements, 0)
def writeScriptContinuua(fobj, T):
# set temperature
print('par t val %e' % T, file=fobj)
print('calc', file=fobj)
outfile = os.path.join(tmpdir, 'tmp_conti_T%010f_%s' % (T, 'H'))
print('ascdump file %s 1 1 tcl' % outfile, file=fobj)
for el in apec_elements:
writeScriptElements(fobj, (el,), continuum_mult)
print('calc', file=fobj)
outfile = os.path.join(tmpdir, 'tmp_conti_T%010f_%s' % (T, el))
print('ascdump set range %g:%g unit kev' % (contminenergy, contmaxenergy), file=fobj)
print('ascdump file %s 1 1 tcl' % outfile, file=fobj)
writeScriptElements(fobj, (el,), 0)
def writeScript(fobj, T):
"""Write header to script sent to spex."""
print('egrid log %e:%e %i' % (
contminenergy, contmaxenergy,
contenergysteps), file=fobj)
# switch to the latest spex model
print('var calc new', file=fobj)
print('var newmekal all true', file=fobj)
print('comp cie', file=fobj)
print('abundance ag', file=fobj)
writeScriptElements(fobj, elements[1:], 0)
writeScriptLines(fobj, T)
writeScriptContinuua(fobj, T)
print('quit', file=fobj)
def generateOutput():
"""Process all the temperatures and generate output files."""
for T in temperatures:
fname = os.path.join(tmpdir, 'tmp_spex_T%010f.script' % T)
with open(fname, 'w') as fout:
writeScript(fout, T)
with open(fname) as fin:
subprocess.call([spexexecutable], stdin=fin)
deleteFile(fname)
def makeLineHDU(lines, T, totflux):
"""Given lines list, produce line HDU.
lines is (element, ion, wavelength, epsilon, energy) list."""
# sort lines by element and ion and energy
lines.sort(key=lambda x: (x.element, x.ion, 1/x.wavelength))
    # construct FITS table in APEC format
col_lambda = fits.Column(
name='Lambda', format='1E', unit='A',
array=[i.wavelength for i in lines])
col_lambda_err = fits.Column(
name='Lambda_Err', format='1E', unit='A',
array=np.zeros( (len(lines),) ) + np.nan)
col_epsilon = fits.Column(
name='Epsilon', format='1E',
unit='photons cm^3 s^-1',
array=[v.epsilon for v in lines])
col_epsilon_err = fits.Column(
name='Epsilon_Err', format='1E',
unit='photons cm^3 s^-1',
array=np.zeros((len(lines),)) + np.nan)
col_element = fits.Column(
name='Element', format='1J',
array=[i.element for i in lines])
col_ion = fits.Column(
name='Ion', format='1J',
array=[i.ion for i in lines])
col_upperlev = fits.Column(
name='UpperLev', format='1J',
array=np.zeros((len(lines),)) + 2)
col_lowerlev = fits.Column(
name='LowerLev', format='1J',
array=np.zeros((len(lines),)) + 1)
tabhdu = fits.BinTableHDU.from_columns([
col_lambda, col_lambda_err, col_epsilon,
col_epsilon_err, col_element, col_ion,
col_upperlev, col_lowerlev])
tabhdu.name = 'EMISSIVITY'
h = tabhdu.header
h['HIERARCH TEMPERATURE'] = T*keV_K
h['XTEMP'] = T
# fixme wrong below (erg not photon)
h['TOT_LINE'] = totflux
h['N_LINES'] = len(lines)
return tabhdu
def interpretDumpedLines(T):
"""Interpret dumped lines file.
    Returns new HDU, number of lines, number of elements, and the list of weak lines
"""
print('Interpreting dumped lines for T=%g' % T)
totflux = 0.
elements = set()
weak_lines = []
lines = []
outfile = os.path.join(tmpdir, 'tmp_lines_T%010f.asc' % T)
for rline in open(outfile):
if rline.strip()[:1] in digits:
# data lines start with numbers
# horribly, have to hard code in column numbers here
element = element_nums[rline[9:12].strip()]
ion = roman_to_number[ rline[12:17].strip() ]
wavelength = cnvtNum( rline[102:115] )
energy_keV = cnvtNum( rline[87:100] )
# convert from total photon flux to normalised photon flux
epsilon = cnvtNum( rline[117:126] ) / norm_factor_cm3
# skip lines out of energy range
if energy_keV<contminenergy or energy_keV>contmaxenergy:
continue
line = Line(element, ion, wavelength, epsilon, energy_keV)
if minepsilon is None or epsilon > minepsilon:
# keep track of total flux in ergs
totflux += energy_keV*keV_erg*epsilon
elements.add(element)
lines.append(line)
else:
weak_lines.append(line)
deleteFile(outfile)
print('T=%g, %i strong lines, %i weak lines' % (
T, len(lines), len(weak_lines)))
tabhdu = makeLineHDU(lines, T, totflux)
return tabhdu, len(lines), len(elements), weak_lines
def readContinuum(filename):
"""Take spex dumped model spectrum file, and extract continuum."""
outenergy = []
outval = []
for line in open(filename):
p = line.strip().split()
if p[0][0] in digits:
outenergy.append(float(p[1]))
outval.append(cnvtNum(p[2]) * 1e44 / norm_factor_cm3)
return (np.array(outenergy), np.array(outval))
def interpretDumpedContinuum(T):
"""Interpret dumped continuum file.
"""
print('Interpreting dumped continuum for T=%g' % T)
# read in continum from each file
continuua = {}
for element in all_elements:
filename = os.path.join(
tmpdir, 'tmp_conti_T%010f_%s.asc' % (T, element))
energy, vals = readContinuum(filename)
continuua[element] = vals
deleteFile(filename)
# subtract H continuum from each element except hydrogen
# also divide by abundance these were generated at
for element in apec_elements:
continuua[element] -= continuua['H']
continuua[element] /= continuum_mult
# construct table
contformat = '%iE' % contenergysteps
col_element = fits.Column(
name='Z', format='1J',
array=[element_nums[i] for i in all_elements])
col_rmJ = fits.Column(
name='rmJ', format='1J', array=np.zeros(len(all_elements)))
col_N_Cont = fits.Column(
name='N_Cont', format='1J',
array= [contenergysteps]*len(all_elements))
col_E_Cont = fits.Column(
name='E_Cont', format=contformat,
unit='keV', array=np.resize(
energy, (len(all_elements), contenergysteps)))
col_Continuum = fits.Column(
name='Continuum', format=contformat,
unit='photons cm^3 s^-1 keV^-1',
array=[continuua[i] for i in all_elements])
col_Cont_Err = fits.Column(
name='Cont_Err', format=contformat,
unit='photons cm^3 s^-1 keV^-1',
array=np.zeros( (len(all_elements), contenergysteps) ))
# create zero pseudo-continuum to fill in later
pcontformat = '%iE' % pcontenergysteps
col_N_Pseudo = fits.Column(
name='N_Pseudo', format='1J',
array= [pcontenergysteps]*len(all_elements))
col_E_Pseudo = fits.Column(
name='E_Pseudo', format=pcontformat,
unit='keV',
array=np.zeros( (len(all_elements), pcontenergysteps) ))
col_Pseudo = fits.Column(
name='Pseudo', format=pcontformat,
array=np.zeros( (len(all_elements), pcontenergysteps) ))
col_Pseudo_Err = fits.Column(
name='Pseudo_Err', format=pcontformat,
array=np.zeros( (len(all_elements), pcontenergysteps) ))
tabhdu = fits.BinTableHDU.from_columns([
col_element, col_rmJ,
col_N_Cont, col_E_Cont,
col_Continuum, col_Cont_Err,
col_N_Pseudo, col_E_Pseudo,
col_Pseudo, col_Pseudo_Err])
tabhdu.name = 'EMISSIVITY'
h = tabhdu.header
h['HIERARCH TEMPERATURE'] = T*keV_K
h['XTEMP'] = T
h['DENSITY'] = 1.0
# sum flux
totcoco = 0.
for i in all_elements:
totcoco += continuua[i].sum()
h['TOT_COCO'] = totcoco
return (
tabhdu,
len(all_elements),
contenergysteps*len(all_elements),
pcontenergysteps*len(all_elements)
)
def interpretAllLines():
"""Interpret spex dumped spectra."""
# generate HDUs for each temperature
hdus = []
Nelement = []
Nline = []
weaklinelist = []
for T in temperatures:
hdu, numlines, numelements, weaklines = interpretDumpedLines(T)
Nline.append(numlines)
Nelement.append(numelements)
hdus.append(hdu)
weaklinelist.append(weaklines)
# construct HDU describing parameters
col_kT = fits.Column(
name='kT', format='1E', unit='keV',
array=temperatures)
col_EDensity = fits.Column(
name='EDensity', format='1E', unit='cm**-3',
array=np.zeros(len(temperatures))+1)
col_Nelement = fits.Column(
name='Nelement', format='1J',
array=Nelement)
col_Nline = fits.Column(
name='Nline', format='1J',
array=Nline)
tabhdu = fits.BinTableHDU.from_columns([
col_kT, col_EDensity, col_Nelement, col_Nline])
tabhdu.name = 'PARAMETERS'
# make output file containing all lines
hdulist = fits.HDUList([fits.PrimaryHDU(), tabhdu] + hdus)
hdulist.writeto('%s_line.fits' % outroot, overwrite=True)
return weaklinelist
def computePseudoContinuum(hdu, weaklines):
"""Compute pseudo continuum and enter into continuum HDU."""
energyedges = np.logspace(
np.log10(pcontminenergy), np.log10(pcontmaxenergy),
pcontenergysteps+1)
for i, element in enumerate(all_elements):
elidx = element_nums[element]
energies = [line.energy for line in weaklines if line.element==elidx]
epsilons = [line.epsilon for line in weaklines if line.element==elidx]
summedlines, edgesout = np.histogram(
energies, weights=epsilons, bins=energyedges)
# divide by bin width to convert to photon cm^3/s/keV
flux = summedlines / (energyedges[1:]-energyedges[:-1])
hdu.data.field('Pseudo')[i,:] = flux
hdu.data.field('E_Pseudo')[i,:] = 0.5*(energyedges[1:]+energyedges[:-1])
def interpretAllContinuum(weaklinelist):
"""Build up continuum output file."""
# make continuum HDUs for each temperature
hdus = []
NElement = []
NCont = []
NPseudo = []
for T, weaklines in zip(temperatures, weaklinelist):
hdu, numelem, numcont, numpseudo = interpretDumpedContinuum(T)
computePseudoContinuum(hdu, weaklines)
hdus.append(hdu)
NElement.append(numelem)
NCont.append(numcont)
NPseudo.append(numpseudo)
# construct HDU describing parameters
col_kT = fits.Column(
name='kT', format='1E', unit='keV',
array=temperatures)
col_EDensity = fits.Column(
name='EDensity', format='1E', unit='cm**-3',
array=np.zeros(len(temperatures))+1)
col_NElement = fits.Column(
name='NElement', format='1J',
array=NElement)
col_NCont = fits.Column(
name='NCont', format='1J',
array=NCont)
col_NPseudo = fits.Column(
name='NPseudo', format='1J',
array=NPseudo)
tabhdu = fits.BinTableHDU.from_columns([
col_kT, col_EDensity, col_NElement, col_NCont, col_NPseudo
])
tabhdu.name = 'PARAMETERS'
# make output file containing the continuum
hdulist = fits.HDUList([fits.PrimaryHDU(), tabhdu] + hdus)
hdulist.writeto('%s_coco.fits' % outroot, overwrite=True)
def main():
"""Main routine."""
generateOutput()
weaklinelist = interpretAllLines()
interpretAllContinuum(weaklinelist)
if __name__ == '__main__':
main()
```
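The reason `cnvtNum` exists is that SPEX ASCII dumps can drop the 'e' from extreme exponents. A standalone check of the same conversion logic (the sample strings are made up):
```python
import re
def cnvt(s):
    # Mirrors cnvtNum: handle values like '5.62495-100' that float() rejects.
    try:
        return float(s)
    except ValueError:
        m = re.match('([0-9.]+)([+-][0-9]{3})', s)
        if not m:
            raise ValueError(s)
        a, b = m.groups()
        return float(a) * 10**float(b)
print(cnvt('1.25e-03'))     # 0.00125
print(cnvt('5.62495-100'))  # 5.62495e-100
```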
|
{
"source": "jeremysanders/taskproc",
"score": 2
}
|
#### File: taskproc/tests/test_simple.py
```python
from __future__ import print_function
import hashlib
import sys
sys.path.append('..')
sys.path.append('.')
import taskproc
# a task class test
class TestTask(taskproc.Task):
def __init__(self, retnval, correct, requires=[]):
taskproc.Task.__init__(self, requires=requires)
self.retnval = retnval
self.correct = correct
def run(self):
assert self.correct == self.reqresults
#print(self.retnval, self.correct)
return self.retnval
# a function test
def testfunc(reqresults, retnval, correct):
assert reqresults == correct
#print(retnval, correct)
return retnval
def test(taskqueue):
t1 = TestTask(1, [])
t2 = taskproc.Task(func=testfunc, args=(2, [1]), requires=[t1])
t3 = taskproc.Task(func=testfunc, args=(3, [2,1]), requires=[t2, t1])
t4 = TestTask(4, [2,3], requires=[t2, t3])
t5 = TestTask(5, [1,2], requires=[t1, t2])
t10 = TestTask(10, [4, 5], requires=[t4, t5])
taskqueue.add(t10)
with taskqueue:
taskqueue.process()
def runtest():
try:
# test both standard and threaded queues
test(taskproc.TaskQueue())
# do this lots of times to try to check for races
for i in range(1000):
test(taskproc.TaskQueueThread(4))
except Exception:
print('%s: test failure' % sys.argv[0])
sys.exit(1)
    print('%s: test success' % sys.argv[0])
sys.exit(0)
if __name__ == "__main__":
runtest()
```
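Going only by what the test exercises, a task graph is wired through `requires`, only the final task is added to the queue, and processing happens inside a `with` block. A small hedged sketch of that API (the task names and printed strings are invented):
```python
import taskproc
def shout(reqresults, msg):
    # Each task receives the results of its requirements as the first argument.
    print(msg, 'after', reqresults)
    return msg
fetch = taskproc.Task(func=shout, args=('fetch',), requires=[])
build = taskproc.Task(func=shout, args=('build',), requires=[fetch])
queue = taskproc.TaskQueue()
queue.add(build)  # dependencies are pulled in through 'requires'
with queue:
    queue.process()
```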
|
{
"source": "jeremyscatigna/advance_lanelines_finding",
"score": 3
}
|
#### File: jeremyscatigna/advance_lanelines_finding/draw.py
```python
import numpy as np
import cv2
import lines
def draw_lane(img, warped_img, left_points, right_points, Minv):
# Create an image to draw the lines on
warp_zero = np.zeros_like(warped_img).astype(np.uint8)
color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
# Recast the x and y points into usable format for cv2.fillPoly()
left_fitx = left_points[0]
right_fitx = right_points[0]
ploty = left_points[1]
pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
pts = np.hstack((pts_left, pts_right))
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, Minv, (img.shape[1], img.shape[0]))
# Combine the result with the original image
return cv2.addWeighted(img, 1, newwarp, 0.3, 0)
def add_metrics(img, leftx, rightx, xm_per_pix=3.7/800, ym_per_pix = 25/720):
# Calculate radius of curvature
curvature_rads = lines.curvature_radius(leftx=leftx, rightx=rightx, img_shape=img.shape,
xm_per_pix=xm_per_pix, ym_per_pix=ym_per_pix)
# Calculate car offset
offsetx = lines.car_offset(leftx=leftx, rightx=rightx, img_shape=img.shape)
# Display lane curvature
out_img = img.copy()
cv2.putText(out_img, 'Left lane line curvature: {:.2f} m'.format(curvature_rads[0]),
(60, 60), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (255,255,255), 5)
cv2.putText(out_img, 'Right lane line curvature: {:.2f} m'.format(curvature_rads[1]),
(60, 110), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (255,255,255), 5)
# Display car offset
cv2.putText(out_img, 'Horizontal car offset: {:.2f} m'.format(offsetx),
(60, 160), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (255,255,255), 5)
return out_img
```
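A usage sketch for `draw_lane` with synthetic inputs: a black frame, a blank warped image, straight fake lane lines, and an identity matrix standing in for the real inverse perspective transform. Everything below is fabricated for illustration:
```python
import numpy as np
import cv2
from draw import draw_lane
h, w = 720, 1280
img = np.zeros((h, w, 3), dtype=np.uint8)  # stand-in for the undistorted frame
warped = np.zeros((h, w), dtype=np.uint8)  # stand-in for the warped binary image
Minv = np.eye(3)                           # identity instead of a real inverse perspective matrix
ploty = np.linspace(0, h - 1, h)
left_fitx = np.full_like(ploty, 400.0)     # fake straight left lane line
right_fitx = np.full_like(ploty, 900.0)    # fake straight right lane line
out = draw_lane(img, warped, (left_fitx, ploty), (right_fitx, ploty), Minv)
print(out.shape)  # (720, 1280, 3)
```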
#### File: jeremyscatigna/advance_lanelines_finding/helpers.py
```python
import matplotlib.pyplot as plt
import numpy as np
import cv2
# Helper method to plot two images side by side
def plt_images(img_1, title_1, img_2, title_2, cmap='gray'):
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(20, 10))
ax1.set_title(title_1, fontsize=16)
ax1.imshow(img_1)
ax2.set_title(title_2, fontsize=16)
ax2.imshow(img_2, cmap='gray')
def get_histogram(image):
return np.sum(image[image.shape[0]//2:, :], axis=0)
```
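`get_histogram` sums the bottom half of a binary warped image column by column; the peaks mark likely lane-line base positions. A tiny check on a synthetic image (the function is repeated inline so the snippet runs without matplotlib or cv2):
```python
import numpy as np
def get_histogram(image):
    return np.sum(image[image.shape[0]//2:, :], axis=0)
img = np.zeros((10, 8))  # synthetic 10x8 "binary warped" image
img[5:, 2] = 1           # left lane pixels in the lower half
img[5:, 6] = 1           # right lane pixels in the lower half
hist = get_histogram(img)
print(hist)           # [0. 0. 5. 0. 0. 0. 5. 0.]
print(hist.argmax())  # 2 (left peak; the right half is searched separately)
```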
|
{
"source": "jeremyschlatter/augur",
"score": 2
}
|
#### File: tests/gov/test_gov.py
```python
from eth_tester.exceptions import TransactionFailed
from utils import captureFilteredLogs, AssertLog, nullAddress, TokenDelta, PrintGasUsed
from pytest import raises, mark
pytestmark = mark.skip(reason="We might not even need governance and currently don't account for transferring ownership")
def test_gov(contractsFixture, universe, reputationToken, cash):
if not contractsFixture.paraAugur:
return
nexus = contractsFixture.contracts["OINexus"]
deployer = contractsFixture.accounts[0]
bob = contractsFixture.accounts[1]
alice = contractsFixture.accounts[2]
reputationToken.faucet(100, sender=bob)
reputationToken.faucet(100, sender=alice)
feePot = contractsFixture.getFeePot(universe)
cash.faucet(10000)
cash.approve(feePot.address, 10000000000000000)
reputationToken.approve(feePot.address, 10000000000000000, sender=bob)
reputationToken.approve(feePot.address, 10000000000000000, sender=alice)
rewardsToken = contractsFixture.upload('../src/contracts/Cash.sol', "rewardsToken")
lpToken = contractsFixture.upload('../src/contracts/Cash.sol', "lpToken")
# Deploy GOV token
govToken = contractsFixture.upload("../src/contracts/gov/GovToken.sol", constructorArgs=[deployer])
# Deploy Timelock
timelock = contractsFixture.upload("../src/contracts/gov/Timelock.sol", constructorArgs=[deployer])
# Deploy a FeePotStakingContract for S_REP (Fee Pot Tokens)
feePotStakingContract = contractsFixture.upload("../src/contracts/gov/FeePotStakingRewards.sol", constructorArgs=[deployer, deployer, govToken.address, feePot.address])
initialSupply = 11 * 10**6 * 10**18
govToken.setMintAllowance(feePotStakingContract.address, initialSupply)
feePotStakingContract.notifyRewardAmount(initialSupply)
# Deploy Governance
governance = contractsFixture.upload("../src/contracts/gov/Governance.sol", constructorArgs=[timelock.address, govToken.address])
# Cede control of Timelock to Governance
timelock.setAdmin(governance.address)
# Cede control of GOV Token to Governance
govToken.transferOwnership(timelock.address)
# Cede control of OINexus to Governance
nexus.transferOwnership(timelock.address)
# Cede control of FeePotStakingContract to Governance
feePotStakingContract.setRewardsDistribution(timelock.address)
feePotStakingContract.transferOwnership(timelock.address)
# Get S_REP
feePot.stake(1, sender=alice)
# Stake
feePot.approve(feePotStakingContract.address, 100, sender=alice)
feePotStakingContract.stake(1, sender=alice)
# Move time
timestamp = contractsFixture.eth_tester.backend.chain.header.timestamp
contractsFixture.eth_tester.time_travel(int(timestamp + 24 * 60 * 60))
# Redeem
feePotStakingContract.exit(sender=alice)
totalSupply = govToken.totalSupply()
assert governance.quorumVotes() - (totalSupply / 25) == 0
assert governance.proposalThreshold() - (totalSupply / 1000) == 0
assert govToken.balanceOf(alice) == totalSupply
target = govToken.address
signature = ""
calldata = govToken.mint_encode(bob, 100)
# Delegate votes to self and propose to mint GOV and fail due to time constraint
govToken.delegate(alice, sender=alice)
with raises(TransactionFailed):
governance.propose([target], [0], [signature], [calldata], "Give Bob the Monies", sender=alice)
# Move time forward
timestamp = contractsFixture.eth_tester.backend.chain.header.timestamp
contractsFixture.eth_tester.time_travel(int(timestamp + 24 * 60 * 60 * 7))
# Propose to mint GOV to Bob
proposalId = governance.propose([target], [0], [signature], [calldata], "Give Bob the Monies", sender=alice)
# Vote it into effect
contractsFixture.eth_tester.backend.chain.mine_block()
governance.castVote(proposalId, True, sender=alice)
# Queue the proposal
timestamp = contractsFixture.eth_tester.backend.chain.header.timestamp
contractsFixture.eth_tester.time_travel(int(timestamp + 24 * 60 * 60 * 3))
governance.queue(proposalId)
# Execute the proposal
timestamp = contractsFixture.eth_tester.backend.chain.header.timestamp
contractsFixture.eth_tester.time_travel(int(timestamp + 24 * 60 * 60 * 2))
governance.execute(proposalId)
assert govToken.balanceOf(bob) == 100
```
#### File: tests/reporting/test_rep_oracle.py
```python
from eth_tester.exceptions import TransactionFailed
from utils import longToHexString, nullAddress, stringToBytes
from pytest import raises
import codecs
import functools
from old_eth_utils import sha3
def test_rep_oracle(contractsFixture, augur, cash, market, universe):
if contractsFixture.paraAugur:
return
reputationTokenAddress = universe.getReputationToken()
reputationToken = contractsFixture.applySignature('TestNetReputationToken', reputationTokenAddress)
repOracle = contractsFixture.contracts["RepOracle"]
repExchange = contractsFixture.applySignature("UniswapV2Pair", repOracle.getExchange(reputationTokenAddress))
account = contractsFixture.accounts[0]
# Initially the price will just be the initialization value
initialPrice = repOracle.genesisInitialRepPriceinAttoCash()
assert roughlyEqual(repOracle.poke(reputationTokenAddress), initialPrice)
token0IsCash = cash.address < reputationTokenAddress
# Add liquidity to suggest the price is 1 REP = 20 Cash
cashAmount = 20 * 10**18
repAmount = 1 * 10**18
addLiquidity(repExchange, cash, reputationToken, cashAmount, repAmount, account)
    # The reserves have been modified; however, little time has passed, so the price will not have diverged much
repOracle.poke(reputationTokenAddress)
assert roughlyEqual(repOracle.poke(reputationTokenAddress), initialPrice)
# If we "mine" a block and advance the time 1/2 the period value of the oracle we should see the new value significantly closer to the price dictated by reserves. Specifically about half of the difference
period = repOracle.PERIOD()
mineBlock(contractsFixture, period / 2)
expectedNewRepPrice = initialPrice + ((cashAmount - initialPrice) / 2)
assert roughlyEqual(repOracle.poke(reputationTokenAddress), expectedNewRepPrice)
# Just calling poke again will be a no op
assert roughlyEqual(repOracle.poke(reputationTokenAddress), expectedNewRepPrice)
# If we "mine" a block after period time then the new value will simply be the price
mineBlock(contractsFixture, period)
assert roughlyEqual(repOracle.poke(reputationTokenAddress), cashAmount)
    # Buy REP and manipulate blockNumber to affect cumulative amounts
cashAmount = 10**18 # Trade 1 Dai for ~.05 REP
repAmount = 4.7 * 10**16
buyRep(repExchange, cash, cashAmount, repAmount, token0IsCash, account)
mineBlock(contractsFixture, period)
expectedNewRepPrice = 22 * 10**18 # Cash reserves of ~ 21 Dai and REP reserves of ~.95 REP means a price of 22 Dai / REP
assert roughlyEqual(repOracle.poke(reputationTokenAddress), expectedNewRepPrice, 2 * 10**17)
# Now Sell REP
repAmount = 1 * 10**17 # Trade .1 REP for 1.8 DAI
cashAmount = 1.8 * 10**18
sellRep(repExchange, reputationToken, repAmount, cashAmount, token0IsCash, account)
mineBlock(contractsFixture, period)
expectedNewRepPrice = 18.2 * 10**18 # Cash reserves of ~ 19.2 Dai and REP reserves of ~1.05 REP means a price of ~18.2 Dai / REP
assert roughlyEqual(repOracle.poke(reputationTokenAddress), expectedNewRepPrice, 2 * 10**17)
def addLiquidity(exchange, cash, reputationToken, cashAmount, repAmount, address):
cash.faucet(cashAmount)
reputationToken.faucet(repAmount)
cash.transfer(exchange.address, cashAmount)
reputationToken.transfer(exchange.address, repAmount)
exchange.mint(address)
def buyRep(exchange, cash, cashAmount, repAmount, token0IsCash, address):
cash.faucet(cashAmount)
cash.transfer(exchange.address, cashAmount)
exchange.swap(0 if token0IsCash else repAmount, repAmount if token0IsCash else 0, address, "")
def sellRep(exchange, reputationToken, repAmount, cashAmount, token0IsCash, address):
reputationToken.faucet(repAmount)
reputationToken.transfer(exchange.address, repAmount)
exchange.swap(cashAmount if token0IsCash else 0, 0 if token0IsCash else cashAmount, address, "")
def mineBlock(contractsFixture, timePassed):
timestamp = contractsFixture.eth_tester.backend.chain.header.timestamp
contractsFixture.eth_tester.time_travel(int(timestamp + timePassed))
def roughlyEqual(amount1, amount2, tolerance=5 * 10**16):
return abs(amount1 - amount2) < tolerance
def is_bytes(value):
return isinstance(value, (bytes, bytearray))
def combine(f, g):
return lambda x: f(g(x))
def compose(*functions):
return functools.reduce(combine, functions, lambda x: x)
# ensure we have the *correct* sha3 installed (keccak)
assert codecs.encode(sha3(b''), 'hex') == b'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470' # noqa
def _sub_hash(value, label):
return sha3(value + sha3(label))
def namehash(name):
node = b'\x00' * 32
if name:
if is_bytes(name):
encoded_name = name
else:
encoded_name = codecs.encode(name, 'utf8')
labels = encoded_name.split(b'.')
return compose(*(
functools.partial(_sub_hash, label=label)
for label
in labels
))(node)
return node
```
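The oracle assertions above encode a simple time-weighted blend: after half of `PERIOD` the reported price should land roughly halfway between the previous value and the price implied by the current reserves, and after a full `PERIOD` it should equal the reserve price. A minimal arithmetic sketch of that expectation (plain Python with made-up numbers; this is not the Solidity oracle code):
```python
# Illustrative only: mirrors the expectation asserted in the test, not the contract.
def expected_price(prev_price, reserve_price, elapsed, period):
    """Blend linearly toward the reserve-implied price, capped at one full period."""
    weight = min(elapsed / period, 1.0)
    return prev_price + (reserve_price - prev_price) * weight

initial = 6.0               # hypothetical previous oracle price (arbitrary units)
target = 20.0               # hypothetical price implied by the new reserves
period = 3 * 24 * 60 * 60   # hypothetical oracle PERIOD, in seconds

assert expected_price(initial, target, period / 2, period) == 13.0   # halfway after PERIOD / 2
assert expected_price(initial, target, period, period) == target     # fully converged after PERIOD
assert expected_price(initial, target, 2 * period, period) == target # later pokes are a no-op
```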
|
{
"source": "jeremyschlatter/multi-agent",
"score": 3
}
|
#### File: jeremyschlatter/multi-agent/model.py
```python
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
class Policy(nn.Module):
def __init__(self, observation_size):
super(Policy, self).__init__()
self.affine1 = nn.Linear(observation_size, 128)
self.affine2 = nn.Linear(128, 8)
self.saved_actions = []
self.rewards = []
def forward(self, x):
x = F.relu(self.affine1(x))
action_scores = self.affine2(x)
return F.softmax(action_scores)
def save_weights(self):
torch.save(self.state_dict(), 'model.pkl')
def load_weights(self):
self.load_state_dict(torch.load('model.pkl'))
def select_action(self, state):
state = torch.from_numpy(state).float().unsqueeze(0)
probs = self(Variable(state))
action = probs.multinomial()
self.saved_actions.append(action)
return action.data[0, 0]
```
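The `Policy` module above targets an older PyTorch API (`Variable`, argument-less `softmax`, `Tensor.multinomial`). A hedged sketch of the same forward-and-sample step on a recent PyTorch, with an invented observation size; this is an illustration, not the repository's code:
```python
# Sketch only: a modern-PyTorch take on Policy.select_action.
import torch
import torch.nn as nn
import torch.nn.functional as F

class TinyPolicy(nn.Module):
    def __init__(self, observation_size: int, n_actions: int = 8):
        super().__init__()
        self.affine1 = nn.Linear(observation_size, 128)
        self.affine2 = nn.Linear(128, n_actions)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Return a proper probability distribution over the discrete actions.
        return F.softmax(self.affine2(F.relu(self.affine1(x))), dim=-1)

policy = TinyPolicy(observation_size=4)           # observation size is illustrative
state = torch.randn(1, 4)                         # stand-in for an environment observation
probs = policy(state)
action = torch.multinomial(probs, num_samples=1)  # sample one action index
assert action.shape == (1, 1) and 0 <= int(action) < 8
```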
|
{
"source": "jeremyschlatter/vaccine-feed-ingest",
"score": 2
}
|
#### File: tests/utils/test_match.py
```python
from vaccine_feed_ingest.utils import match
def test_is_concordance_similar(full_location, minimal_location, vial_location):
assert match.is_concordance_similar(full_location, vial_location)
assert not match.is_concordance_similar(minimal_location, vial_location)
def test_is_address_similar(full_location, minimal_location, vial_location):
assert match.is_address_similar(full_location, vial_location)
assert not match.is_address_similar(minimal_location, vial_location)
def test_is_provider_similar(full_location, minimal_location, vial_location):
assert match.is_provider_similar(full_location, vial_location)
assert not match.is_provider_similar(minimal_location, vial_location)
def test_has_matching_phone_number(full_location, minimal_location, vial_location):
assert match.has_matching_phone_number(full_location, vial_location)
assert not match.has_matching_phone_number(minimal_location, vial_location)
```
#### File: tests/utils/test_misc.py
```python
from typing import Iterator
from vaccine_feed_ingest.utils import misc
def test_batch():
orig_items = [1, 2, 3, 4, 5]
batched_iter = misc.batch(orig_items, 3)
assert batched_iter is not None
assert isinstance(batched_iter, Iterator)
batched_list = list(batched_iter)
assert len(batched_list) == 2
final_items = []
for group_iter in batched_list:
assert group_iter is not None
assert isinstance(group_iter, Iterator)
group_list = list(group_iter)
print(group_list)
assert len(group_list) <= 3
final_items.extend(group_list)
assert orig_items == final_items
```
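`test_batch` pins down the contract of `misc.batch`: it yields sub-iterators of at most the requested size, and draining all of them reproduces the original sequence. A minimal sketch of a helper with that behaviour, built on `itertools.islice`; it illustrates the contract only and is not the library's actual implementation:
```python
# Illustrative batch() with the behaviour the test exercises; not vaccine_feed_ingest's code.
import itertools
from typing import Iterable, Iterator, TypeVar

T = TypeVar("T")

def batch(items: Iterable[T], size: int) -> Iterator[Iterator[T]]:
    it = iter(items)
    while True:
        chunk = list(itertools.islice(it, size))
        if not chunk:
            return
        yield iter(chunk)

groups = list(batch([1, 2, 3, 4, 5], 3))
assert [list(g) for g in groups] == [[1, 2, 3], [4, 5]]
```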
#### File: az/pinal_ph_vaccinelocations_gov/normalize.py
```python
import datetime
import json
import pathlib
import re
import sys
from typing import List, Optional
from vaccine_feed_ingest_schema import location as schema
from vaccine_feed_ingest.utils.log import getLogger
logger = getLogger(__file__)
RUNNER_ID = "az_pinal_ph_vaccinelocations_gov"
def _get_id(site: dict) -> str:
id = f"{_get_name(site)}_{_get_city(site)}".lower()
id = id.replace(" ", "_").replace(".", "_").replace("\u2019", "_")
id = id.replace("(", "_").replace(")", "_").replace("/", "_")
return id
def _get_name(site: dict) -> str:
return site["providerName"]
def _get_city(site: dict) -> str:
return site["city"].lstrip().rstrip()
# address is loosely structured and inconsistent, so we're going to bash our
# way through it, mostly parsing from the end of the string
def _get_address(site: dict) -> Optional[schema.Address]:
if "address" not in site or not site["address"]:
return None
address = site["address"]
address = re.sub("\\s+", " ", address)
address = re.sub("\\s*,+", ",", address)
address = address.strip()
# pull a zip code off the end
zip = None
if match := re.search(" (\\d\\d\\d\\d\\d-\\d\\d\\d\\d)$", address):
zip = match.group(1)
address = address.rstrip(f" {zip}")
if match := re.search(" (\\d\\d\\d\\d\\d)$", address):
zip = match.group(1)
address = address.rstrip(f" {zip}")
state = "AZ"
address = address.rstrip()
address = address.rstrip(",")
address = address.rstrip(".")
address = address.rstrip(f" {state}")
address = address.rstrip()
address = address.rstrip(",")
address = address.rstrip(f" {_get_city(site)}")
address = address.rstrip()
address = address.rstrip(",")
address_split = address.split(",")
street1 = address_split[0]
street2 = ", ".join(address_split[1:]) if len(address_split) > 1 else None
return schema.Address(
street1=street1,
street2=street2,
city=_get_city(site),
state=state,
zip=zip,
)
def _get_contacts(site: dict) -> schema.Contact:
ret = []
if "phoneNumber" in site and site["phoneNumber"]:
raw_phone = str(site["phoneNumber"]).lstrip("1").lstrip("-")
if raw_phone[3] == "-" or raw_phone[7] == "-":
phone = "(" + raw_phone[0:3] + ") " + raw_phone[4:7] + "-" + raw_phone[8:12]
elif len(raw_phone) == 10:
phone = "(" + raw_phone[0:3] + ") " + raw_phone[3:6] + "-" + raw_phone[6:10]
else:
phone = raw_phone[0:14]
ret.append(schema.Contact(phone=phone))
if "website" in site and site["website"]:
ret.append(schema.Contact(website=site["website"]))
return ret
def _get_inventories(site: dict) -> List[schema.Vaccine]:
ret = []
if "vaccineType" in site and site["vaccineType"]:
if "Moderna" in site["vaccineType"]:
ret.append(schema.Vaccine(vaccine=schema.VaccineType.MODERNA))
if "Pfizer" in site["vaccineType"]:
ret.append(schema.Vaccine(vaccine=schema.VaccineType.PFIZER_BIONTECH))
if "Janssen" in site["vaccineType"]:
ret.append(
schema.Vaccine(vaccine=schema.VaccineType.JOHNSON_JOHNSON_JANSSEN)
)
return ret
def _get_organization(site: dict) -> Optional[schema.Organization]:
if "Kroger" in site["providerName"]:
return schema.Organization(id=schema.VaccineProvider.KROGER)
if "Safeway" in site["providerName"]:
return schema.Organization(id=schema.VaccineProvider.SAFEWAY)
if "Walgreen" in site["providerName"]:
return schema.Organization(id=schema.VaccineProvider.WALGREENS)
if "Walmart" in site["providerName"]:
return schema.Organization(id=schema.VaccineProvider.WALMART)
if "CVS" in site["providerName"]:
return schema.Organization(id=schema.VaccineProvider.CVS)
return None
def _get_source(site: dict, timestamp: str) -> schema.Source:
return schema.Source(
data=site,
fetched_at=timestamp,
fetched_from_uri="https://www.pinalcountyaz.gov/publichealth/CoronaVirus/Pages/vaccinelocations.aspx",
id=_get_id(site),
source=RUNNER_ID,
)
def normalize(site: dict, timestamp: str) -> str:
normalized = schema.NormalizedLocation(
id=f"{RUNNER_ID}:{_get_id(site)}",
name=_get_name(site),
address=_get_address(site),
contact=_get_contacts(site),
inventory=_get_inventories(site),
parent_organization=_get_organization(site),
source=_get_source(site, timestamp),
).dict()
return normalized
parsed_at_timestamp = datetime.datetime.utcnow().isoformat()
input_dir = pathlib.Path(sys.argv[2])
input_file = input_dir / "data.parsed.ndjson"
output_dir = pathlib.Path(sys.argv[1])
output_file = output_dir / "data.normalized.ndjson"
with input_file.open() as parsed_lines:
with output_file.open("w") as fout:
for line in parsed_lines:
site_blob = json.loads(line)
normalized_site = normalize(site_blob, parsed_at_timestamp)
json.dump(normalized_site, fout)
fout.write("\n")
```
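`_get_address` above works backwards from the end of the string, peeling off the ZIP, state, and city before splitting what remains into street lines. A small illustration of that end-first idea on a made-up address (simplified: it skips the ZIP+4 and missing-comma handling the real function has):
```python
# Assumed sample address; shows only the "parse from the end" idea used by _get_address.
import re

address = "123 E Main St, Suite 4, Casa Grande, AZ 85122"
zip_code = None
if m := re.search(r" (\d{5})$", address):
    zip_code = m.group(1)
    address = address[: m.start()]
address = address.rstrip().rstrip(",").removesuffix(" AZ").rstrip().rstrip(",")
city = address.rsplit(",", 1)[-1].strip()
street = address.rsplit(",", 1)[0]
assert (street, city, zip_code) == ("123 E Main St, Suite 4", "Casa Grande", "85122")
```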
#### File: la/tableau/parse.py
```python
import json
import pathlib
import re
import sys
from tableauscraper import utils
from vaccine_feed_ingest.utils.parse import location_id_from_name
def tableau_item_to_parsed_site(tableau_entry):
"""Put the tableau entry in something closer to the normalized format."""
main_data, extra_data = tableau_entry
name, street, city_state = main_data["Site-value"].split("\n")
city_state = city_state.strip()
if city_state.endswith(" LA"):
city = city_state[:-3]
else:
city = city_state
state = "LA"
address = {"street1": street, "city": city, "state": state}
if name.startswith("** "):
name = name[3:]
minimum_age = 16
else:
minimum_age = 18
id = location_id_from_name(name)
contact = {}
if main_data["Dimension-value"] == "Website":
contact["website"] = main_data["Value-alias"]
elif extra_data["Dimension-value"] == "Website":
contact["website"] = extra_data["Value-alias"]
if main_data["Phone-value"] != "%null%":
contact["phone"] = main_data["Phone-value"]
out = {}
out["id"] = id
if contact:
out["contact"] = contact
out["name"] = name
out["address"] = address
out["minimum_age"] = minimum_age
return out
def parse_tableau(file_contents):
"""
This is a weird blob containing *two* JSON encoded dictionaries.
Each is preceded by its length in bytes, but using a regex (instead of counting) is simple enough.
Follows the approach in tableau-scraping. See the links below:
https://github.com/bertrandmartel/tableau-scraping/blob/9dba25af057ac29f921a75df374943060ab79b0a/tableauscraper/TableauScraper.py#L77-L84
https://github.com/bertrandmartel/tableau-scraping/blob/9dba25af057ac29f921a75df374943060ab79b0a/tableauscraper/dashboard.py#L35
"""
info_and_data = re.search(r"\d+;({.*})\d+;({.*})", file_contents, re.MULTILINE)
data = json.loads(info_and_data.group(2))
presModelMap = data["secondaryInfo"]["presModelMap"]
dataSegments = presModelMap["dataDictionary"]["presModelHolder"][
"genDataDictionaryPresModel"
]["dataSegments"]
full_data = utils.getDataFull(presModelMap, dataSegments)
indices_info = utils.getIndicesInfo(presModelMap, "Vaccination Sites")
data_dict = utils.getData(full_data, indices_info)
num_entries = len(data_dict["Site-value"])
# Transpose columns to rows (tableau-scraping uses pandas, but we don't strictly need to do that)
# Rows are actually duplicated; some have map, some have website.
entries = []
for i in range(0, num_entries, 2):
main_data = {k: v[i] for (k, v) in data_dict.items()}
extra_data = {
"Dimension-value": data_dict["Dimension-value"][i + 1],
"Value-alias": data_dict["Value-alias"][i + 1],
}
entries.append((main_data, extra_data))
return [tableau_item_to_parsed_site(entry) for entry in entries]
output_dir = pathlib.Path(sys.argv[1])
input_dir = pathlib.Path(sys.argv[2])
json_filepaths = input_dir.glob("*.json")
for in_filepath in json_filepaths:
with in_filepath.open() as fin:
sites = parse_tableau(fin.read())
filename = in_filepath.name.split(".", maxsplit=1)[0]
out_filepath = output_dir / f"{filename}.parsed.ndjson"
with out_filepath.open("w") as fout:
for site in sites:
json.dump(site, fout)
fout.write("\n")
```
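The `parse_tableau` docstring describes the payload as two length-prefixed JSON dictionaries. A self-contained illustration of the regex trick on a tiny synthetic blob (real Tableau payloads are much larger; the shape here is assumed from the docstring):
```python
# Synthetic example of the "length;JSON length;JSON" layout that parse_tableau expects.
import json
import re

blob = '17;{"info": "first"}30;{"secondaryInfo": {"rows": 2}}'
m = re.search(r"\d+;({.*})\d+;({.*})", blob, re.MULTILINE)
info = json.loads(m.group(1))   # first dictionary: dashboard/session info
data = json.loads(m.group(2))   # second dictionary: the actual data payload
assert info == {"info": "first"}
assert data["secondaryInfo"]["rows"] == 2
```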
#### File: ny/northwell_health/fetch.py
```python
import json
import os
import sys
import requests
base_url = "https://api.northwell.edu/"
url = "https://api.northwell.edu/v2/vax-locations/all"
def get_paginated_urls():
response = requests.get(url)
data = response.json()
return [page_url["url"] for page_url in data["response"]["pagination"]["display"]]
def get_locations(page_url):
response = requests.get(base_url + page_url)
data = response.json()
return data["response"]["locations"]
def main():
output_dir = sys.argv[1]
if output_dir is None:
raise Exception("Must pass an output_dir as first argument")
page_urls = get_paginated_urls()
for index, page_url in enumerate(page_urls):
locations = get_locations(page_url)
output_file_path = os.path.join(output_dir, f"output{index}.json")
with open(output_file_path, "w", encoding="utf-8") as f:
json.dump(locations, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
sys.exit(main())
```
#### File: us/giscorps_vaccine_providers/normalize.py
```python
import datetime
import json
import os
import pathlib
import re
import sys
from typing import List, Optional
from vaccine_feed_ingest_schema import location as schema
from vaccine_feed_ingest.utils.log import getLogger
logger = getLogger(__file__)
SOURCE_NAME = "us_giscorps_vaccine_providers"
def _get_availability(site: dict) -> schema.Availability:
appt_only = site["attributes"]["appt_only"]
appt_options = {
"Yes": True,
"No": False,
"Vax only": True,
"Test only": False,
}
avail = try_lookup(appt_options, appt_only, None, name="availability lookup")
if avail is not None:
return schema.Availability(appointments=avail)
    # there seems to be no walk-in data unless you want to parse "drive_in" = yes and "vehicle_required" = no into a "walk-in = yes"
return None
def _get_id(site: dict) -> str:
data_id = site["attributes"]["GlobalID"]
# Could parse these from the input file name, but do not for now to avoid
# accidental mutation.
arcgis = "c50a1a352e944a66aed98e61952051ef"
layer = 0
return f"{arcgis}_{layer}_{data_id}"
def _get_contacts(site: dict) -> Optional[List[schema.Contact]]:
contacts = []
if site["attributes"]["phone"]:
sourcePhone = re.sub("[^0-9]", "", site["attributes"]["phone"])
if len(sourcePhone) == 11:
sourcePhone = sourcePhone[1:]
# TODO: handle 3-digit phone numbers like 211, 411 .etc
if len(sourcePhone) == 10:
phone = f"({sourcePhone[0:3]}) {sourcePhone[3:6]}-{sourcePhone[6:]}"
contacts.append(schema.Contact(phone=phone))
# if site["attributes"]["publicEmail"]:
# contacts.append(schema.Contact(email=site["attributes"]["publicEmail"]))
# there are multiple urls, vaccine, agency, health dept. etc
if site["attributes"]["vaccine_url"]:
url = site["attributes"]["vaccine_url"]
url = sanitize_url(url)
if url:
contacts.append(schema.Contact(website=url))
if len(contacts) > 0:
return contacts
return None
def sanitize_url(url):
url = url.strip()
url = url.replace("#", "")
url = url.replace("\\", "/") # thanks windows
url = url if url.startswith("http") else "https://" + url
if len(url.split(" ")) == 1:
return url
return None
def _get_notes(site: dict) -> Optional[List[str]]:
if site["attributes"]["Instructions"]:
return [site["attributes"]["Instructions"]]
return None
def _get_active(site: dict) -> Optional[bool]:
    # The end date may help determine whether a site is historical or current, but that needs a closer read of the docs. See https://github.com/CAVaccineInventory/vaccine-feed-ingest/pull/119 for links that eventually lead to the relevant specs.
# end_date = site["attributes"].get("end_date")
status = site["attributes"].get("status")
status_options = {
"Open": True,
"Closed": False,
"Testing Restricted": True,
"Scheduled to Open": False,
"Temporarily Closed": False,
}
return try_lookup(status_options, status, None, name="active status lookup")
def _get_access(site: dict) -> Optional[List[str]]:
drive = site["attributes"].get("drive_through")
drive_bool = drive is not None
# walk = site["attributes"].get("drive_through")
# walk_bool = drive is not None
wheelchair = site["attributes"].get("Wheelchair_Accessible")
wheelchair_options = {
"Yes": "yes",
"Partially": "partial",
"Unknown": "no",
"Not Applicable": "no",
"NA": "no",
}
wheelchair_bool = try_lookup(
wheelchair_options, wheelchair, "no", name="wheelchair access"
)
return schema.Access(drive=drive_bool, wheelchair=wheelchair_bool)
def try_lookup(mapping, value, default, name=None):
if value is None:
return default
try:
return mapping[value]
except KeyError as e:
name = " for " + name or ""
logger.warn("value not present in lookup table%s: %s", name, e)
return default
def _get_published_at(site: dict) -> Optional[str]:
date_with_millis = site["attributes"]["CreationDate"]
if date_with_millis:
date = datetime.datetime.fromtimestamp(date_with_millis / 1000) # Drop millis
return date.isoformat()
return None
def try_get_list(lis, index, default=None):
if lis is None:
return default
try:
value = lis[index]
if value == "none":
logger.warn("saw none value")
return value
except IndexError:
return default
def try_get_lat_long(site):
location = None
try:
location = schema.LatLng(
latitude=site["geometry"]["y"],
longitude=site["geometry"]["x"],
)
except KeyError:
pass
return location
def _get_normalized_location(site: dict, timestamp: str) -> schema.NormalizedLocation:
addrsplit = site["attributes"]["fulladdr"].split(", ")
zip = site["attributes"]["fulladdr"][-5:]
zip = zip if zip.isnumeric() else None
city_state_zip = addrsplit[1].split(" ") if try_get_list(addrsplit, 1) else None
state = site["attributes"]["State"] or None
state = state.strip() if state is not None else None
return schema.NormalizedLocation(
id=f"{SOURCE_NAME}:{_get_id(site)}",
name=site["attributes"]["name"],
address=schema.Address(
street1=addrsplit[0],
street2=None,
city=site["attributes"]["municipality"]
or try_get_list(city_state_zip, -3, default=""),
state=state,
zip=zip,
),
location=try_get_lat_long(site),
contact=_get_contacts(site),
languages=None,
opening_dates=None,
opening_hours=None, # TODO: the format for this probably needs some mega-parsing as it looks like this -> "operhours": "Monday - Friday 8:00 am - 2:00 pm Saturdays 9:00 am - 12:00 pm",
availability=_get_availability(site),
inventory=None,
access=_get_access(site),
parent_organization=None,
links=None, # TODO
notes=_get_notes(site),
active=_get_active(site),
source=schema.Source(
source=SOURCE_NAME,
id=site["attributes"]["GlobalID"],
fetched_from_uri="https://services.arcgis.com/8ZpVMShClf8U8dae/arcgis/rest/services/Covid19_Vaccination_Locations/FeatureServer/0", # noqa: E501
fetched_at=timestamp,
published_at=_get_published_at(site),
data=site,
),
)
output_dir = pathlib.Path(sys.argv[1])
input_dir = pathlib.Path(sys.argv[2])
json_filepaths = input_dir.glob("*.ndjson")
parsed_at_timestamp = datetime.datetime.utcnow().isoformat()
for in_filepath in json_filepaths:
filename, _ = os.path.splitext(in_filepath.name)
out_filepath = output_dir / f"{filename}.normalized.ndjson"
logger.info(
"normalizing %s => %s",
in_filepath,
out_filepath,
)
with in_filepath.open() as fin:
with out_filepath.open("w") as fout:
for site_json in fin:
parsed_site = json.loads(site_json)
normalized_site = _get_normalized_location(
parsed_site, parsed_at_timestamp
)
json.dump(normalized_site.dict(), fout)
fout.write("\n")
```
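Several of the field helpers above funnel raw attribute values through `try_lookup` so that unexpected strings degrade to a default instead of raising. A standalone sketch of that pattern with an invented status table (not an import of the module above):
```python
# Standalone illustration of the try_lookup pattern used in the normalizer above.
import logging

logger = logging.getLogger("giscorps_example")

def try_lookup(mapping, value, default, name=None):
    if value is None:
        return default
    try:
        return mapping[value]
    except KeyError as exc:
        label = " for " + name if name else ""
        logger.warning("value not present in lookup table%s: %s", label, exc)
        return default

status_options = {"Open": True, "Closed": False}
assert try_lookup(status_options, "Open", None, name="active status lookup") is True
assert try_lookup(status_options, "Maybe Open", None, name="active status lookup") is None
assert try_lookup(status_options, None, None) is None
```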
#### File: wa/prepmod/parse.py
```python
import json
import pathlib
import re
import sys
from bs4 import BeautifulSoup
input_dir = pathlib.Path(sys.argv[2])
output_dir = pathlib.Path(sys.argv[1])
output_file = output_dir / "data.parsed.ndjson"
input_filenames = [p for p in pathlib.Path(input_dir).iterdir() if p.is_file()]
def find_data_item(parent, label, offset):
try:
row_matches = [
x for x in parent.find_all(["p", "div"]) if label in x.get_text()
]
content = row_matches[-1].contents[offset]
return (
content.strip() if isinstance(content, str) else content.get_text().strip()
)
except Exception:
return ""
EXTRACT_CLINIC_ID = re.compile(r".*clinic(\d*)\.png")
with output_file.open("w") as fout:
for filename in input_filenames:
text = open(filename, "r").read()
soup = BeautifulSoup(text, "html.parser")
# classes only used on titles for search results
for title in soup.select(".text-xl.font-black"):
parent = title.parent
combined_name = title.get_text().strip()
name, date = combined_name.split(" on ")
address = title.find_next_sibling("p").get_text().strip()
vaccines = find_data_item(parent, "Vaccinations offered", -2)
ages = find_data_item(parent, "Age groups served", -1)
additional_info = find_data_item(parent, "Additional Information", -1)
hours = find_data_item(parent, "Clinic Hours", -1)
available_count = find_data_item(parent, "Available Appointments", -1) or 0
special = find_data_item(parent, "Special Instructions", -1)
find_clinic_id = EXTRACT_CLINIC_ID.match(
parent.find_next_sibling("div", "map-image").find("img")["src"]
)
clinic_id = find_clinic_id.group(1)
data = {
"name": name,
"date": date,
"address": address,
"vaccines": vaccines,
"ages": ages,
"info": additional_info,
"hours": hours,
"available": available_count,
"special": special,
"clinic_id": clinic_id,
}
json.dump(data, fout)
fout.write("\n")
```
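`EXTRACT_CLINIC_ID` pulls the clinic number out of the map thumbnail's image path. A tiny example of the kind of `src` value the pattern is written for; the exact URL shape is assumed:
```python
# Assumed example of a map-image src; only the trailing clinicNNN.png part matters here.
import re

EXTRACT_CLINIC_ID = re.compile(r".*clinic(\d*)\.png")
src = "/assets/maps/clinic4821.png"
match = EXTRACT_CLINIC_ID.match(src)
assert match is not None and match.group(1) == "4821"
```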
#### File: vaccine_feed_ingest/utils/normalize.py
```python
import re
from typing import Optional, Tuple
import url_normalize
from vaccine_feed_ingest_schema.location import VaccineProvider
from .log import getLogger
logger = getLogger(__file__)
# Add to this list in alphabetical order
VACCINE_PROVIDER_REGEXES = {
VaccineProvider.ACME: [
re.compile(r"ACME PHARMACY #(\d+)", re.I),
],
VaccineProvider.ALBERTSONS: [
re.compile(r"ALBERTSONS(?: MARKET)? PHARMACY #(\d+)", re.I),
],
VaccineProvider.BIG_Y: [
re.compile(r"BIG Y PHARMACY(?: #\d+ Rx)? #(\d+)", re.I),
],
VaccineProvider.BROOKSHIRE: [
re.compile(r"BROOKSHIRE PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.COSTCO: [
re.compile(r"COSTCO(?: MARKET)? PHARMACY #\s*(\d+)", re.I),
re.compile(r"COSTCO WHOLESALE CORPORATION #(\d+)", re.I),
],
VaccineProvider.CUB: [
re.compile(r"CUB PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.CVS: [
re.compile(r"CVS\s(?:STORE)?(?:PHARMACY)?(?:, INC.?)?\s?#?(\d+)", re.I),
],
VaccineProvider.DILLONS: [
re.compile(r"DILLON\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.DRUGCO: [
re.compile(r"DRUGCO DISCOUNT PHARMACY #(\d+)", re.I),
],
VaccineProvider.FAMILY_FARE: [
re.compile(r"FAMILY\s+FARE\s+PHARMACY\s+#?\d+\s+#(\d+)", re.I),
],
VaccineProvider.FOOD_CITY: [
re.compile(r"FOOD CITY PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.FOOD_LION: [
re.compile(r"FOOD LION #(\d+)", re.I),
],
VaccineProvider.FRED_MEYER: [
re.compile(r"FRED MEYER(?: PHARMACY)? #(\d+)", re.I),
],
VaccineProvider.FRYS: [
re.compile(r"FRY\'S FOOD AND DRUG #(\d+)", re.I),
],
VaccineProvider.GENOA: [
re.compile(r"GENOA HEALTHCARE (\d+) \(", re.I),
re.compile(r"GENOA HEALTHCARE LLC #(\d+)", re.I),
],
VaccineProvider.GIANT: [
re.compile(r"GIANT #(\d+)", re.I),
],
VaccineProvider.GIANT_EAGLE: [
re.compile(r"GIANT EAGLE PHARMACY #\d+ #G(\d+)", re.I),
],
VaccineProvider.GIANT_FOOD: [
re.compile(r"GIANT FOOD #(\d+)", re.I),
],
VaccineProvider.HAGGEN: [
re.compile(r"HAGGEN PHARMACY #(\d+)", re.I),
],
VaccineProvider.HANNAFORD: [
re.compile(r"HANNAFORD #(\d+)", re.I),
],
VaccineProvider.HARMONS: [
re.compile(r"HARMONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARPS: [
re.compile(r"HARPS PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARRIS_TEETER: [
re.compile(r"HARRIS TEETER PHARMACY #(\d+)", re.I),
],
VaccineProvider.HARTIG: [
re.compile(r"HARTIG DRUG CO #?\d+ #(\d+)", re.I),
],
VaccineProvider.HEB: [
re.compile(r"H-E-B #(\d+)", re.I),
],
VaccineProvider.HOMELAND: [
re.compile(r"HOMELAND PHARMACY #(\d+)", re.I),
],
VaccineProvider.HY_VEE: [
re.compile(r"HY-VEE INC. #(\d+)", re.I),
],
VaccineProvider.INGLES: [
re.compile(r"INGLES PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.KAISER_HEALTH_PLAN: [
re.compile(r"KAISER HEALTH PLAN \w+(?: \w+)? PHY (\d+)", re.I),
],
VaccineProvider.KAISER_PERMANENTE: [
re.compile(r"KAISER PERMANENTE PHARMACY #(\d+)", re.I),
],
VaccineProvider.KING_SOOPERS: [
re.compile(r"KING SOOPERS PHARMACY #?(\d+)", re.I),
],
VaccineProvider.KROGER: [
re.compile(r"KROGER PHARMACY #?(\d+)", re.I),
],
VaccineProvider.LITTLE_CLINIC: [
re.compile(r"THE LITTLE CLINIC #(\d+)", re.I),
],
VaccineProvider.MARIANOS: [
re.compile(r"MARIANO\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.OSCO: [
re.compile(r"OSCO (?:DRUG|PHARMACY) #(\d+)", re.I),
],
VaccineProvider.MARKET_STREET: [
re.compile(r"MARKET STREET PHARMACY #(\d+)", re.I),
],
VaccineProvider.MEDICAP: [
re.compile(r"MEDICAP PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.MEIJER: [
re.compile(r"MEIJER #(\d+)", re.I),
],
VaccineProvider.PAVILIONS: [
re.compile(r"PAVILIONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.PICK_N_SAVE: [
re.compile(r"PICK N SAVE PHARMACY #(\d+)", re.I),
],
VaccineProvider.PRICE_CHOPPER: [
re.compile(r"PRICE CHOPPER PHARMACY #?\d+ #(?:MS)?(\d+)", re.I),
],
VaccineProvider.PUBLIX: [
re.compile(r"PUBLIX SUPER MARKETS INC\. #(\d+)", re.I),
],
VaccineProvider.QFC: [
re.compile(r"QFC PHARMACY #(\d+)", re.I),
],
VaccineProvider.RALEYS: [
re.compile(r"RALEY\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.RITE_AID: [
re.compile(r"RITE AID (?:PHARMACY |#RA)(\d+)", re.I),
],
VaccineProvider.SAMS: [
re.compile(r"SAM'?S PHARMACY (?:10-|#\s*)(\d+)", re.I),
re.compile(r"SAMS CLUB (?:#\d+\-)?(\d+)", re.I),
],
VaccineProvider.SAFEWAY: [
re.compile(r"Safeway (?:PHARMACY )?\s?#?(\d+)", re.I),
],
VaccineProvider.SAV_ON: [
re.compile(r"SAV-?ON PHARMACY #\s?(\d+)", re.I),
],
VaccineProvider.SHOP_RITE: [
re.compile(r"SHOPRITE PHARMACY #(\d+)", re.I),
],
VaccineProvider.SMITHS: [
re.compile(r"SMITH\'S PHARMACY #(\d+)", re.I),
],
VaccineProvider.STOP_AND_SHOP: [
re.compile(r"STOP \& SHOP #(\d+)", re.I),
],
VaccineProvider.TOM_THUMB: [
re.compile(r"TOM THUMB PHARMACY #(\d+)", re.I),
],
VaccineProvider.THRIFTY: [
re.compile(r"THRIFTY DRUG STORES INC #(\d+)", re.I),
],
VaccineProvider.VONS: [
re.compile(r"VONS PHARMACY #(\d+)", re.I),
],
VaccineProvider.WALGREENS: [
re.compile(r"Walgreens (?:Specialty )?(?:Pharmacy )?#(\d+)", re.I),
re.compile(r"Walgreens Co\. #(\d+)", re.I),
],
VaccineProvider.WALMART: [
re.compile(r"WALMART INC #10-(\d+)", re.I),
re.compile(r"WALMART PHARMACY 10-(\d+)", re.I),
re.compile(r"WALMART (?:INC,|PHARMACY) #(\d+)", re.I),
],
VaccineProvider.WEIS: [
re.compile(r"WEIS PHARMACY #\d+ #(\d+)", re.I),
],
VaccineProvider.WINN_DIXIE: [
re.compile(r"WINN-DIXIE #(\d+)", re.I),
],
}
def provider_id_from_name(
name: str,
) -> Optional[Tuple[VaccineProvider, str]]:
"""Generate provider ids for retail pharmacies (riteaid:123)"""
for vaccine_provider, regexes in VACCINE_PROVIDER_REGEXES.items():
for regex in regexes:
m = regex.search(name)
if m:
return vaccine_provider, str(int(m.group(1)))
return None
ZIP_RE = re.compile(r"([0-9]{5})([0-9]{4})")
def normalize_zip(zipc: Optional[str]) -> Optional[str]:
if zipc is not None:
if ZIP_RE.match(zipc):
zipc = ZIP_RE.sub(r"\1-\2", zipc)
length = len(zipc)
if length != 5 and length != 10:
zipc = None
return zipc
def normalize_url(url: Optional[str]) -> Optional[str]:
if url is None:
return url
return url_normalize.url_normalize(url)
```
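A hedged, standalone illustration of the two normalizations above: provider matching strips leading zeros from the store number via `int()`, and nine-digit ZIPs are hyphenated while anything that is not 5 or 10 characters long is dropped. The pharmacy name is invented and only one provider regex is reproduced here; this is not an import of the module:
```python
# Standalone sketch of provider_id_from_name / normalize_zip behaviour.
import re
from typing import Optional, Tuple

RITE_AID = re.compile(r"RITE AID (?:PHARMACY |#RA)(\d+)", re.I)

def provider_id_from_name(name: str) -> Optional[Tuple[str, str]]:
    m = RITE_AID.search(name)
    if m:
        return "rite_aid", str(int(m.group(1)))  # int() drops leading zeros
    return None

ZIP_RE = re.compile(r"([0-9]{5})([0-9]{4})")

def normalize_zip(zipc: Optional[str]) -> Optional[str]:
    if zipc is not None and ZIP_RE.match(zipc):
        zipc = ZIP_RE.sub(r"\1-\2", zipc)
    return zipc if zipc and len(zipc) in (5, 10) else None

assert provider_id_from_name("Rite Aid Pharmacy 05423") == ("rite_aid", "5423")
assert normalize_zip("850040001") == "85004-0001"
assert normalize_zip("8500") is None
```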
|
{
"source": "jeremyschlatter/xonsh",
"score": 2
}
|
#### File: jeremyschlatter/xonsh/amalgamate.py
```python
import os
import sys
import pprint
from itertools import repeat
from collections import namedtuple
from collections.abc import Mapping
from ast import parse, walk, Import, ImportFrom
__version__ = "0.1.2"
ModNode = namedtuple("ModNode", ["name", "pkgdeps", "extdeps", "futures"])
ModNode.__doc__ = """Module node for dependency graph.
Attributes
----------
name : str
Module name.
pkgdeps : frozenset of str
Module dependencies in the same package.
extdeps : frozenset of str
External module dependencies from outside of the package.
futures : frozenset of str
Import directive names antecedent to 'from __future__ import'
"""
class SourceCache(Mapping):
"""Stores / loads source code for files based on package and module names."""
def __init__(self, *args, **kwargs):
self._d = dict(*args, **kwargs)
def __getitem__(self, key):
d = self._d
if key in d:
return d[key]
pkg, name = key
pkgdir = pkg.replace(".", os.sep)
fname = pkgdir + os.sep + name + ".py"
with open(fname, encoding="utf-8", errors="surrogateescape") as f:
raw = f.read()
d[key] = raw
return raw
def __iter__(self):
yield from self._d
def __len__(self):
return len(self._d)
SOURCES = SourceCache()
class GlobalNames(object):
"""Stores globally defined names that have been seen on ast nodes."""
impnodes = frozenset(["import", "importfrom"])
def __init__(self, pkg="<pkg>"):
self.cache = {}
self.pkg = pkg
self.module = "<mod>"
self.topnode = None
def warn_duplicates(self):
s = ""
for key in sorted(self.cache.keys()):
val = self.cache[key]
if len(val) < 2:
continue
val = sorted(val)
if all([val[0][0] == x[0] for x in val[1:]]):
continue
s += "WARNING: {0!r} defined in multiple locations:\n".format(key)
for loc in val:
s += " {}:{} ({})\n".format(*loc)
if len(s) > 0:
print(s, end="", flush=True, file=sys.stderr)
def entry(self, name, lineno):
if name.startswith("__"):
return
topnode = self.topnode
e = (self.pkg + "." + self.module, lineno, topnode)
if name in self.cache:
if topnode in self.impnodes and all(
[topnode == x[2] for x in self.cache[name]]
):
return
self.cache[name].add(e)
else:
self.cache[name] = set([e])
def add(self, node, istopnode=False):
"""Adds the names from the node to the cache."""
nodename = node.__class__.__name__.lower()
if istopnode:
self.topnode = nodename
meth = getattr(self, "_add_" + nodename, None)
if meth is not None:
meth(node)
def _add_name(self, node):
self.entry(node.id, node.lineno)
def _add_tuple(self, node):
for x in node.elts:
self.add(x)
def _add_assign(self, node):
for target in node.targets:
self.add(target)
def _add_functiondef(self, node):
self.entry(node.name, node.lineno)
def _add_classdef(self, node):
self.entry(node.name, node.lineno)
def _add_import(self, node):
lineno = node.lineno
for target in node.names:
if target.asname is None:
name, _, _ = target.name.partition(".")
else:
name = target.asname
self.entry(name, lineno)
def _add_importfrom(self, node):
pkg, _ = resolve_package_module(node.module, self.pkg, node.level)
if pkg == self.pkg:
return
lineno = node.lineno
for target in node.names:
if target.asname is None:
name = target.name
else:
name = target.asname
self.entry(name, lineno)
def _add_with(self, node):
for item in node.items:
if item.optional_vars is None:
continue
self.add(item.optional_vars)
for child in node.body:
self.add(child, istopnode=True)
def _add_for(self, node):
self.add(node.target)
for child in node.body:
self.add(child, istopnode=True)
def _add_while(self, node):
for child in node.body:
self.add(child, istopnode=True)
def _add_if(self, node):
for child in node.body:
self.add(child, istopnode=True)
for child in node.orelse:
self.add(child, istopnode=True)
def _add_try(self, node):
for child in node.body:
self.add(child, istopnode=True)
def module_is_package(module, pkg, level):
"""Returns whether or not the module name refers to the package."""
if level == 0:
return module == pkg
elif level == 1:
return module is None
else:
return False
def module_from_package(module, pkg, level):
"""Returns whether or not a module is from the package."""
if level == 0:
return module.startswith(pkg + ".")
elif level == 1:
return True
else:
return False
def resolve_package_module(module, pkg, level, default=None):
"""Returns a 2-tuple of package and module name, even for relative
imports
"""
if level == 0:
p, _, m = module.rpartition(".")
elif level == 1:
p = pkg
m = module or default
else:
p = m = None
return p, m
def make_node(name, pkg, allowed, glbnames):
"""Makes a node by parsing a file and traversing its AST."""
raw = SOURCES[pkg, name]
tree = parse(raw, filename=name)
# we only want to deal with global import statements
pkgdeps = set()
extdeps = set()
futures = set()
glbnames.module = name
for a in tree.body:
glbnames.add(a, istopnode=True)
if isinstance(a, Import):
for n in a.names:
p, dot, m = n.name.rpartition(".")
if p == pkg and m in allowed:
pkgdeps.add(m)
else:
extdeps.add(n.name)
elif isinstance(a, ImportFrom):
if module_is_package(a.module, pkg, a.level):
pkgdeps.update(n.name for n in a.names if n.name in allowed)
elif module_from_package(a.module, pkg, a.level):
p, m = resolve_package_module(
a.module, pkg, a.level, default=a.names[0].name
)
if p == pkg and m in allowed:
pkgdeps.add(m)
else:
extdeps.add(a.module)
elif a.module == "__future__":
futures.update(n.name for n in a.names)
return ModNode(name, frozenset(pkgdeps), frozenset(extdeps), frozenset(futures))
def make_graph(pkg, exclude=None):
"""Create a graph (dict) of module dependencies."""
graph = {}
pkgdir = pkg.replace(".", os.sep)
allowed = set()
files = os.listdir(pkgdir)
for fname in files:
base, ext = os.path.splitext(fname)
if base.startswith("__") or ext != ".py":
continue
allowed.add(base)
if exclude:
allowed -= exclude
glbnames = GlobalNames(pkg=pkg)
for base in allowed:
graph[base] = make_node(base, pkg, allowed, glbnames)
glbnames.warn_duplicates()
return graph
def depsort(graph):
"""Sort modules by dependency."""
remaining = set(graph.keys())
seder = []
solved = set()
while 0 < len(remaining):
nodeps = {m for m in remaining if len(graph[m].pkgdeps - solved) == 0}
if len(nodeps) == 0:
msg = (
"\nsolved order = {0}\nremaining = {1}\nCycle detected in "
"module graph!"
).format(pprint.pformat(seder), pprint.pformat(remaining))
raise RuntimeError(msg)
solved |= nodeps
remaining -= nodeps
seder += sorted(nodeps)
return seder
LAZY_IMPORTS = """
from sys import modules as _modules
from types import ModuleType as _ModuleType
from importlib import import_module as _import_module
class _LazyModule(_ModuleType):
def __init__(self, pkg, mod, asname=None):
'''Lazy module 'pkg.mod' in package 'pkg'.'''
self.__dct__ = {
'loaded': False,
'pkg': pkg, # pkg
'mod': mod, # pkg.mod
'asname': asname, # alias
}
@classmethod
def load(cls, pkg, mod, asname=None):
if mod in _modules:
key = pkg if asname is None else mod
return _modules[key]
else:
return cls(pkg, mod, asname)
def __getattribute__(self, name):
if name == '__dct__':
return super(_LazyModule, self).__getattribute__(name)
dct = self.__dct__
mod = dct['mod']
if dct['loaded']:
m = _modules[mod]
else:
m = _import_module(mod)
glbs = globals()
pkg = dct['pkg']
asname = dct['asname']
if asname is None:
glbs[pkg] = m = _modules[pkg]
else:
glbs[asname] = m
dct['loaded'] = True
return getattr(m, name)
"""
def get_lineno(node, default=0):
"""Gets the lineno of a node or returns the default."""
return getattr(node, "lineno", default)
def min_line(node):
"""Computes the minimum lineno."""
node_line = get_lineno(node)
return min(map(get_lineno, walk(node), repeat(node_line)))
def format_import(names):
"""Format an import line"""
parts = []
for _, name, asname in names:
if asname is None:
parts.append(name)
else:
parts.append(name + " as " + asname)
line = "import " + ", ".join(parts) + "\n"
return line
def format_lazy_import(names):
"""Formats lazy import lines"""
lines = ""
for _, name, asname in names:
pkg, _, _ = name.partition(".")
if asname is None:
line = "{pkg} = _LazyModule.load({pkg!r}, {mod!r})\n"
else:
line = "{asname} = _LazyModule.load({pkg!r}, {mod!r}, {asname!r})\n"
lines += line.format(pkg=pkg, mod=name, asname=asname)
return lines
def format_from_import(names):
"""Format a from import line"""
parts = []
for _, module, name, asname in names: # noqa
if asname is None:
parts.append(name)
else:
parts.append(name + " as " + asname)
line = "from " + module
line += " import " + ", ".join(parts) + "\n"
return line
def rewrite_imports(name, pkg, order, imps):
"""Rewrite the global imports in the file given the amalgamation."""
raw = SOURCES[pkg, name]
tree = parse(raw, filename=name)
replacements = [] # list of (startline, stopline, str) tuples
# collect replacements in forward direction
for a, b in zip(tree.body, tree.body[1:] + [None]):
if not isinstance(a, (Import, ImportFrom)):
continue
start = min_line(a) - 1
stop = len(tree.body) if b is None else min_line(b) - 1
if isinstance(a, Import):
keep = []
for n in a.names:
p, dot, m = n.name.rpartition(".")
if p == pkg and m in order:
msg = (
"Cannot amalgamate import of amalgamated module:"
"\n\n import {0}.{1}\n\nin {0}/{2}.py"
).format(pkg, n.name, name)
raise RuntimeError(msg)
imp = (Import, n.name, n.asname)
if imp not in imps:
imps.add(imp)
keep.append(imp)
if len(keep) == 0:
s = ", ".join(n.name for n in a.names)
s = "# amalgamated " + s + "\n"
else:
s = format_lazy_import(keep)
replacements.append((start, stop, s))
elif isinstance(a, ImportFrom):
p, m = resolve_package_module(a.module, pkg, a.level, default="")
if module_is_package(a.module, pkg, a.level):
for n in a.names:
if n.name in order:
msg = (
"Cannot amalgamate import of "
"amalgamated module:\n\n from {0} import {1}\n"
"\nin {0}/{2}.py"
).format(pkg, n.name, name)
raise RuntimeError(msg)
elif p == pkg and m in order:
replacements.append(
(start, stop, "# amalgamated " + p + "." + m + "\n")
)
elif a.module == "__future__":
replacements.append(
(start, stop, "# amalgamated __future__ directive\n")
)
else:
keep = []
for n in a.names:
imp = (ImportFrom, a.module, n.name, n.asname)
if imp not in imps:
imps.add(imp)
keep.append(imp)
if len(keep) == len(a.names):
continue # all new imports
elif len(keep) == 0:
s = ", ".join(n.name for n in a.names)
s = "# amalgamated from " + a.module + " import " + s + "\n"
else:
s = format_from_import(keep)
replacements.append((start, stop, s))
# apply replacements in reverse
lines = raw.splitlines(keepends=True)
for start, stop, s in replacements[::-1]:
lines[start] = s
for _ in range(stop - start - 1):
del lines[start + 1]
return "".join(lines)
def sorted_futures(graph):
"""Returns a sorted, unique list of future imports."""
f = set()
for value in graph.values():
f |= value.futures
return sorted(f)
def amalgamate(order, graph, pkg):
"""Create amalgamated source."""
src = (
'"""Amalgamation of {0} package, made up of the following '
"modules, in order:\n\n* "
).format(pkg)
src += "\n* ".join(order)
src += '\n\n"""\n'
futures = sorted_futures(graph)
if len(futures) > 0:
src += "from __future__ import " + ", ".join(futures) + "\n"
src += LAZY_IMPORTS
imps = set()
for name in order:
lines = rewrite_imports(name, pkg, order, imps)
src += "#\n# " + name + "\n#\n" + lines + "\n"
return src
def write_amalgam(src, pkg):
"""Write out __amalgam__.py file"""
pkgdir = pkg.replace(".", os.sep)
fname = os.path.join(pkgdir, "__amalgam__.py")
with open(fname, "w", encoding="utf-8", errors="surrogateescape") as f:
f.write(src)
def _init_name_lines(pkg):
pkgdir = pkg.replace(".", os.sep)
fname = os.path.join(pkgdir, "__init__.py")
with open(fname, encoding="utf-8", errors="surrogateescape") as f:
raw = f.read()
lines = raw.splitlines()
return fname, lines
def read_exclude(pkg):
"""reads in modules to exclude from __init__.py"""
_, lines = _init_name_lines(pkg)
exclude = set()
for line in lines:
if line.startswith("# amalgamate exclude"):
exclude.update(line.split()[3:])
return exclude
FAKE_LOAD = """
import os as _os
if _os.getenv("{debug}", ""):
pass
else:
import sys as _sys
try:
from {pkg} import __amalgam__
{load}
del __amalgam__
except ImportError:
pass
del _sys
del _os
""".strip()
def rewrite_init(pkg, order, debug="DEBUG"):
"""Rewrites the init file to insert modules."""
fname, lines = _init_name_lines(pkg)
start, stop = -1, -1
for i, line in enumerate(lines):
if line.startswith("# amalgamate end"):
stop = i
elif line.startswith("# amalgamate"):
start = i
t = "{1} = __amalgam__\n " '_sys.modules["{0}.{1}"] = __amalgam__'
load = "\n ".join(t.format(pkg, m) for m in order)
s = FAKE_LOAD.format(pkg=pkg, load=load, debug=debug)
if start + 1 == stop:
lines.insert(stop, s)
else:
lines[start + 1] = s
lines = lines[: start + 2] + lines[stop:]
init = "\n".join(lines) + "\n"
with open(fname, "w", encoding="utf-8", errors="surrogateescape") as f:
f.write(init)
def main(args=None):
if args is None:
args = sys.argv
debug = "DEBUG"
for pkg in args[1:]:
if pkg.startswith("--debug="):
debug = pkg[8:]
continue
print("Amalgamating " + pkg)
exclude = read_exclude(pkg)
print(" excluding {}".format(pprint.pformat(exclude or None)))
graph = make_graph(pkg, exclude=exclude)
order = depsort(graph)
src = amalgamate(order, graph, pkg)
write_amalgam(src, pkg)
rewrite_init(pkg, order, debug=debug)
print(" collapsed {} modules".format(len(order)))
if __name__ == "__main__":
main()
```
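`depsort` orders modules so that every module appears after its in-package dependencies, raising on cycles. A toy, self-contained version of the same Kahn-style ordering on a hand-built graph (module names and dependency edges are illustrative; this is not an import of `amalgamate`):
```python
# Toy version of depsort(): order modules so dependencies come first.
def toy_depsort(pkgdeps: dict) -> list:
    remaining = set(pkgdeps)
    solved, order = set(), []
    while remaining:
        ready = {m for m in remaining if pkgdeps[m] <= solved}
        if not ready:
            raise RuntimeError("Cycle detected in module graph!")
        order += sorted(ready)
        solved |= ready
        remaining -= ready
    return order

# 'platform' has no in-package deps, 'tools' needs platform, 'environ' needs both (made up).
graph = {"environ": {"tools", "platform"}, "tools": {"platform"}, "platform": set()}
assert toy_depsort(graph) == ["platform", "tools", "environ"]
```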
#### File: tests/completers/test_dir_completers.py
```python
import re
import pytest
import tempfile
from os import sep
from xonsh.completers.tools import RichCompletion
from xonsh.completers.dirs import complete_cd, complete_rmdir
from xonsh.parsers.completion_context import (
CompletionContext,
CommandContext,
CommandArg,
)
from tests.tools import ON_WINDOWS
COMPLETERS = {
"cd": complete_cd,
"rmdir": complete_rmdir,
}
CUR_DIR = "." if ON_WINDOWS else "./"
PARENT_DIR = ".." if ON_WINDOWS else "../"
@pytest.fixture(autouse=True)
def setup(xession, xonsh_execer):
with tempfile.TemporaryDirectory() as tmp:
xession.env["XONSH_DATA_DIR"] = tmp
xession.env["CDPATH"] = set()
@pytest.fixture(params=list(COMPLETERS))
def cmd(request):
return request.param
def test_not_cmd(cmd):
"""Ensure the cd completer doesn't complete other commands"""
assert not COMPLETERS[cmd](
CompletionContext(
CommandContext(
args=(CommandArg(f"not-{cmd}"),),
arg_index=1,
)
)
)
def complete_cmd(cmd, prefix, opening_quote="", closing_quote=""):
result = COMPLETERS[cmd](
CompletionContext(
CommandContext(
args=(CommandArg(cmd),),
arg_index=1,
prefix=prefix,
opening_quote=opening_quote,
closing_quote=closing_quote,
is_after_closing_quote=bool(closing_quote),
)
)
)
assert result and len(result) == 2
completions, lprefix = result
assert lprefix == len(opening_quote) + len(prefix) + len(
closing_quote
) # should override the quotes
return completions
def complete_cmd_dirs(*a, **kw):
return [r.value for r in complete_cmd(*a, **kw)]
def test_non_dir(cmd):
with tempfile.NamedTemporaryFile(dir=".", suffix="_dummySuffix") as tmp:
with pytest.raises(StopIteration): # tmp is a file
completions = complete_cmd(cmd, tmp.name[:-2])
@pytest.fixture(scope="module")
def dir_path():
with tempfile.TemporaryDirectory(dir=".", suffix="_dummyDir") as tmp_path:
yield tmp_path
def test_dirs_only(cmd, dir_path):
completions = complete_cmd(cmd, dir_path[:-2])
assert completions == {dir_path + sep}
def test_opening_quotes(cmd, dir_path):
assert complete_cmd(cmd, dir_path, opening_quote="r'") == {f"r'{dir_path}{sep}'"}
def test_closing_quotes(cmd, dir_path):
prefix = dir_path
exp = f"'''{dir_path}{sep}'''"
if ON_WINDOWS:
prefix = prefix.replace("\\", "\\\\")
# the path completer converts to a raw string if there's a backslash
exp = "r" + exp
completions = complete_cmd(cmd, prefix, opening_quote="'''", closing_quote="'''")
assert completions == {exp}
completion = completions.pop()
assert isinstance(completion, RichCompletion)
assert completion.append_closing_quote is False
def test_complete_dots(xession):
with xession.env.swap(COMPLETE_DOTS="never"):
dirs = complete_cmd_dirs("cd", "")
assert CUR_DIR not in dirs and PARENT_DIR not in dirs
dirs = complete_cmd_dirs("cd", ".")
assert CUR_DIR not in dirs and PARENT_DIR not in dirs
with xession.env.swap(COMPLETE_DOTS="matching"):
dirs = complete_cmd_dirs("cd", "")
assert CUR_DIR not in dirs and PARENT_DIR not in dirs
dirs = complete_cmd_dirs("cd", ".")
assert CUR_DIR in dirs and PARENT_DIR in dirs
with xession.env.swap(COMPLETE_DOTS="always"):
dirs = complete_cmd_dirs("cd", "")
assert CUR_DIR in dirs and PARENT_DIR in dirs
dirs = complete_cmd_dirs("cd", ".")
assert CUR_DIR in dirs and PARENT_DIR in dirs
```
#### File: xonsh/completers/completer.py
```python
import collections
from xonsh.parsers.completion_context import CommandContext
from xonsh.built_ins import XSH
from xonsh.completers.tools import (
contextual_command_completer_for,
justify,
is_exclusive_completer,
)
@contextual_command_completer_for("completer")
def complete_completer(command: CommandContext):
"""
Completion for "completer"
"""
if command.suffix:
# completing in a middle of a word
# (e.g. "completer some<TAB>thing")
return None
curix = command.arg_index
compnames = set(XSH.completers.keys())
if curix == 1:
possible = {"list", "help", "add", "remove"}
elif curix == 2:
first_arg = command.args[1].value
if first_arg == "help":
possible = {"list", "add", "remove"}
elif first_arg == "remove":
possible = compnames
else:
raise StopIteration
else:
if command.args[1].value != "add":
raise StopIteration
if curix == 3:
possible = {i for i, j in XSH.ctx.items() if callable(j)}
elif curix == 4:
possible = (
{"start", "end"}
| {">" + n for n in compnames}
| {"<" + n for n in compnames}
)
else:
raise StopIteration
return {i for i in possible if i.startswith(command.prefix)}
def add_one_completer(name, func, loc="end"):
new = collections.OrderedDict()
if loc == "start":
# Add new completer before the first exclusive one.
# We don't want new completers to be before the non-exclusive ones,
# because then they won't be used when this completer is successful.
# On the other hand, if the new completer is non-exclusive,
        # we want it to be before all other exclusive completers so that it will always work.
items = list(XSH.completers.items())
first_exclusive = next(
(i for i, (_, v) in enumerate(items) if is_exclusive_completer(v)),
len(items),
)
for k, v in items[:first_exclusive]:
new[k] = v
new[name] = func
for k, v in items[first_exclusive:]:
new[k] = v
elif loc == "end":
for (k, v) in XSH.completers.items():
new[k] = v
new[name] = func
else:
direction, rel = loc[0], loc[1:]
found = False
for (k, v) in XSH.completers.items():
if rel == k and direction == "<":
new[name] = func
found = True
new[k] = v
if rel == k and direction == ">":
new[name] = func
found = True
if not found:
new[name] = func
XSH.completers.clear()
XSH.completers.update(new)
def list_completers():
"""List the active completers"""
o = "Registered Completer Functions: (NX = Non Exclusive)\n\n"
non_exclusive = " [NX]"
_comp = XSH.completers
ml = max((len(i) for i in _comp), default=0)
exclusive_len = ml + len(non_exclusive) + 1
_strs = []
for c in _comp:
if _comp[c].__doc__ is None:
doc = "No description provided"
else:
doc = " ".join(_comp[c].__doc__.split())
doc = justify(doc, 80, exclusive_len + 3)
if is_exclusive_completer(_comp[c]):
_strs.append("{: <{}} : {}".format(c, exclusive_len, doc))
else:
_strs.append("{: <{}} {} : {}".format(c, ml, non_exclusive, doc))
return o + "\n".join(_strs) + "\n"
def remove_completer(name: str):
"""removes a completer from xonsh
Parameters
----------
name:
NAME is a unique name of a completer (run "completer list" to see the current
completers in order)
"""
err = None
if name not in XSH.completers:
err = f"The name {name} is not a registered completer function."
if err is None:
del XSH.completers[name]
return
else:
return None, err + "\n", 1
```
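`add_one_completer` accepts a relative location written as `<name` (insert before `name`) or `>name` (insert after `name`). A standalone toy showing just that ordering rule on a plain `OrderedDict`, so it does not depend on a running xonsh session; the completer names are invented:
```python
# Toy re-implementation of the "<name" / ">name" placement rule from add_one_completer.
import collections

def place(completers, name, func, loc):
    new = collections.OrderedDict()
    direction, rel = loc[0], loc[1:]
    found = False
    for k, v in completers.items():
        if rel == k and direction == "<":
            new[name] = func
            found = True
        new[k] = v
        if rel == k and direction == ">":
            new[name] = func
            found = True
    if not found:
        new[name] = func
    return new

comps = collections.OrderedDict([("path", object()), ("bash", object())])
assert list(place(comps, "mine", object(), ">path")) == ["path", "mine", "bash"]
assert list(place(comps, "mine", object(), "<bash")) == ["path", "mine", "bash"]
```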
|
{
"source": "jeremyschulman/aeon-ztps",
"score": 2
}
|
#### File: aeon_ztp/bin/eos_bootstrap.py
```python
import sys
import os
import json
import argparse
import subprocess
import logging
import logging.handlers
import tempfile
import time
import requests
import hashlib
from aeon.eos.device import Device
from aeon.exceptions import ProbeError, UnauthorizedError
from aeon.exceptions import ConfigError, CommandError
import tenacity
# ##### -----------------------------------------------------------------------
# #####
# ##### Command Line Arguments
# #####
# ##### -----------------------------------------------------------------------
def cli_parse(cmdargs=None):
psr = argparse.ArgumentParser(
prog='eos_bootstrap',
description="Aeon ZTP bootstrapper for Arista EOS",
add_help=True)
psr.add_argument(
'--target', required=True,
help='hostname or ip_addr of target device')
psr.add_argument(
'--server', required=True,
help='Aeon ZTP server host:port')
psr.add_argument(
'--topdir', required=True,
help='toplevel directory aztp installation files')
psr.add_argument(
'--logfile',
help='name of log file')
psr.add_argument(
'--reload-delay',
type=int, default=10 * 60,
help="about of time/s to try to reconnect to device after reload")
psr.add_argument(
'--init-delay',
type=int, default=60,
help="amount of time/s to wait before starting the bootstrap process")
# ##### -------------------------
# ##### authentication
# ##### -------------------------
group = psr.add_argument_group('authentication')
group.add_argument(
'--user', help='login user-name')
group.add_argument(
'--env-user', '-U',
help='Username environment variable')
group.add_argument(
'--env-passwd', '-P',
required=True,
help='Passwd environment variable')
return psr.parse_args(cmdargs)
class EosBootstrap(object):
def __init__(self, server, cli_args):
self.server = server
self.cli_args = cli_args
self.target = self.cli_args.target
self.os_name = 'eos'
self.progname = '%s-bootstrap' % self.os_name
self.logfile = self.cli_args.logfile
self.log = self.setup_logging(logname=self.progname, logfile=self.logfile)
self.user, self.passwd = self.get_user_and_passwd()
self.image_name = None
self.finally_script = None
self.dev = None
self.vendor_dir = os.path.join(self.cli_args.topdir, 'vendor_images', self.os_name)
self.image_fpath = None
def setup_logging(self, logname, logfile=None):
log = logging.getLogger(name=logname)
log.setLevel(logging.INFO)
fmt = logging.Formatter(
'%(name)s %(levelname)s {target}: %(message)s'
.format(target=self.target))
if logfile:
handler = logging.FileHandler(self.logfile)
else:
handler = logging.handlers.SysLogHandler(address='/dev/log')
handler.setFormatter(fmt)
log.addHandler(handler)
return log
# ##### -----------------------------------------------------------------------
# #####
# ##### REST API functions
# #####
# ##### -----------------------------------------------------------------------
def post_device_facts(self):
facts = self.dev.facts
facts['ip_addr'] = self.dev.target
facts = json.dumps(facts)
dev_data = dict(
ip_addr=self.dev.target,
serial_number=self.dev.facts['serial_number'],
hw_model=self.dev.facts['hw_model'],
os_version=self.dev.facts['os_version'],
os_name=self.os_name,
facts=facts)
dev_data['image_name'] = self.image_name
dev_data['finally_script'] = self.finally_script
requests.put(url='http://%s/api/devices/facts' % self.server, json=dev_data)
def post_device_status(self, message=None, state=None):
if not (self.dev or self.target):
self.log.error('Either dev or target is required to post device status. Message was: {}'.format(message))
return
requests.put(
url='http://%s/api/devices/status' % self.server,
json=dict(
os_name=self.os_name,
ip_addr=self.target or self.dev.target,
state=state, message=message))
# ##### -----------------------------------------------------------------------
# #####
# ##### Utility Functions
# #####
# ##### -----------------------------------------------------------------------
def exit_results(self, results, exit_error=None):
if results['ok']:
self.post_device_status(message='bootstrap completed OK', state='DONE')
sys.exit(0)
else:
self.post_device_status(message=results['message'], state='FAILED')
sys.exit(exit_error or 1)
def get_user_and_passwd(self):
user = self.cli_args.user or os.getenv(self.cli_args.env_user)
passwd = os.getenv(self.cli_args.env_passwd)
if not user:
errmsg = "login user-name missing"
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg))
if not passwd:
errmsg = "login user-password missing"
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg))
return user, passwd
def wait_for_device(self, countdown, poll_delay):
dev = None
# first we need to wait for the device to be 'reachable' via the API.
# we'll use the probe error to detect if it is or not
while not dev:
msg = 'reload-countdown at: {} seconds'.format(countdown)
self.post_device_status(message=msg, state='AWAIT-ONLINE')
self.log.info(msg)
try:
dev = Device(self.target, user=self.user, passwd=self.passwd, timeout=poll_delay)
except CommandError:
# this means that the device is probe-able, but unable to use the API
# for some reason; likely the process is not yet ready. need to
# 'manually' invoke the poll delay.
countdown -= poll_delay
if countdown <= 0:
errmsg = 'Failed to access %s device API within reload countdown' % self.target
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg), exit_error=errmsg)
time.sleep(poll_delay)
except ProbeError:
countdown -= poll_delay
if countdown <= 0:
errmsg = 'Failed to probe target %s within reload countdown' % self.target
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg), exit_error=errmsg)
except UnauthorizedError:
errmsg = 'Unauthorized - check user/password'
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg), exit_error=errmsg)
self.dev = dev
self.post_device_facts()
# ##### -----------------------------------------------------------------------
# #####
# ##### General config process
# #####
# ##### -----------------------------------------------------------------------
def do_push_config(self):
topdir = self.cli_args.topdir
config_dir = os.path.join(topdir, 'etc', 'configs', self.os_name)
all_fpath = os.path.join(config_dir, 'all.conf')
model_fpath = os.path.join(config_dir, self.dev.facts['hw_model'] + '.conf')
changed = False
self.post_device_status(message='applying general config from %s' % config_dir, state='CONFIG')
try:
if os.path.isfile(all_fpath):
self.log.info('reading from: {}'.format(all_fpath))
conf = open(all_fpath).read().split('\n')
self.log.info('pushing all config to device')
self.dev.api.configure(conf)
changed = True
else:
self.log.info('no all.conf file found')
if os.path.isfile(model_fpath):
self.log.info('reading model config from: {}'.format(model_fpath))
conf = open(model_fpath).read().split('\n')
self.log.info('pushing model config to device')
self.dev.api.configure(conf)
changed = True
else:
self.log.info('no model config file found: {}'.format(model_fpath))
except ConfigError as exc:
errmsg = str(exc.exc)
self.log.critical("unable to push config: {}".format(errmsg))
self.exit_results(dict(
ok=False,
error_type='config',
message=errmsg))
if changed is True:
self.post_device_status(message='Waiting for eAPI to become available.', state='CONFIG')
# retry for 5min (5000ms * 60) every 5000ms
# because eAPI takes time to activate during boot.
@tenacity.retry(wait=tenacity.wait_fixed(5000), stop=tenacity.stop_after_attempt(60))
def finalize():
self.log.info('Saving startup-config... (This will retry until eAPI is available.)')
self.dev.api.execute(['enable', 'copy running-config startup-config'])
self.post_device_status(message='Config written to device.', state='CONFIG')
self.log.info('config completed OK.')
finalize()
# ##### -----------------------------------------------------------------------
# #####
# ##### OS install process
# #####
# ##### -----------------------------------------------------------------------
@staticmethod
def ensure_md5sum(filepath):
md5sum_fpath = filepath + ".md5"
if os.path.isfile(md5sum_fpath):
with open(md5sum_fpath, 'rb') as f:
return f.read()
with open(filepath, 'rb') as f:
md5sum = hashlib.md5(f.read()).hexdigest()
with tempfile.NamedTemporaryFile('w', dir=os.path.dirname(md5sum_fpath), delete=False) as tf:
tf.write(md5sum)
tempname = tf.name
os.rename(tempname, md5sum_fpath)
return md5sum
def check_os_install_and_finally(self):
profile_dir = os.path.join(self.cli_args.topdir, 'etc', 'profiles', self.os_name)
conf_fpath = os.path.join(profile_dir, 'os-selector.cfg')
cmd = "{topdir}/bin/aztp_os_selector.py -j '{dev_json}' -c {config}".format(
topdir=self.cli_args.topdir,
dev_json=json.dumps(self.dev.facts),
config=conf_fpath)
self.log.info('os-select: [%s]' % cmd)
child = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
_stdout, _stderr = child.communicate()
self.log.info('os-select rc={}, stdout={}'.format(child.returncode, _stdout))
self.log.info('os-select stderr={}'.format(_stderr))
try:
results = json.loads(_stdout)
image_name = results.get('image_name', None)
finally_script = results.get('finally', None)
self.image_name = image_name
self.finally_script = finally_script
self.post_device_facts()
return results
except Exception as exc:
errmsg = 'Unable to load os-select output as JSON: {}\n {}'.format(_stdout, str(exc))
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg
), exit_error=errmsg)
    # retry every 1 second, for up to 10 minutes or 10 attempts (tenacity units are seconds)
    @tenacity.retry(wait=tenacity.wait_fixed(1),
                    stop=tenacity.stop_any(tenacity.stop_after_delay(600),
                                           tenacity.stop_after_attempt(10)))
def do_os_install(self):
self.image_fpath = os.path.join(self.vendor_dir, self.image_name)
if not os.path.isfile(self.image_fpath):
errmsg = 'Image file {} does not exist'.format(self.image_fpath)
self.log.critical(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg))
msg = 'installing OS image [{}] ... please be patient'.format(self.image_name)
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
# --------------------------------
# check for file already on device
# --------------------------------
try:
self.dev.api.execute('dir flash:%s' % self.image_name)
self.log.info('file already exists on device, skipping copy.')
has_file = True
except CommandError:
has_file = False
if has_file:
# ---------------------------------------------
# Configure switch to boot from existing upgrade image
# ---------------------------------------------
self.check_md5()
self.dev.api.configure(['boot system flash:%s' % self.image_name])
else:
# Install directly from ZTPS, bypassing the need to copy first
# Note that even if the install fails, this image will persist in flash.
# The next retry attempt will not have to download the image again.
cmds = ['install source http://{server}/images/{OS}/{filename}'
.format(server=self.server, OS=self.os_name,
filename=self.image_name)]
try:
self.dev.api.execute(cmds)
except CommandError as e:
self.log.error('Error while installing image: {}'.format(str(e)))
self.check_md5()
# Write config
self.dev.api.execute('copy running-config startup-config')
return
def check_md5(self):
""""""
md5sum = self.ensure_md5sum(filepath=self.image_fpath)
got_md5 = self.dev.api.execute('verify /md5 flash:{}'.format(self.image_name))
has_md5 = got_md5['messages'][0].split('=')[-1].strip()
if has_md5 != md5sum:
errmsg = 'Image file {filename} MD5 mismatch has={has} should={should}' \
.format(filename=self.image_name,
has=has_md5, should=md5sum)
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg))
self.log.info('md5sum checksum OK.')
def do_ensure_os_version(self):
self.check_os_install_and_finally()
if not self.image_name:
self.log.info('no software install required')
return self.dev
self.log.info('software image install required: %s' % self.image_name)
self.do_os_install()
self.log.info('software install OK')
self.log.info('rebooting device ... please be patient')
self.post_device_status(message='OS install %s completed, now rebooting'
' ... please be patient' % self.image_name, state='REBOOTING')
try:
self.dev.api.execute('reload now')
except CommandError:
# Ignore errors during disconnect due to reboot
pass
time.sleep(self.cli_args.init_delay)
return self.wait_for_device(countdown=self.cli_args.reload_delay, poll_delay=10)
# ##### -----------------------------------------------------------------------
# #####
# ##### !!! MAIN !!!
# #####
# ##### -----------------------------------------------------------------------
def main():
cli_args = cli_parse()
server = cli_args.server
eboot = EosBootstrap(server, cli_args)
if not os.path.isdir(cli_args.topdir):
eboot.exit_results(dict(
ok=False,
error_type='args',
message='{} is not a directory'.format(cli_args.topdir)))
eboot.log.info("starting bootstrap process in {} seconds"
.format(cli_args.init_delay))
eboot.post_device_status(message='bootstrap started, waiting for device access', state='START')
time.sleep(cli_args.init_delay)
eboot.wait_for_device(countdown=cli_args.reload_delay, poll_delay=10)
eboot.log.info("proceeding with bootstrap")
eboot.do_push_config()
if eboot.dev.facts['virtual']:
eboot.log.info('Virtual device. No OS upgrade necessary.')
eboot.check_os_install_and_finally()
else:
eboot.do_ensure_os_version()
eboot.log.info("bootstrap process finished")
eboot.exit_results(dict(ok=True))
if '__main__' == __name__:
main()
```
#### File: aeon_ztp/bin/opx_bootstrap.py
```python
import sys
import os
import json
import argparse
import subprocess
import logging
import logging.handlers
import time
import re
import requests
import tenacity
from pexpect import pxssh
import pexpect
from aeon.opx.device import Device
from paramiko import AuthenticationException
from paramiko.ssh_exception import NoValidConnectionsError
from aeon.exceptions import LoginNotReadyError
_DEFAULTS = {
'init-delay': 5,
'reload-delay': 10 * 60,
}
# ##### -----------------------------------------------------------------------
# #####
# ##### Command Line Arguments
# #####
# ##### -----------------------------------------------------------------------
def cli_parse(cmdargs=None):
psr = argparse.ArgumentParser(
prog='opx_bootstrap',
description="Aeon-ZTP bootstrapper for OPX",
add_help=True)
psr.add_argument(
'--target', required=True,
help='hostname or ip_addr of target device')
psr.add_argument(
'--server', required=True,
help='Aeon-ZTP host:port')
psr.add_argument(
'--topdir', required=True,
help='Aeon-ZTP install directory')
psr.add_argument(
'--logfile',
help='name of log file')
psr.add_argument(
'--reload-delay',
type=int, default=_DEFAULTS['reload-delay'],
help="about of time/s to try to reconnect to device after reload")
psr.add_argument(
'--init-delay',
type=int, default=_DEFAULTS['init-delay'],
help="amount of time/s to wait before starting the bootstrap process")
# ##### -------------------------
# ##### authentication
# ##### -------------------------
group = psr.add_argument_group('authentication')
group.add_argument(
'--user', help='login user-name')
group.add_argument(
'-U', '--env-user',
help='Username environment variable')
group.add_argument(
'-P', '--env-passwd',
required=True,
help='Passwd environment variable')
return psr.parse_args(cmdargs)
class OpxBootstrap(object):
def __init__(self, server, cli_args):
self.server = server
self.cli_args = cli_args
self.target = self.cli_args.target
self.os_name = 'opx'
self.progname = '%s-bootstrap' % self.os_name
self.logfile = self.cli_args.logfile
self.log = self.setup_logging(logname=self.progname, logfile=self.logfile)
self.user, self.passwd = self.get_user_and_passwd()
self.image_name = None
self.finally_script = None
self.dev = None
def setup_logging(self, logname, logfile=None):
log = logging.getLogger(name=logname)
log.setLevel(logging.INFO)
fmt = logging.Formatter(
'%(name)s %(levelname)s {target}: %(message)s'
.format(target=self.target))
if logfile:
handler = logging.FileHandler(self.logfile)
else:
handler = logging.handlers.SysLogHandler(address='/dev/log')
handler.setFormatter(fmt)
log.addHandler(handler)
return log
    def get_ssh_session(self, user=None, password=None, onie=False, sudo=False):
ssh = pxssh.pxssh(options={"StrictHostKeyChecking": "no", "UserKnownHostsFile": "/dev/null"})
# Uncomment for debugging when running opx_bootstrap.py from bash
# ssh.logfile = sys.stdout
if password:
ssh.login(self.target, user, password=password, auto_prompt_reset=not onie)
else:
ssh.login(self.target, user, auto_prompt_reset=not onie)
if onie:
ssh.PROMPT = 'ONIE:.*#'
if sudo:
rootprompt = re.compile('root@.*[#]')
ssh.sendline('sudo -s')
i = ssh.expect([rootprompt, 'assword.*: '])
if i == 0:
# Password not required
pass
elif i == 1:
# Sending sudo password
ssh.sendline(self.passwd)
j = ssh.expect([rootprompt, 'Sorry, try again'])
if j == 0:
pass
elif j == 1:
errmsg = 'Bad sudo password.'
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg))
else:
errmsg = 'Unable to obtain root privileges.'
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg))
ssh.set_unique_prompt()
ssh.sendline('whoami')
ssh.expect('root')
self.log.info('Logged in as root')
ssh.sendline('\n')
ssh.prompt()
return ssh
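    # A minimal usage sketch (illustrative only; not part of the original flow):
    # an ONIE rescue session keeps the ONIE prompt, while a normal session can
    # escalate to root via sudo:
    #
    #     ssh = self.get_ssh_session(user='root', onie=True)
    #     ssh = self.get_ssh_session(user=self.user, password=self.passwd, sudo=True)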
# ##### -----------------------------------------------------------------------
# #####
# ##### REST API functions
# #####
# ##### -----------------------------------------------------------------------
def post_device_facts(self):
facts = self.dev.facts
facts['ip_addr'] = self.dev.target
facts = json.dumps(facts)
dev_data = dict(
ip_addr=self.dev.target,
serial_number=self.dev.facts['serial_number'],
hw_model=self.dev.facts['hw_model'],
os_version=self.dev.facts['os_version'],
os_name=self.os_name,
facts=facts)
dev_data['image_name'] = self.image_name
dev_data['finally_script'] = self.finally_script
requests.put(url='http://%s/api/devices/facts' % self.server, json=dev_data)
def post_device_status(self, message=None, state=None):
if not (self.dev or self.target):
self.log.error('Either dev or target is required to post device status. Message was: {}'.format(message))
return
requests.put(
url='http://%s/api/devices/status' % self.server,
json=dict(
os_name=self.os_name,
ip_addr=self.target or self.dev.target,
state=state, message=message))
# ##### -----------------------------------------------------------------------
# #####
# ##### Utility Functions
# #####
# ##### -----------------------------------------------------------------------
def exit_results(self, results, exit_error=None):
if results['ok']:
msg = 'bootstrap completed OK'
self.post_device_status(message=msg, state='DONE')
self.log.info(msg)
sys.exit(0)
else:
msg = results['message']
self.post_device_status(message=msg, state='FAILED')
self.log.error(msg)
sys.exit(exit_error or 1)
def get_user_and_passwd(self):
user = self.cli_args.user or os.getenv(self.cli_args.env_user)
passwd = os.getenv(self.cli_args.env_passwd)
if not user:
errmsg = "login user-name missing"
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg))
if not passwd:
errmsg = "login user-password missing"
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False,
error_type='login',
message=errmsg))
return user, passwd
def wait_for_device(self, countdown, poll_delay, msg=None):
dev = None
        # first we need to wait for the device to be 'reachable' via SSH.
        # we'll use the login/connection exceptions to detect if it is or not
while not dev:
new_msg = msg or 'Waiting for device access via SSH. Timeout remaining: {} seconds'.format(countdown)
self.post_device_status(message=new_msg, state='AWAIT-ONLINE')
self.log.info(new_msg)
try:
dev = Device(self.target, user=self.user, passwd=self.passwd,
timeout=poll_delay)
except AuthenticationException as e:
self.log.info('Authentication exception reported: {} \n args: {}'.format(e, e.args))
self.exit_results(results=dict(
ok=False,
error_type='login',
message='Unauthorized - check user/password'))
except NoValidConnectionsError as e:
countdown -= poll_delay
if countdown <= 0:
self.exit_results(results=dict(
ok=False,
error_type='login',
message='Failed to connect to target %s within reload countdown' % self.target))
except LoginNotReadyError as e:
countdown -= poll_delay
if countdown <= 0:
self.exit_results(results=dict(
ok=False,
error_type='login',
message='Failed to connect to target %s within reload countdown' % self.target))
time.sleep(poll_delay)
self.dev = dev
self.post_device_facts()
def wait_for_onie_rescue(self, countdown, poll_delay, user='root'):
"""Polls for SSH access to OPX device in ONIE rescue mode.
Args:
countdown (int): Countdown in seconds to wait for device to become reachable.
            poll_delay (int): Delay in seconds between poll attempts.
user (str): SSH username to use. Defaults to 'root'.
"""
while countdown >= 0:
try:
msg = 'OPX installation in progress. Waiting for ONIE rescue mode. Timeout remaining: {} seconds'.format(
countdown)
self.post_device_status(message=msg, state='AWAIT-ONLINE')
self.log.info(msg)
ssh = pxssh.pxssh(options={"StrictHostKeyChecking": "no", "UserKnownHostsFile": "/dev/null"})
ssh.login(self.target, user, auto_prompt_reset=False)
ssh.PROMPT = 'ONIE:.*#'
ssh.sendline('\n')
ssh.prompt()
return True
except (pexpect.pxssh.ExceptionPxssh, pexpect.exceptions.EOF) as e:
if (str(e) == 'Could not establish connection to host') or isinstance(e, pexpect.exceptions.EOF):
countdown -= poll_delay
time.sleep(poll_delay)
else:
self.log.error('Error accessing {} in ONIE rescue mode: {}.'.format(self.target, str(e)))
self.exit_results(results=dict(
ok=False,
error_type='login',
message='Error accessing {} in ONIE rescue mode: {}.'.format(self.target, str(e))))
else:
self.log.error('Device {} not reachable in ONIE rescue mode within reload countdown.'.format(self.target))
self.exit_results(results=dict(
ok=False,
error_type='login',
message='Device {} not reachable in ONIE rescue mode within reload countdown.'.format(self.target)))
# ##### -----------------------------------------------------------------------
# #####
# ##### OS install process
# #####
# ##### -----------------------------------------------------------------------
def check_os_install_and_finally(self):
profile_dir = os.path.join(self.cli_args.topdir, 'etc', 'profiles', self.os_name)
conf_fpath = os.path.join(profile_dir, 'os-selector.cfg')
cmd = "{topdir}/bin/aztp_os_selector.py -j '{dev_json}' -c {config}".format(
topdir=self.cli_args.topdir,
dev_json=json.dumps(self.dev.facts),
config=conf_fpath)
self.log.info('os-select: [%s]' % cmd)
        child = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_stdout, _stderr = child.communicate()
self.log.info('os-select rc={}, stdout={}'.format(child.returncode, _stdout))
self.log.info('os-select stderr={}'.format(_stderr))
try:
results = json.loads(_stdout)
image_name = results.get('image_name', None)
finally_script = results.get('finally', None)
self.image_name = image_name
self.finally_script = finally_script
self.post_device_facts()
return results
except Exception as exc:
errmsg = 'Unable to load os-select output as JSON: {}\n {}'.format(_stdout, str(exc))
self.exit_results(results=dict(
ok=False,
error_type='install',
message=errmsg
), exit_error=errmsg)
# Cannot mock out retry decorator in unittest.
# Retry wrapper function around do_onie_install to avoid long unittest times.
    @tenacity.retry(wait=tenacity.wait_fixed(15), stop=tenacity.stop_after_attempt(3))
def onie_install(self, *args, **kwargs):
self.do_onie_install(**kwargs)
def do_onie_install(self, user='root'):
"""Initiates install in ONIE-RESCUE mode.
        Args:
            user (str): ONIE rescue mode user
"""
msg = 'Beginning OPX download and installation.'
self.post_device_status(message=msg, state='ONIE-RESCUE')
self.log.info(msg)
ssh = self.get_ssh_session(user=user, onie=True)
def start_installation():
# Start installation process
ssh.sendline('onie-nos-install http://{server}/images/{os_name}/{image_name}'
.format(server=self.cli_args.server, os_name=self.os_name, image_name=self.image_name))
# 'installer' means that the download has started
ssh.expect('installer', timeout=30)
msg = 'OPX image download has started. Will timeout if not completed within 10 minutes.'
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
check_install_status()
msg = 'OPX download complete. Executing installer. Will timeout if not completed within 20 minutes.'
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
# Indicates that the image has been downloaded and verified
ssh.expect('Installation finished. No error reported.', timeout=20 * 60)
ssh.prompt()
ssh.sendline('reboot')
msg = 'OPX download completed and verified, reboot initiated.'
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
ssh.close()
@tenacity.retry(wait=tenacity.wait_fixed(5),
stop=tenacity.stop_after_delay(10 * 60),
                        retry=tenacity.retry_if_exception_type(pexpect.exceptions.TIMEOUT))
def check_install_status():
"""
Check to see that either the install has started, or that the download has timed out.
:return:
"""
            # 'Verifying image checksum...OK' means that the download has finished
i = ssh.expect(['Verifying image checksum...OK', 'download timed out'], timeout=5)
if i == 0:
pass
if i == 1:
msg = 'Download timed out: http://{server}/images/{os_name}/{image_name}'.format(
server=self.cli_args.server, os_name=self.os_name, image_name=self.image_name)
self.log.info(msg)
self.exit_results(results=dict(ok=False, error_type='install', message=msg))
try:
start_installation()
except pxssh.ExceptionPxssh as e:
self.log.info(str(e))
self.exit_results(results=dict(ok=False, error_type='install', message=str(e)))
def install_os(self):
vendor_dir = os.path.join(self.cli_args.topdir, 'vendor_images', self.os_name)
image_fpath = os.path.join(vendor_dir, self.image_name)
if not os.path.exists(image_fpath):
errmsg = 'Image file does not exist: %s' % image_fpath
self.log.error(errmsg)
self.exit_results(results=dict(
ok=False, error_type='install',
message=errmsg))
msg = 'Installing OPX image=[%s] ... this can take up to 30 min.' % self.image_name
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
try:
            ssh = self.get_ssh_session(user=self.user, password=self.passwd, sudo=True)
ssh.sendline('grub-reboot --boot-directory=/mnt/boot ONIE')
ssh.prompt()
ssh.sendline('/mnt/onie-boot/onie/tools/bin/onie-boot-mode -o rescue')
ssh.prompt()
ssh.sendline('reboot')
except pxssh.ExceptionPxssh as e:
self.log.info(str(e))
self.exit_results(results=dict(ok=False, error_type='install', message=str(e)))
msg = 'Booting into ONIE rescue mode to install OS: %s' % self.image_name
self.log.info(msg)
self.post_device_status(message=msg, state='OS-INSTALL')
time.sleep(60)
# Wait for ONIE rescue mode
self.wait_for_onie_rescue(countdown=300, poll_delay=10, user='root')
# Download and verify OS
self.onie_install()
# Wait for onie-rescue shell to terminate
time.sleep(60)
# Wait for device to come back online after OS install
self.wait_for_device(countdown=10 * 60, poll_delay=30)
def ensure_os_version(self):
self.check_os_install_and_finally()
if not self.image_name:
self.log.info('no software install required')
return self.dev
self.log.info('software image install required: %s' % self.image_name)
self.install_os()
self.log.info('software install OK')
# ##### -----------------------------------------------------------------------
# #####
# ##### !!! MAIN !!!
# #####
# ##### -----------------------------------------------------------------------
def main():
cli_args = cli_parse()
self_server = cli_args.server
opxboot = OpxBootstrap(self_server, cli_args)
if not os.path.isdir(cli_args.topdir):
opxboot.exit_results(dict(
ok=False,
error_type='args',
message='{} is not a directory'.format(cli_args.topdir)))
opxboot.post_device_status(message='bootstrap started, waiting for device access', state='START')
opxboot.wait_for_device(countdown=cli_args.reload_delay, poll_delay=10, msg='Waiting for device access')
# Give the device time to stabilize since SSH may not be reliable yet.
time.sleep(30)
opxboot.log.info("proceeding with bootstrap")
if opxboot.dev.facts['virtual']:
opxboot.log.info('Virtual device. No OS upgrade necessary.')
opxboot.check_os_install_and_finally()
else:
opxboot.ensure_os_version()
opxboot.log.info("bootstrap process finished")
opxboot.exit_results(dict(ok=True))
if '__main__' == __name__:
main()
```
#### File: jeremyschulman/aeon-ztps/setup.py
```python
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import sys
# parse requirements
req_lines = [line.strip() for line in open(
'requirements.txt').readlines()]
install_reqs = list(filter(None, req_lines))
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox
errcode = tox.cmdline(self.test_args)
sys.exit(errcode)
setup(
name="aeon-ztp",
version="1.5.4",
author="Apstra Customer Enablement",
author_email="<EMAIL>",
description=("AEON ZTP Server"),
url="https://github.com/Apstra/aeon-ztps",
scripts=['aeon_ztp/bin/aztp-db-flush', 'aeon_ztp/bin/aztp-manage'],
license="Apache 2.0",
keywords="networking automation vendor-agnostic",
packages=find_packages(exclude=["tests", ".*"]),
include_package_data=True,
install_requires=install_reqs,
zip_safe=False,
tests_require=['tox'],
    cmdclass={'test': Tox},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking',
],
)
```
|
{
"source": "jeremyschulman/aio-cppm",
"score": 2
}
|
#### File: aio-cppm/examples/ex_devices.py
```python
import os
import asyncio
from httpx import Response
from aiocppm.client import CPPMClient as _Client
from aiocppm.mixins.network_device import CPPMNetworkDeviceMixin
import csv
class CPPMClient(_Client, CPPMNetworkDeviceMixin):
pass
# sample payload
# payload = dict(
# name='test-veos1',
# ip_address='1.1.1.1',
# tacacs_secret='foobaz',
# vendor_name='Arista',
# radius_secret='',
# attributes={
# 'Location': 'nyc1',
# 'OS Version': 'eos'
# }
# )
g_tacacs_secret = os.environ["TACACS_SECRET"]
def csv_to_payload(rec: dict):
return dict(
name=rec["hostname"],
ip_address=rec["ipaddr"],
tacacs_secret=g_tacacs_secret,
vendor_name=rec["vendor"],
radius_secret="",
attributes={"Location": rec["site"], "OS Version": rec["os_name"]},
)
def load_csv(filepath):
with open(filepath) as infile:
        return list(csv.DictReader(infile))
async def run(records):
cppm = CPPMClient(timeout=30)
await cppm.login()
existing_devices = await cppm.fetch_devices()
    existing_names = {rec["name"] for rec in existing_devices}
payloads = [
csv_to_payload(rec) for rec in records if rec["hostname"] not in existing_names
]
print(f"Creating {len(payloads)} device records.")
tasks = [asyncio.create_task(cppm.create_device(payload)) for payload in payloads]
for next_done in asyncio.as_completed(tasks, timeout=5 * 60):
res: Response = await next_done
body = res.json()
if res.is_error:
if "already exists" in body["detail"]:
print(f"OK: {body['detail']}")
continue
print(f"ERROR: {res.text}")
continue
print(f"OK: {res.text}")
async def patch_iosxe(cppm: CPPMClient, records):
tasks = [
asyncio.create_task(
cppm.api.patch(url=f'/api/network-device/{rec["id"]}', json=rec)
)
for rec in records
]
for next_done in asyncio.as_completed(tasks, timeout=5 * 60):
res: Response = await next_done
if res.is_error:
print(f"ERROR: {res.text}")
continue
print(f"OK: {res.text}")
def main(filepath):
records = load_csv(filepath)
asyncio.run(run(records))
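# A minimal CLI entry-point sketch (the CSV path is taken from the command line;
# the expected columns are those consumed by csv_to_payload above):
if __name__ == "__main__":
    import sys
    if len(sys.argv) != 2:
        sys.exit("usage: ex_devices.py <devices-csv-file>")
    main(sys.argv[1])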
```
#### File: aio-cppm/examples/ex_network_groups.py
```python
import asyncio
from collections import defaultdict
from operator import itemgetter
from httpx import Response
from aiocppm.client import CPPMClient as _Client
from aiocppm.mixins import network_device
class CPPMClient(_Client, network_device.CPPMNetworkDeviceMixin):
pass
async def ensure_groups(cppm: CPPMClient):
existing_groups = {rec["name"]: rec for rec in await cppm.fetch_device_groups()}
existing_devices = {rec["name"]: rec for rec in await cppm.fetch_devices()}
need_groups = defaultdict(list)
for name, rec in existing_devices.items():
os_name = rec["attributes"]["OS Version"]
gr_name = "all-" + os_name
need_groups[gr_name].append(rec)
gr_lists = {
gr_name: ", ".join(sorted(list(map(itemgetter("ip_address"), recs))))
for gr_name, recs in need_groups.items()
}
tasks = [
asyncio.create_task(
cppm.api.post(
network_device.URIs.network_device_groups,
json={"name": _gr_name, "value": _gr_value, "group_format": "list"},
)
)
for _gr_name, _gr_value in gr_lists.items()
if _gr_name not in existing_groups
]
changes = {
_gr_name: existing_groups[_gr_name]["id"]
for _gr_name, _gr_value in gr_lists.items()
if _gr_name in existing_groups
and _gr_value != existing_groups[_gr_name]["value"]
}
tasks.extend(
[
asyncio.create_task(
cppm.api.patch(
network_device.URIs.network_device_groups + f"/{_gr_id}",
json={"value": gr_lists[_gr_name]},
)
)
for _gr_name, _gr_id in changes.items()
]
)
for next_done in asyncio.as_completed(tasks, timeout=5 * 60):
res: Response = await next_done
if res.is_error:
print(f"FAIL: {res.text}")
continue
body = res.json()
print(f"OK: {res.status_code} network group {body['name']}.")
async def arun():
async with CPPMClient() as cppm:
await ensure_groups(cppm)
def run():
asyncio.run(arun())
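# Entry-point sketch so the example can be invoked directly; it builds the
# "all-<os>" device groups from the existing device inventory via ensure_groups.
if __name__ == "__main__":
    run()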
```
|
{
"source": "jeremyschulman/aio-ipfabric",
"score": 2
}
|
#### File: aio-ipfabric/aioipfabric/base_client.py
```python
from typing import Optional, AnyStr, Iterable, List, Dict, Union
from os import environ, getenv
from dataclasses import dataclass
from functools import wraps
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
from httpx import Response
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from .consts import ENV, API_VER, TableFields
from .api import IPFSession
from .filters import parse_filter
# -----------------------------------------------------------------------------
# Exports
# -----------------------------------------------------------------------------
__all__ = ["IPFBaseClient", "table_api"]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
@dataclass
class URIs:
"""identifies API URL endpoings used"""
snapshots = "/snapshots"
def table_api(methcoro):
"""Method decorator for all Table related APIs"""
@wraps(methcoro)
async def wrapper(
self,
*,
filters=None,
columns=None,
pagination=None,
sort=None,
reports=None,
request=None,
return_as="data",
**kwargs,
):
"""
        This decorator prepares a request body used to fetch records from a
        Table. The wrapped coroutine is passed, at a minimum, two parameters:
        the first is the instance of the IPF client, and a named parameter
        `request` that is a dictionary of the prepared fields. Any other
        Caller args `kwargs` are passed to the wrapped coroutine as-is.
        The return value is determined by the `return_as` parameter. By
        default, the return value is a list of table records; that is the
        response body 'data' list. If `return_as` is set to "meta" then the
        return value is the response body 'meta' dict item, which contains
        keys such as "count" and "size". If `return_as` is set to "body"
        then the return value is the entire native response body that contains
        both the 'data' and '_meta' keys (note the underscore on _meta in this
        case!). If `return_as` is set to 'raw' then the return value is the raw
        httpx.Response object.
Parameters
----------
self:
The instance of the IPF Client
filters: dict
The IPF filters dictionary item. If not provided, the
request['filters'] will be set to an empty dictionary.
columns: list
The list of table column names; specific to the Table being fetched.
If this parameter is None, then the request['columns'] key is not
set.
pagination: dict
The IPF API pagination item. If not provided, the
request['pagination'] key is not set.
sort: dict
The IPF API sort item. If not provided, the request['sort'] key is
not set.
reports: str
A request reports string, generally used when retrieving
intent-rule-validation values.
request: dict
If provided, this dict is the starting defition of the request
passed to the wrapped coroutine. If not provided, this decorator
creates a new dict object that is populated based on the above
description.
        return_as: str
            One of "data" (default), "meta", "body", or "raw", as described above.
Other Parameters
----------------
Any other key-value arguments are passed 'as-is' to the wrapped coroutine.
Returns
-------
Depends on the parameter `return_as` as described above.
"""
payload = request or {}
payload.setdefault(TableFields.snapshot, self.active_snapshot)
payload.setdefault(TableFields.filters, filters or {})
if columns:
payload[TableFields.columns] = columns
# TODO: perhaps add a default_pagination setting to the IP Client?
        # for now the default will be no pagination
if pagination:
payload["pagination"] = pagination
if reports:
payload["reports"] = reports
if sort:
payload["sort"] = sort
res = await methcoro(self, request=payload, **kwargs)
if return_as == "raw":
return res
res.raise_for_status()
body = res.json()
return {"data": body["data"], "meta": body["_meta"], "body": body}[return_as]
return wrapper
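# A minimal usage sketch of a @table_api coroutine such as `fetch_table` (the
# URL and column names below are illustrative assumptions, not part of this
# module):
#
#     async with IPFabricClient() as ipf:
#         # default return_as="data" -> list of row dicts
#         rows = await ipf.fetch_table(
#             url="/tables/inventory/devices",
#             columns=["hostname", "siteName"],
#         )
#         # return_as="meta" -> dict with keys such as "count" and "size"
#         meta = await ipf.fetch_table(
#             url="/tables/inventory/devices",
#             columns=["hostname"],
#             pagination=dict(start=0, limit=1),
#             return_as="meta",
#         )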
class IPFBaseClient(object):
"""
    The IPFabricClient instance is composed of one or more Mixins that are a
    subclass of IPFBaseClient. Put another way, the IPFBaseClient provides the
    common code that is available to all Mixins.
    The primary purpose of the IPFBaseClient instance is to provide the `api`
    attribute, which is an instance of the IPFSession (async HTTP client
instance).
"""
@dataclass
class ENV:
"""identifies enviornment variables used"""
addr = "IPF_ADDR"
username = "IPF_USERNAME"
password = "<PASSWORD>"
token = "IPF_TOKEN"
def __init__(
self,
/,
*mixin_classes,
base_url: Optional[AnyStr] = None,
token: Optional[AnyStr] = None,
username: Optional[AnyStr] = None,
password: Optional[AnyStr] = None,
**clientopts,
):
"""
Create an IP Fabric Client instance
Parameters
----------
base_url : str
The IP Fabric system HTTPS URL, for example:
https://my-ipfabric-server/
username: str
The IPF login user-name value
password: str
The IPF login password value
token : str
The IP Fabric API Token created from the Settings configuration;
requires IP Fabric v3.7+
Other Parameters
----------------
`clientopts` are passed AS-IS to the API session so that the
httpx.AsyncClient can be configured as desired.
Notes
-----
The Caller can provide either the login credentials (username, password)
or the refresh token. One of these two are required.
"""
token = token or getenv(ENV.token)
base_url = base_url or environ[ENV.addr]
username = username or getenv(ENV.username)
password = password or getenv(ENV.password)
self.api = IPFSession(
base_url=base_url + API_VER,
            token=token,
username=username,
password=password,
**clientopts,
)
# dynamically add any Mixins at the time of client creation. This
# enables the caller to perform the mixin at runtime without having to
# define a specific class.
if mixin_classes:
self.mixin(*mixin_classes)
self.snapshots = None
self._active_snapshot = None
self.version = None # the IPF product version
@property
def active_snapshot(self):
return self._active_snapshot
@active_snapshot.setter
def active_snapshot(self, name):
if (
s_id := next((i["id"] for i in self.snapshots if i["name"] == name), None)
) is None:
raise ValueError(name)
self._active_snapshot = s_id
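    # Usage sketch for selecting a snapshot by name after login (the snapshot
    # name below is an illustrative assumption, not a required value):
    #
    #     await ipf.login()                      # defaults to the most recent snapshot
    #     ipf.active_snapshot = "baseline"       # any snapshot "name" value known to IPF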
async def login(self):
"""
Coroutine to perform the initial login authentication process, retrieve the list
of current snapshots, and set the `active_snapshot` attribute to the latest
snapshot.
"""
if self.api.token and self.api.is_closed:
self.api = IPFSession(base_url=str(self.api.base_url), token=self.api.token)
await self.api.authenticate()
# if the `version` attribute is set this means that this client has
# connected to the IPF system before, and we do not need to re-fetch the
# version and snapshot data.
if self.version:
return
# capture the IPF version value
res = await self.api.get("/os/version")
res.raise_for_status()
self.version = res.json()["version"]
# fetch the snapshot catalog and default the active to the most recent one.
# TODO: might want to only fetch the "latest" snapshot vs. all.
await self.fetch_snapshots()
self._active_snapshot = self.snapshots[0]["id"]
async def logout(self):
"""close the async connection"""
await self.api.aclose()
async def fetch_snapshots(self) -> None:
"""coroutine to retrieve all known snapshots, returns List[dict] records"""
res = await self.api.get(URIs.snapshots)
res.raise_for_status()
self.snapshots = res.json()
@table_api
async def fetch_table(self, url: str, request: dict) -> Union[Response, List, Dict]:
"""
This coroutine is used to fetch records from any table, as identified by
the `url` parameter. The `requests` dict *must* contain a columns key,
and if missing this coroutine will raise a ValueError exception.
Parameters
----------
url: str
The URL to indicate the table, for example "/tables/inventory/devices".
request: dict
The request body payload, as prepared by the `table_api` decorator.
"""
return await self.api.post(url=url, json=request)
def mixin(self, *mixin_cls):
"""
This method allows the Caller to dynamically add a Mixin class
to the existing IPF client instance.
Parameters
----------
        mixin_cls: subclasses of IPFBaseClient
The mixin classes whose methods will be added to the existing
IPF client instance (self).
References
----------
https://stackoverflow.com/questions/8544983/dynamically-mixin-a-base-class-to-an-instance-in-python
"""
self.__class__ = type(self.__class__.__name__, (self.__class__, *mixin_cls), {})
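    # A minimal usage sketch of runtime mixins (the mixin class name below is
    # an illustrative assumption, not a class shipped by this module):
    #
    #     ipf = IPFBaseClient()
    #     ipf.mixin(SomeFeatureMixin)   # SomeFeatureMixin methods now exist on `ipf`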
    def __repr__(self) -> str:
"""override the default repr to show the IPF system base URL"""
cls_name = self.__class__.__name__
base_url = self.api.base_url
return f"{cls_name}: {base_url}"
# -------------------------------------------------------------------------
#
# ASYNC CONTEXT MANAGER METHODS
#
# -------------------------------------------------------------------------
async def __aenter__(self):
"""login and return instance"""
await self.login()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
"""close the http async api instance"""
await self.logout()
# -------------------------------------------------------------------------
#
# STATIC METHODS
#
# -------------------------------------------------------------------------
parse_filter = staticmethod(parse_filter)
```
#### File: aio-ipfabric/examples/ex_big_fetch.py
```python
import asyncio
from aioipfabric import IPFabricClient
ipf = IPFabricClient()
async def demo():
tasks = list()
for count in range(10):
task = asyncio.create_task(
ipf.fetch_table(
url="/tables/inventory/interfaces",
columns=["hostname", "intName", "siteName"],
return_as="raw",
)
)
tasks.append(task)
return await asyncio.gather(*tasks, return_exceptions=True)
async def demo2(url, columns, page_sz=500, timeout=60 * 5):
res = await ipf.fetch_table(
url=url, columns=columns, pagination=dict(start=0, limit=1), return_as="meta"
)
count = res["count"]
pages, more = divmod(count, page_sz)
if more:
pages += 1
paginations = [
dict(start=page_start, limit=page_sz) for page_start in range(0, count, page_sz)
]
tasks = [
asyncio.create_task(ipf.fetch_table(url=url, columns=columns, pagination=pg))
for pg in paginations
]
print(f"Fetching {pages} pages ...", flush=True)
total_records = list()
    for page, next_done in enumerate(asyncio.as_completed(tasks, timeout=timeout), start=1):
res = await next_done
print(f"Page {page} of {pages}, got {len(res)} records.")
total_records.extend(res)
return total_records
if_table_url = "/tables/inventory/interfaces"
if_table_columns = ["hostname", "intName", "siteName"]
mac_table_url = "/tables/addressing/mac"
mac_table_columns = ["hostname", "intName", "siteName", "mac", "vlan"]
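# A minimal driver sketch for the helpers above (assumes credentials are supplied
# via the environment variables expected by IPFabricClient):
async def demo2_main():
    await ipf.login()
    try:
        # fetch the full MAC table using the concurrent pagination helper
        records = await demo2(url=mac_table_url, columns=mac_table_columns)
        print(f"Fetched {len(records)} MAC table records in total.")
    finally:
        await ipf.logout()
if __name__ == "__main__":
    asyncio.run(demo2_main())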
```
#### File: aio-ipfabric/examples/ex_e2e_diagram.py
```python
import asyncio
from aioipfabric import IPFabricClient
from aioipfabric.mixins.diagrams import IPFDiagramE2EMixin
class Client(IPFabricClient, IPFDiagramE2EMixin):
pass
asyncio.set_event_loop(asyncio.get_event_loop())
async def end_to_end(**options):
async with Client() as ipf:
return await ipf.end_to_end_path(**options)
```
|
{
"source": "jeremyschulman/aio-nxapi",
"score": 2
}
|
#### File: aio-nxapi/asyncnxapi/device.py
```python
from typing import Optional, AnyStr, Tuple, List
import json
from socket import getservbyname
import httpx
import base64
import ssl
from collections import namedtuple
from . import xmlhelp
__all__ = ["Device", "CommandResults"]
_ssl_context = ssl.create_default_context()
_ssl_context.options &= ~ssl.OP_NO_TLSv1_2 & ~ssl.OP_NO_TLSv1_1
_ssl_context.minimum_version = ssl.TLSVersion.TLSv1
_ssl_context.check_hostname = False
_ssl_context.verify_mode = ssl.CERT_NONE
_ssl_context.set_ciphers("HIGH:!DH:!aNULL")
# _ssl_context = ssl.SSLContext(ssl_version=ssl.PROTOCOL_TLSv1_1) # noqa
# _ssl_context = ssl.create_default_context()
# # Sets up old and insecure TLSv1.
# _ssl_context.options &= ~ssl.OP_NO_TLSv1_3 & ~ssl.OP_NO_TLSv1_2 & ~ssl.OP_NO_TLSv1_1
# _ssl_context.minimum_version = ssl.TLSVersion.TLSv1
_NXAPI_CMD_TEMPLATE = """\
<?xml version="1.0"?>
<ins_api>
<version>{api_ver}</version>
<type>{cmd_type}</type>
<chunk>{chunk}</chunk>
<sid>{sid}</sid>
<input>{cmd_input}</input>
<output_format>{ofmt}</output_format>
</ins_api>"""
CommandResults = namedtuple("CommandResults", ["ok", "command", "output"])
class Transport(object):
CMDTYPES_OPTIONS = ("cli_show", "cli_show_ascii", "cli_conf", "bash")
OFMT_OPTIONS = ("xml", "json", "text")
API_VER = "1.0"
def __init__(self, host, proto, port, creds, timeout=60):
port = port or getservbyname(proto)
self.client = httpx.AsyncClient(
base_url=httpx.URL(f"{proto}://{host}:{port}"),
verify=_ssl_context,
timeout=httpx.Timeout(timeout),
)
self.client.headers["Content-Type"] = "application/xml"
self.b64auth = (
base64.encodebytes(bytes("%s:%s" % creds, encoding="utf-8"))
.decode()
.replace("\n", "")
)
self.client.headers["Authorization"] = "Basic %s" % self.b64auth
self.username = creds[0]
self.cmd_type = "cli_show"
self.ofmt = "xml"
@property
def timeout(self):
return self.client.timeout
@timeout.setter
def timeout(self, value):
self.client.timeout = httpx.Timeout(value)
def form_command(self, cmd_input, formatting, sid=None):
cmd_type = formatting.setdefault("cmd_type", self.cmd_type)
ofmt = formatting.setdefault("ofmt", self.ofmt)
return _NXAPI_CMD_TEMPLATE.format(
api_ver=self.API_VER,
cmd_type=cmd_type or self.cmd_type,
cmd_input=cmd_input,
chunk=0,
sid=sid or "sid",
ofmt=ofmt or self.ofmt,
)
async def post(self, xcmd, formatting, strip_ns=False):
res = await self.client.post("/ins", data=xcmd)
res.raise_for_status()
if formatting["ofmt"] == "json":
as_json = json.loads(res.text)
outputs = as_json["ins_api"]["outputs"]["output"]
if not isinstance(outputs, list):
outputs = [outputs]
return [
CommandResults(
ok=cmd_res["code"] == "200",
command=cmd_res["input"],
output=cmd_res["body"],
)
for cmd_res in outputs
]
# Output format is "xml" or "text"; but in either case the body content
# is extracted in the same manner.
as_text = xmlhelp.strip_ns(res.text) if strip_ns else res.text
as_xml = xmlhelp.fromstring(as_text)
def body_is_text(_res_e):
return _res_e.find("body").text.strip()
def body_is_xml(_res):
return _res.find("body")
get_output = (
body_is_text if formatting["cmd_type"] == "cli_show_ascii" else body_is_xml
)
return [
CommandResults(
ok=cmd_res.findtext("code") == "200",
command=cmd_res.findtext("input").strip(),
output=get_output(cmd_res),
)
for cmd_res in as_xml.xpath("outputs/output")
]
async def post_write_config(self, xcmd):
"""
This coroutine is used to push the configuration to the device an return any
error XML elements. If no errors then return value is None.
"""
res = await self.client.post("/ins", data=xcmd)
res.raise_for_status()
as_xml = xmlhelp.fromstring(res.text)
if any_errs := as_xml.xpath(".//code[. != '200']"):
return any_errs
return None
class Device(object):
def __init__(
self,
host: AnyStr,
creds: Tuple[str, str],
proto: Optional[AnyStr] = "https",
port=None,
private=None,
):
self.api = Transport(host=host, creds=creds, proto=proto, port=port)
self.private = private
self.host = host
async def exec(
self, commands: List[AnyStr], ofmt: Optional[AnyStr] = None, strip_ns=False
) -> List[CommandResults]:
"""
Execute a list of operational commands and return the output as a list of CommandResults.
"""
formatting = dict(ofmt=ofmt)
if ofmt == "text":
formatting["cmd_type"] = "cli_show_ascii"
formatting["ofmt"] = "xml"
xcmd = self.api.form_command(" ;".join(commands), formatting)
return await self.api.post(xcmd, formatting, strip_ns=strip_ns)
async def push_config(self, content: AnyStr):
xcmd = self.api.form_command(
cmd_input=" ; ".join(content.splitlines()),
formatting=dict(cmd_type="cli_conf", ofmt="xml"),
)
return await self.api.post_write_config(xcmd)
async def get_config(self, ofmt="text"):
res = await self.exec(["show running-config"], ofmt=ofmt, strip_ns=True)
return res[0].output
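# A minimal usage sketch (the hostname and credentials below are illustrative
# assumptions, not part of this module):
#
#     import asyncio
#     async def show_version(host, user, passwd):
#         dev = Device(host, creds=(user, passwd))
#         results = await dev.exec(["show version"], ofmt="json")
#         for res in results:
#             print(res.command, "OK" if res.ok else "FAILED", res.output)
#         return results
#     asyncio.run(show_version("nxos-sw1", "admin", "admin"))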
```
|
{
"source": "jeremyschulman/genieparser",
"score": 2
}
|
#### File: iosxr/tests/test_show_isis.py
```python
import unittest
from unittest.mock import Mock
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError, SchemaMissingKeyError
# iosxr show_isis
from genie.libs.parser.iosxr.show_isis import (
ShowIsis,
ShowIsisLspLog,
ShowIsisSpfLog,
ShowIsisProtocol,
ShowIsisHostname,
ShowIsisInterface,
ShowIsisAdjacency,
ShowIsisNeighbors,
ShowIsisStatistics,
ShowIsisSpfLogDetail,
ShowIsisDatabaseDetail,
ShowIsisSegmentRoutingLabelTable,
)
# ==================================================
# Unit test for 'show isis adjacency'
# ==================================================
class TestShowIsisAdjacency(unittest.TestCase):
    '''Unit test for "show isis adjacency"'''
empty_output = {'execute.return_value': ''}
maxDiff = None
golden_parsed_output1 = {
'isis': {
'p': {
'vrf': {
'default': {
'level': {
'Level-1': {
'interfaces': {
'PO0/1/0/1': {
'system_id': {
'12a4': {
'interface': 'Port-channel0/1/0/1',
'snpa': '*PtoP*',
'state': 'Up',
'hold': '23',
'changed': '00:00:06',
'nsf': 'Capable',
'bfd': 'Init'}}},
'Gi0/6/0/2': {
'system_id': {
'12a4': {
'interface': 'GigabitEthernet0/6/0/2',
'snpa': '0004.2893.f2f6',
'state': 'Up',
'hold': '56',
'changed': '00:04:01',
'nsf': 'Capable',
'bfd': 'Up'}}}},
'total_adjacency_count': 2},
'Level-2': {
'interfaces': {
'PO0/1/0/1': {
'system_id': {
'12a4': {
'interface': 'Port-channel0/1/0/1',
'snpa': '*PtoP*',
'state': 'Up',
'hold': '23',
'changed': '00:00:06',
'nsf': 'Capable',
'bfd': 'None'}}},
'Gi0/6/0/2': {
'system_id': {
'12a4': {
'interface': 'GigabitEthernet0/6/0/2',
'snpa': '0004.2893.f2f6',
'state': 'Up',
'hold': '26',
'changed': '00:00:13',
'nsf': 'Capable',
'bfd': 'Init'}}}},
'total_adjacency_count': 2}}}}}}}
golden_output1 = {'execute.return_value': '''
IS-IS p Level-1 adjacencies:
System Id Interface SNPA State Hold Changed NSF BFD
12a4 PO0/1/0/1 *PtoP* Up 23 00:00:06 Capable Init
12a4 Gi0/6/0/2 0004.2893.f2f6 Up 56 00:04:01 Capable Up
Total adjacency count: 2
IS-IS p Level-2 adjacencies:
System Id Interface SNPA State Hold Changed NSF BFD
12a4 PO0/1/0/1 *PtoP* Up 23 00:00:06 Capable None
12a4 Gi0/6/0/2 0004.2893.f2f6 Up 26 00:00:13 Capable Init
Total adjacency count: 2
'''}
golden_parsed_output2 = {
'isis': {
'test': {
'vrf': {
'default': {
'level': {
'Level-1': {
'interfaces': {
'Gi0/0/0/0.115': {
'system_id': {
'R1_xe': {
'interface': 'GigabitEthernet0/0/0/0.115',
'snpa': 'fa16.3eab.a39d',
'state': 'Up',
'hold': '23',
'changed': '22:30:27',
'nsf': 'Yes',
'ipv4_bfd': 'None',
'ipv6_bfd': 'None'}}},
'Gi0/0/0/1.115': {
'system_id': {
'R3_nx': {
'interface': 'GigabitEthernet0/0/0/1.115',
'snpa': '5e00.4002.0007',
'state': 'Up',
'hold': '20',
'changed': '22:30:27',
'nsf': 'Yes',
'ipv4_bfd': 'None',
'ipv6_bfd': 'None'}}}},
'total_adjacency_count': 2},
'Level-2': {
'interfaces': {
'Gi0/0/0/0.115': {
'system_id': {
'R1_xe': {
'interface': 'GigabitEthernet0/0/0/0.115',
'snpa': 'fa16.3eab.a39d',
'state': 'Up',
'hold': '26',
'changed': '22:30:26',
'nsf': 'Yes',
'ipv4_bfd': 'None',
'ipv6_bfd': 'None'}}},
'Gi0/0/0/1.115': {
'system_id': {
'R3_nx': {
'interface': 'GigabitEthernet0/0/0/1.115',
'snpa': '5e00.4002.0007',
'state': 'Up',
'hold': '23',
'changed': '22:30:27',
'nsf': 'Yes',
'ipv4_bfd': 'None',
'ipv6_bfd': 'None'}}}},
'total_adjacency_count': 2}}}}},
'test1': {
'vrf': {
'default': {
'level': {
'Level-1': {},
'Level-2': {}}}}}}}
golden_output2 = {'execute.return_value': '''
+++ R2_xr: executing command 'show isis adjacency' +++
show isis adjacency
Wed Apr 17 16:25:06.870 UTC
IS-IS test Level-1 adjacencies:
System Id Interface SNPA State Hold Changed NSF IPv4 IPv6
BFD BFD
R1_xe Gi0/0/0/0.115 fa16.3eab.a39d Up 23 22:30:27 Yes None None
R3_nx Gi0/0/0/1.115 5e00.4002.0007 Up 20 22:30:27 Yes None None
Total adjacency count: 2
IS-IS test Level-2 adjacencies:
System Id Interface SNPA State Hold Changed NSF IPv4 IPv6
BFD BFD
R1_xe Gi0/0/0/0.115 fa16.3eab.a39d Up 26 22:30:26 Yes None None
R3_nx Gi0/0/0/1.115 5e00.4002.0007 Up 23 22:30:27 Yes None None
Total adjacency count: 2
IS-IS test1 Level-1 adjacencies:
System Id Interface SNPA State Hold Changed NSF IPv4 IPv6
BFD BFD
IS-IS test1 Level-2 adjacencies:
System Id Interface SNPA State Hold Changed NSF IPv4 IPv6
BFD BFD
'''}
def test_show_isis_adjacency_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisAdjacency(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_isis_adjacency_golden1(self):
self.device = Mock(**self.golden_output1)
obj = ShowIsisAdjacency(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_isis_adjacency_golden2(self):
self.device = Mock(**self.golden_output2)
obj = ShowIsisAdjacency(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
# ====================================
# Unit test for 'show isis neighbors'
# ====================================
class TestShowIsisNeighbors(unittest.TestCase):
'''Unit test for "show isis neighbors"'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'isis': {
'test': {
'vrf': {
'default': {
'interfaces': {
'GigabitEthernet0/0/0/0.115': {
'neighbors': {
'R1_xe': {
'snpa': 'fa16.3eab.a39d',
'state': 'Up',
'holdtime': '24',
'type': 'L1L2',
'ietf_nsf': 'Capable'}}},
'GigabitEthernet0/0/0/1.115': {
'neighbors': {
'R3_nx': {
'snpa': '5e00.4002.0007',
'state': 'Up',
'holdtime': '25',
'type': 'L1L2',
'ietf_nsf': 'Capable'}}}},
'total_neighbor_count': 2}}}}}
golden_output1 = {'execute.return_value': '''
+++ R2_xr: executing command 'show isis neighbors' +++
show isis neighbors
Wed Apr 17 16:21:30.075 UTC
IS-IS test neighbors:
System Id Interface SNPA State Holdtime Type IETF-NSF
R1_xe Gi0/0/0/0.115 fa16.3eab.a39d Up 24 L1L2 Capable
R3_nx Gi0/0/0/1.115 5e00.4002.0007 Up 25 L1L2 Capable
Total neighbor count: 2
'''}
golden_parsed_output2 = {
'isis': {
'test': {
'vrf': {
'default': {
'interfaces': {
'GigabitEthernet0/0/0/0.115': {
'neighbors': {
'R1_xe': {
'snpa': 'fa16.3eab.a39d',
'state': 'Up',
'holdtime': '22',
'type': 'L1L2',
'ietf_nsf': 'Capable'}}},
'GigabitEthernet0/0/0/1.115': {
'neighbors': {
'R3_nx': {
'snpa': '5e00.4002.0007',
'state': 'Up',
'holdtime': '22',
'type': 'L1L2',
'ietf_nsf': 'Capable'}}}},
'total_neighbor_count': 2}}},
'test1': {
'vrf': {
'default': {}}}}}
golden_output2 = {'execute.return_value': '''
show isis neighbors
Thu Apr 18 11:00:22.192 UTC
IS-IS test neighbors:
System Id Interface SNPA State Holdtime Type IETF-NSF
R1_xe Gi0/0/0/0.115 fa16.3eab.a39d Up 22 L1L2 Capable
R3_nx Gi0/0/0/1.115 5e00.4002.0007 Up 22 L1L2 Capable
Total neighbor count: 2
IS-IS test1 neighbors:
System Id Interface SNPA State Holdtime Type IETF-NSF
'''}
def test_show_isis_neighbors_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisNeighbors(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_isis_neighbors_golden1(self):
self.device = Mock(**self.golden_output1)
obj = ShowIsisNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
def test_show_isis_neighbors_golden2(self):
self.device = Mock(**self.golden_output2)
obj = ShowIsisNeighbors(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output2)
# ======================================================
# Unit test for 'show isis segment-routing label table'
# ======================================================
class TestShowIsisSegmentRoutingLabelTable(unittest.TestCase):
'''Unit test for "show isis segment-routing label table"'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output1 = {
'instance': {
'SR': {
'label': {
16001: {
'prefix_interface': 'Loopback0'},
16002: {
'prefix_interface': '10.2.2.2/32'},
16003: {
'prefix_interface': '10.3.3.3/32'}
}
}
}
}
golden_output1 = {'execute.return_value': '''
RP/0/RP0/CPU0:iosxrv9000-1#show isis segment-routing label table
Mon Sep 30 13:22:32.921 EDT
IS-IS SR IS Label Table
Label Prefix/Interface
---------- ----------------
16001 Loopback0
16002 10.2.2.2/32
16003 10.3.3.3/32
'''}
def test_show_isis_segment_routing_label_table_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisSegmentRoutingLabelTable(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_isis_segment_routing_label_table_golden1(self):
self.device = Mock(**self.golden_output1)
obj = ShowIsisSegmentRoutingLabelTable(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output1)
class TestShowIsis(unittest.TestCase):
    ''' Unit tests for commands:
* show isis -> ShowIsis
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
"instance": {
"test": {
"process_id": "test",
"instance": "0",
"vrf": {
"default": {
"system_id": "3333.3333.3333",
"is_levels": "level-1-2",
"manual_area_address": ["49.0002"],
"routing_area_address": ["49.0002", "49.0001"],
"non_stop_forwarding": "Disabled",
"most_recent_startup_mode": "Cold Restart",
"te_connection_status": "Down",
"topology": {
"IPv4 Unicast": {
'vrf': {
'default': {
"level": {
1: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
2: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
},
},
},
"IPv6 Unicast": {
'vrf': {
'default': {
"level": {
1: {
"metric": 10,
"ispf_status": "Disabled"},
2: {
"metric": 10,
"ispf_status": "Disabled"},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
}
}
},
},
"interfaces": {
"Loopback0": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/0": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/1": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/2": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/3": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
},
}
},
}
}
}
golden_output_1 = {'execute.return_value': '''
IS-IS Router: test
System Id: 3333.3333.3333
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0002
Routing for area address(es):
49.0002
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback0 is running actively (active in configuration)
GigabitEthernet0/0/0/0 is running actively (active in configuration)
GigabitEthernet0/0/0/1 is running actively (active in configuration)
GigabitEthernet0/0/0/2 is running actively (active in configuration)
GigabitEthernet0/0/0/3 is running actively (active in configuration)
'''}
golden_parsed_output_2 = {
'instance': {
'Cisco': {
'process_id': 'Cisco',
'instance': '0',
'vrf': {
'default': {
'system_id': '1781.8132.1195',
'is_levels': 'level-2-only',
'manual_area_address': ['49.0000'],
'routing_area_address': ['49.0000'],
'non_stop_forwarding': 'Disabled',
'most_recent_startup_mode': 'Cold Restart',
'te_connection_status': 'Up',
'topology': {
'IPv4 Unicast': {
'vrf': {
'default': {
'level': {
2: {
'generate_style': 'Wide',
'accept_style': 'Wide',
'metric': 100000,
'ispf_status': 'Disabled',
},
},
'protocols_redistributed': True,
'redistributing': ['Connected', 'Static', 'OSPF process 65001', 'OSPF process 65002', 'OSPF process 65003'],
'distance': 115,
'adv_passive_only': True,
},
},
},
},
'srlb': {
'start': 15000,
'end': 15999,
},
'srgb': {
'start': 16000,
'end': 81534,
},
'interfaces': {
'Bundle-Ether1': {
'running_state': 'running suppressed',
'configuration_state': 'active in configuration',
},
'Bundle-Ether2': {
'running_state': 'running suppressed',
'configuration_state': 'active in configuration',
},
'Loopback0': {
'running_state': 'running passively',
'configuration_state': 'passive in configuration',
},
'TenGigE0/0/1/2': {
'running_state': 'running suppressed',
'configuration_state': 'active in configuration',
},
'TenGigE0/0/1/3': {
'running_state': 'disabled',
'configuration_state': 'active in configuration',
},
'TenGigE0/5/0/1': {
'running_state': 'disabled',
'configuration_state': 'active in configuration',
},
},
},
},
},
},
}
golden_output_2 = {'execute.return_value': '''
+++ genie-Router: executing command 'show isis' +++
show isis
Mon Oct 7 16:22:11.993 EDT
IS-IS Router: Cisco
System Id: 1781.8132.1195
Instance Id: 0
IS Levels: level-2-only
Manual area address(es):
49.0000
Routing for area address(es):
49.0000
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Up
Topologies supported by IS-IS:
IPv4 Unicast
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 100000
ISPF status: Disabled
Redistributing:
Connected
Static
OSPF process 65001
OSPF process 65002
OSPF process 65003
Distance: 115
Advertise Passive Interface Prefixes Only: Yes
SRLB allocated: 15000 - 15999
SRGB allocated: 16000 - 81534
Interfaces supported by IS-IS:
Bundle-Ether1 is running suppressed (active in configuration)
Bundle-Ether2 is running suppressed (active in configuration)
Loopback0 is running passively (passive in configuration)
TenGigE0/0/1/2 is running suppressed (active in configuration)
TenGigE0/0/1/3 is disabled (active in configuration)
TenGigE0/5/0/1 is disabled (active in configuration)
RP/0/RSP0/CPU0:genie-Router#
'''}
def test_show_isis_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowIsis(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_isis_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIsis(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_show_isis_2(self):
self.device = Mock(**self.golden_output_2)
obj = ShowIsis(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowIsisSpfLog(unittest.TestCase):
''' Unit Tests for command/parser
* show isis spf-log/ShowIsisSpfLog
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"instance": {
"TEST": {
"address_family": {
"IPv4 Unicast": {
"spf_log": {
1: {
"start_timestamp": "Mon Oct 7 2019 23:12:51.401",
"level": 2,
"type": "PPFRR",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"triggers": "PERPREFIXFRR",
},
2: {
"start_timestamp": "Mon Oct 7 2019 23:27:50.960",
"level": 2,
"type": "FSPF",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"triggers": "PERIODIC",
},
3: {
"start_timestamp": "Tue Oct 8 2019 00:00:17.514",
"level": 2,
"type": "PRC",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 6,
"first_trigger_lsp": "bla-host1.12-34",
"triggers": "PREFIXBAD",
},
4: {
"start_timestamp": "Tue Oct 8 2019 00:02:24.523",
"level": 2,
"type": "PRC",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 6,
"first_trigger_lsp": "bla-host2.13-34",
"triggers": "PREFIXGOOD",
},
5: {
"start_timestamp": "Tue Oct 8 2019 00:02:25.025",
"level": 2,
"type": "PPFRR",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"triggers": "PERPREFIXFRR",
},
6: {
"start_timestamp": "Tue Oct 8 2019 08:15:04.265",
"level": 2,
"type": "PRC",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"first_trigger_lsp": "bla-9.blahlab-cld.12-34",
"triggers": "PREFIXBAD",
},
7: {
"start_timestamp": "Tue Oct 8 2019 08:15:04.418",
"level": 2,
"type": "PRC",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"first_trigger_lsp": "bla-9.blahlab-cld.12-34",
"triggers": "PREFIXGOOD",
},
8: {
"start_timestamp": "Tue Oct 8 2019 08:17:55.366",
"level": 2,
"type": "PRC",
"time_ms": 0,
"total_nodes": 64,
"trigger_count": 1,
"first_trigger_lsp": "bla-9.blahlab-cld.12-34",
"triggers": "PREFIXBAD",
},
}
}
}
}
}
}
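    # Raw 'show isis spf-log' output that parses into parsed_output_1 above.
    # The date banner lines ("--- Mon Oct 7 2019 ---") are combined with the
    # per-row timestamps to build the start_timestamp values.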
golden_output_1 = {'execute.return_value': '''
#show isis spf-log
Tue Oct 8 17:37:35.029 EDT
IS-IS TEST Level 2 IPv4 Unicast Route Calculation Log
Time Total Trig.
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
------------ ----- ----- ----- ----- -------------------- -----------------------
--- Mon Oct 7 2019 ---
23:12:51.401 PPFRR 0 64 1 PERPREFIXFRR
23:27:50.960 FSPF 0 64 1 PERIODIC
--- Tue Oct 8 2019 ---
00:00:17.514 PRC 0 64 6 bla-host1.12-34 PREFIXBAD
00:02:24.523 PRC 0 64 6 bla-host2.13-34 PREFIXGOOD
00:02:25.025 PPFRR 0 64 1 PERPREFIXFRR
08:15:04.265 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXBAD
08:15:04.418 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXGOOD
08:17:55.366 PRC 0 64 1 bla-9.blahlab-cld.12-34 PREFIXBAD
'''}
parsed_output_2 = {
"instance": {
"1": {
"address_family": {
"IPv4 Unicast": {
"spf_log": {
1: {
"start_timestamp": "Thurs Aug 19 2004 12:00:50.787",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 1,
"trigger_count": 3,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "LSPHEADER TLVCODE",
},
2: {
"start_timestamp": "Thurs Aug 19 2004 12:00:52.846",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 1,
"trigger_count": 1,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "LSPHEADER",
},
3: {
"start_timestamp": "Thurs Aug 19 2004 12:00:56.049",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 1,
"trigger_count": 1,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "TLVCODE",
},
4: {
"start_timestamp": "Thurs Aug 19 2004 12:01:02.620",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 1,
"trigger_count": 2,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "NEWADJ LINKTLV",
},
5: {
"start_timestamp": "Mon Aug 19 2004 12:00:50.790",
"level": 1,
"type": "FSPF",
"time_ms": 0,
"total_nodes": 1,
"trigger_count": 4,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "LSPHEADER TLVCODE",
},
6: {
"start_timestamp": "Mon Aug 19 2004 12:00:54.043",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 1,
"trigger_count": 2,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "NEWADJ LSPHEADER",
},
7: {
"start_timestamp": "Mon Aug 19 2004 12:00:55.922",
"level": 1,
"type": "FSPF",
"time_ms": 1,
"total_nodes": 2,
"trigger_count": 1,
"first_trigger_lsp": "ensoft-grs7.00-00",
"triggers": "NEWLSPO",
},
}
}
}
}
}
}
# From ncs5k/ncs6k/asr9k documentation
golden_output_2 = {'execute.return_value': '''
# show isis spf-log
IS-IS 1 Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
----------- ---- ---- ----- ----- ----- ------- --- --------
--- Thurs Aug 19 2004 ---
12:00:50.787 FSPF 1 1 3 ensoft-grs7.00-00 LSPHEADER TLVCODE
12:00:52.846 FSPF 1 1 1 ensoft-grs7.00-00 LSPHEADER
12:00:56.049 FSPF 1 1 1 ensoft-grs7.00-00 TLVCODE
12:01:02.620 FSPF 1 1 2 ensoft-grs7.00-00 NEWADJ LINKTLV
IS-IS 1 Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
----------- ---- ---- ----- ----- ----- ------- --- --------
--- Mon Aug 19 2004 ---
12:00:50.790 FSPF 0 1 4 ensoft-grs7.00-00 LSPHEADER TLVCODE
12:00:54.043 FSPF 1 1 2 ensoft-grs7.00-00 NEWADJ LSPHEADER
12:00:55.922 FSPF 1 2 1 ensoft-grs7.00-00 NEWLSPO
'''}
def test_empty_output(self):
device = Mock(**self.empty_output)
obj = ShowIsisSpfLog(device=device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
device = Mock(**self.golden_output_1)
obj = ShowIsisSpfLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.parsed_output_1)
def test_golden_output_2(self):
device = Mock(**self.golden_output_2)
obj = ShowIsisSpfLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.parsed_output_2)
class TestShowIsisSpfLogDetail(unittest.TestCase):
''' Unit tests for commands/parsers
* show isis spf-log detail/ShowIsisSpfLogDetail
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"instance": {
"isp": {
"address_family": {
"IPv4 Unicast": {
"spf_log": {
1: {
"type": "FSPF",
"time_ms": 1,
"level": 1,
"total_nodes": 1,
"trigger_count": 1,
"first_trigger_lsp": "12a5.00-00",
"triggers": "NEWLSP0",
"start_timestamp": "Mon Aug 16 2004 19:25:35.140",
"delay_ms": 51,
"delay_info": "since first trigger",
"spt_calculation": {
"cpu_time_ms": 0,
"real_time_ms": 0},
"prefix_update": {
"cpu_time_ms": 1,
"real_time_ms": 1},
"new_lsp_arrivals": 0,
"next_wait_interval_ms": 200,
"results": {
"nodes": {
"reach": 1,
"unreach": 0,
"total": 1},
"prefixes": {
"items": {
"critical_priority": {
"reach": 0,
"unreach": 0,
"total": 0,
},
"high_priority": {
"reach": 0,
"unreach": 0,
"total": 0,
},
"medium_priority": {
"reach": 0,
"unreach": 0,
"total": 0,
},
"low_priority": {
"reach": 0,
"unreach": 0,
"total": 0,
},
"all_priority": {
"reach": 0,
"unreach": 0,
"total": 0,
},
},
"routes": {
"critical_priority": {
"reach": 0,
"total": 0},
"high_priority": {
"reach": 0,
"total": 0},
"medium_priority": {
"reach": 0,
"total": 0},
"low_priority": {
"reach": 0,
"total": 0},
"all_priority": {
"reach": 0,
"total": 0
},
}
},
},
},
}
}
}
}
}
}
golden_output_1 = {'execute.return_value': '''
# show isis spf-log detail
ISIS isp Level 1 IPv4 Unicast Route Calculation Log
Time Total Trig
Timestamp Type (ms) Nodes Count First Trigger LSP Triggers
Mon Aug 16 2004
19:25:35.140 FSPF 1 1 1 12a5.00-00 NEWLSP0
Delay: 51ms (since first trigger)
SPT Calculation
CPU Time: 0ms
Real Time: 0ms
Prefix Updates
CPU Time: 1ms
Real Time: 1ms
New LSP Arrivals: 0
Next Wait Interval: 200ms
Results
Reach Unreach Total
Nodes: 1 0 1
Prefixes (Items)
Critical Priority: 0 0 0
High Priority: 0 0 0
Medium Priority 0 0 0
Low Priority 0 0 0
All Priorities 0 0 0
Prefixes (Routes)
Critical Priority: 0 - 0
High Priority: 0 - 0
Medium Priority 0 - 0
Low Priority: 0 - 0
All Priorities 0 - 0
'''}
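    # The detail variant adds the per-run breakdown (delay, SPT calculation,
    # prefix update times, and result counters) under each spf_log entry, as
    # reflected in parsed_output_1 above.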
def test_empty_output(self):
device = Mock(**self.empty_output)
obj = ShowIsisSpfLogDetail(device=device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
device = Mock(**self.golden_output_1)
obj = ShowIsisSpfLogDetail(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.parsed_output_1)
class TestIsisHostname(unittest.TestCase):
''' Unit tests for commands:
* show isis hostname / ShowIsisHostname
* show isis instance {instance} hostname / ShowIsisHostname
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
'isis': {
'TEST1': {
'vrf': {
'default': {
'level': {
2: {
'system_id': {
'5286.4470.2149': {
'dynamic_hostname': 'host-1.bla-site3'},
'9839.2319.8337': {
'dynamic_hostname': 'host3-bla'},
'3549.6375.2540': {
'dynamic_hostname': 'abc-3.bla-site4'},
'0670.7021.9090': {
'dynamic_hostname': 'host2-abc'},
'9853.9997.6489': {
'dynamic_hostname': 'abc2-xyz',
'local_router': True}}}}}}}}}
golden_output_1 = {'execute.return_value': '''
show isis hostname
Thu Oct 3 10:53:16.534 EDT
IS-IS TEST1 hostnames
Level System ID Dynamic Hostname
2 5286.4470.2149 host-1.bla-site3
2 9839.2319.8337 host3-bla
2 3549.6375.2540 abc-3.bla-site4
2 0670.7021.9090 host2-abc
2 * 9853.9997.6489 abc2-xyz
'''}
golden_parsed_output_2 = {
"isis": {
"test": {
"vrf": {
"default": {
"level": {
2: {
"system_id": {
"2222.2222.2222": {
"dynamic_hostname": "R2"},
"8888.8888.8888": {
"dynamic_hostname": "R8"},
"7777.7777.7777": {
"dynamic_hostname": "R7"},
"3333.3333.3333": {
"dynamic_hostname": "R3",
"local_router": True,
},
"5555.5555.5555": {
"dynamic_hostname": "R5"},
"9999.9999.9999": {
"dynamic_hostname": "R9"},
}
},
1: {
"system_id": {
"4444.4444.4444": {
"dynamic_hostname": "R4"},
"6666.6666.6666": {
"dynamic_hostname": "R6"},
"7777.7777.7777": {
"dynamic_hostname": "R7"},
"3333.3333.3333": {
"dynamic_hostname": "R3",
"local_router": True,
},
"5555.5555.5555": {
"dynamic_hostname": "R5"},
}
},
}
}
}
}
}
}
golden_output_2 = {'execute.return_value': '''
show isis hostname
IS-IS test hostnames
Level System ID Dynamic Hostname
2 2222.2222.2222 R2
1 4444.4444.4444 R4
1 6666.6666.6666 R6
2 8888.8888.8888 R8
1,2 7777.7777.7777 R7
1,2 * 3333.3333.3333 R3
1,2 5555.5555.5555 R5
2 9999.9999.9999 R9
'''}
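    # A leading '*' in the System ID column marks the local router; the parser
    # reports it as 'local_router': True (see 9853.9997.6489 in output 1 and
    # 3333.3333.3333 in output 2).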
def test_empty_output(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisHostname(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIsisHostname(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_golden_output_2(self):
self.device = Mock(**self.golden_output_2)
obj = ShowIsisHostname(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowIsisStatistics(unittest.TestCase):
''' Unit tests for commands/parsers
* show isis statistics/ShowIsisStatistics
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"isis": {
"test": {
"psnp_cache": {
"hits": 21,
"tries": 118},
"csnp_cache": {
"hits": 1398,
"tries": 1501,
"updates": 204},
"lsp": {
"checksum_errors_received": 0,
"dropped": 0},
"snp": {
"dropped": 0},
"upd": {
"max_queue_size": 3,
"queue_size": 0},
"transmit_time": {
"hello": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 66473,
"rate_per_sec": 15,
},
"csnp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 45979,
"rate_per_sec": 2,
},
"psnp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 4113,
"rate_per_sec": 0,
},
"lsp": {
"average_transmit_time_sec": 0,
"average_transmit_time_nsec": 14392,
"rate_per_sec": 0,
},
},
"process_time": {
"hello": {
"average_process_time_sec": 0,
"average_process_time_nsec": 51163,
"rate_per_sec": 9,
},
"csnp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 26914,
"rate_per_sec": 1,
},
"psnp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 39758,
"rate_per_sec": 0,
},
"lsp": {
"average_process_time_sec": 0,
"average_process_time_nsec": 52706,
"rate_per_sec": 0,
},
},
"level": {
1: {
"lsp": {
"new": 11,
"refresh": 15},
"address_family": {
"IPv4 Unicast": {
"total_spf_calculation": 18,
"full_spf_calculation": 16,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 2,
"periodic_spf_calculation": 3,
},
"IPv6 Unicast": {
"total_spf_calculation": 19,
"full_spf_calculation": 17,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 2,
"periodic_spf_calculation": 3,
},
},
},
2: {
"lsp": {
"new": 13,
"refresh": 11},
"address_family": {
"IPv4 Unicast": {
"total_spf_calculation": 23,
"full_spf_calculation": 15,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 8,
"periodic_spf_calculation": 4,
},
"IPv6 Unicast": {
"total_spf_calculation": 22,
"full_spf_calculation": 14,
"ispf_calculation": 0,
"next_hop_calculation": 0,
"partial_route_calculation": 8,
"periodic_spf_calculation": 4,
},
},
},
},
"interface": {
"Loopback0": {
"level": {
1: {
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 51,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 46,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/0": {
"level": {
1: {
"hello": {
"received": 594,
"sent": 593},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 51,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 1779,
"sent": 594},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 63,
"received": 7,
"flooding_duplicates": 0,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 595,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/1": {
"level": {
1: {
"hello": {
"received": 1294,
"sent": 604},
"dr": {
"elections": 5},
"lsps_sourced": {
"sent": 47,
"received": 15,
"flooding_duplicates": 8,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 339,
"received": 0},
"psnp": {
"sent": 0,
"received": 1},
},
2: {
"hello": {
"received": 724,
"sent": 281},
"dr": {
"elections": 5},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 42,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/2": {
"level": {
1: {
"hello": {
"received": 1739,
"sent": 572},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 51,
"received": 31,
"flooding_duplicates": 0,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 567,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 597,
"sent": 0},
"dr": {
"elections": 1},
"lsps_sourced": {
"sent": 0,
"received": 0,
"flooding_duplicates": 46,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 0},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
"GigabitEthernet0/0/0/3": {
"level": {
1: {
"hello": {
"received": 598,
"sent": 1115},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 38,
"received": 26,
"flooding_duplicates": 5,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 370},
"psnp": {
"sent": 0,
"received": 0},
},
2: {
"hello": {
"received": 596,
"sent": 1113},
"dr": {
"elections": 3},
"lsps_sourced": {
"sent": 18,
"received": 39,
"flooding_duplicates": 3,
"arrival_time_throttled": 0,
},
"csnp": {
"sent": 0,
"received": 370},
"psnp": {
"sent": 0,
"received": 0},
},
}
},
},
}
}
}
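    # Raw 'show isis statistics' output corresponding to parsed_output_1 above,
    # covering the global counters, per-level SPF statistics, and per-interface
    # PDU counters.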
golden_output_1 = {'execute.return_value': '''
IS-IS test statistics:
Fast PSNP cache (hits/tries): 21/118
Fast CSNP cache (hits/tries): 1398/1501
Fast CSNP cache updates: 204
LSP checksum errors received: 0
LSP Dropped: 0
SNP Dropped: 0
UPD Max Queue size: 3
UPD Queue size: 0
Average transmit times and rate:
Hello: 0 s, 66473 ns, 15/s
CSNP: 0 s, 45979 ns, 2/s
PSNP: 0 s, 4113 ns, 0/s
LSP: 0 s, 14392 ns, 0/s
Average process times and rate:
Hello: 0 s, 51163 ns, 9/s
CSNP: 0 s, 26914 ns, 1/s
PSNP: 0 s, 39758 ns, 0/s
LSP: 0 s, 52706 ns, 0/s
Level-1:
LSPs sourced (new/refresh): 11/15
IPv4 Unicast
Total SPF calculations : 18
Full SPF calculations : 16
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 2
Periodic SPF calculations : 3
IPv6 Unicast
Total SPF calculations : 19
Full SPF calculations : 17
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 2
Periodic SPF calculations : 3
Level-2:
LSPs sourced (new/refresh): 13/11
IPv4 Unicast
Total SPF calculations : 23
Full SPF calculations : 15
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 8
Periodic SPF calculations : 4
IPv6 Unicast
Total SPF calculations : 22
Full SPF calculations : 14
ISPF calculations : 0
Next Hop Calculations : 0
Partial Route Calculations : 8
Periodic SPF calculations : 4
Interface Loopback0:
Level-1 LSPs (sent/rcvd) : 0/0
Level-1 CSNPs (sent/rcvd) : 0/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 51
Level-1 LSPs Arrival Time Throttled : 0
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 46
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/0:
Level-1 Hellos (sent/rcvd): 594/593
Level-1 DR Elections : 1
Level-1 LSPs (sent/rcvd) : 0/0
Level-1 CSNPs (sent/rcvd) : 0/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 51
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 1779/594
Level-2 DR Elections : 1
Level-2 LSPs (sent/rcvd) : 63/7
Level-2 CSNPs (sent/rcvd) : 595/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 0
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/1:
Level-1 Hellos (sent/rcvd): 1294/604
Level-1 DR Elections : 5
Level-1 LSPs (sent/rcvd) : 47/15
Level-1 CSNPs (sent/rcvd) : 339/0
Level-1 PSNPs (sent/rcvd) : 0/1
Level-1 LSP Flooding Duplicates : 8
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 724/281
Level-2 DR Elections : 5
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 42
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/2:
Level-1 Hellos (sent/rcvd): 1739/572
Level-1 DR Elections : 3
Level-1 LSPs (sent/rcvd) : 51/31
Level-1 CSNPs (sent/rcvd) : 567/0
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 0
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 597/0
Level-2 DR Elections : 1
Level-2 LSPs (sent/rcvd) : 0/0
Level-2 CSNPs (sent/rcvd) : 0/0
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 46
Level-2 LSPs Arrival Time Throttled : 0
Interface GigabitEthernet0/0/0/3:
Level-1 Hellos (sent/rcvd): 598/1115
Level-1 DR Elections : 3
Level-1 LSPs (sent/rcvd) : 38/26
Level-1 CSNPs (sent/rcvd) : 0/370
Level-1 PSNPs (sent/rcvd) : 0/0
Level-1 LSP Flooding Duplicates : 5
Level-1 LSPs Arrival Time Throttled : 0
Level-2 Hellos (sent/rcvd): 596/1113
Level-2 DR Elections : 3
Level-2 LSPs (sent/rcvd) : 18/39
Level-2 CSNPs (sent/rcvd) : 0/370
Level-2 PSNPs (sent/rcvd) : 0/0
Level-2 LSP Flooding Duplicates : 3
Level-2 LSPs Arrival Time Throttled : 0
'''}
def test_empty_output(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisStatistics(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIsisStatistics(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.parsed_output_1)
class TestShowIsisProtocol(unittest.TestCase):
''' Unit tests for command/parser
* show isis protocol / ShowIsisProtocol
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
"instance": {
"TEST": {
"process_id": "TEST",
"instance": "0",
"vrf": {
"default": {
"system_id": "0123.4567.8910",
"is_levels": "level-2-only",
"manual_area_address": ["90.0000"],
"routing_area_address": ["90.0000"],
"non_stop_forwarding": "Disabled",
"most_recent_startup_mode": "Cold Restart",
"te_connection_status": "Up",
"topology": {
"IPv4 Unicast": {
'vrf': {
'default': {
"level": {
2: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 100000,
"ispf_status": "Disabled",
}
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": True,
}
}
}
},
"srlb": {
"start": 15000,
"end": 15999},
"srgb": {
"start": 16000,
"end": 81534},
"interfaces": {
"GigabitEthernet0/0/0/1": {
"running_state": "running suppressed",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/2": {
"running_state": "running suppressed",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/3": {
"running_state": "running suppressed",
"configuration_state": "active in configuration",
},
"Loopback0": {
"running_state": "running passively",
"configuration_state": "passive in configuration",
},
"GigabitEthernet0/0/0/4": {
"running_state": "running suppressed",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/5": {
"running_state": "running suppressed",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/6": {
"running_state": "disabled",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/7": {
"running_state": "disabled",
"configuration_state": "active in configuration",
},
},
}
},
}
}
}
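    # Single-instance 'show isis protocol' output matching
    # golden_parsed_output_1 above.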
golden_output_1 = {'execute.return_value': '''
#show isis protocol
Wed Oct 9 13:07:59.452 EDT
IS-IS Router: TEST
System Id: 0123.4567.8910
Instance Id: 0
IS Levels: level-2-only
Manual area address(es):
90.0000
Routing for area address(es):
90.0000
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Up
Topologies supported by IS-IS:
IPv4 Unicast
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 100000
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: Yes
SRLB allocated: 15000 - 15999
SRGB allocated: 16000 - 81534
Interfaces supported by IS-IS:
GigabitEthernet0/0/0/1 is running suppressed (active in configuration)
GigabitEthernet0/0/0/2 is running suppressed (active in configuration)
GigabitEthernet0/0/0/3 is running suppressed (active in configuration)
Loopback0 is running passively (passive in configuration)
GigabitEthernet0/0/0/4 is running suppressed (active in configuration)
GigabitEthernet0/0/0/5 is running suppressed (active in configuration)
GigabitEthernet0/0/0/6 is disabled (active in configuration)
GigabitEthernet0/0/0/7 is disabled (active in configuration)
'''}
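    # golden_parsed_output_2 / golden_output_2 cover multiple IS-IS instances,
    # including VRF-scoped instances (test1 and test2 in VRF1) and an instance
    # with no configured system ID.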
golden_parsed_output_2 = {
"instance": {
"test": {
"process_id": "test",
"instance": "0",
"vrf": {
"default": {
"system_id": "2222.2222.2222",
"is_levels": "level-1-2",
"manual_area_address": ["49.0001"],
"routing_area_address": ["49.0001"],
"non_stop_forwarding": "Disabled",
"most_recent_startup_mode": "Cold Restart",
"te_connection_status": "Down",
"topology": {
"IPv4 Unicast": {
"vrf": {
"default": {
"level": {
1: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
2: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
}
}
},
"IPv6 Unicast": {
"vrf": {
"default": {
"level": {
1: {
"metric": 10,
"ispf_status": "Disabled"},
2: {
"metric": 10,
"ispf_status": "Disabled"},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
}
}
},
},
"interfaces": {
"Loopback0": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/0.115": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/1.115": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
},
}
},
},
"test1": {
"process_id": "test1",
"instance": "0",
"vrf": {
"VRF1": {
"system_id": "2222.2222.2222",
"is_levels": "level-1-2",
"manual_area_address": ["49.0001"],
"routing_area_address": ["49.0001"],
"non_stop_forwarding": "Disabled",
"most_recent_startup_mode": "Cold Restart",
"te_connection_status": "Down",
"topology": {
"IPv4 Unicast": {
"vrf": {
"VRF1": {
"level": {
1: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
2: {
"generate_style": "Wide",
"accept_style": "Wide",
"metric": 10,
"ispf_status": "Disabled",
},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
}
}
},
"IPv6 Unicast": {
"vrf": {
"VRF1": {
"level": {
1: {
"metric": 10,
"ispf_status": "Disabled"},
2: {
"metric": 10,
"ispf_status": "Disabled"},
},
"protocols_redistributed": False,
"distance": 115,
"adv_passive_only": False,
}
}
},
},
"interfaces": {
"Loopback300": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/0.415": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
"GigabitEthernet0/0/0/1.415": {
"running_state": "running actively",
"configuration_state": "active in configuration",
},
},
}
},
},
"test2": {
"process_id": "test2",
"instance": "0",
"vrf": {
"VRF1": {
"system_id": "0000.0000.0000",
"is_levels": "level-1-2",
"non_stop_forwarding": "Disabled",
"most_recent_startup_mode": "Cold Restart",
"te_connection_status": "Down",
}
},
},
}
}
golden_output_2 = {'execute.return_value': '''
# show isis protocol
IS-IS Router: test
System Id: 2222.2222.2222
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0001
Routing for area address(es):
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback0 is running actively (active in configuration)
GigabitEthernet0/0/0/0.115 is running actively (active in configuration)
GigabitEthernet0/0/0/1.115 is running actively (active in configuration)
IS-IS Router: test1
VRF context: VRF1
System Id: 2222.2222.2222
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
49.0001
Routing for area address(es):
49.0001
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
IPv4 Unicast VRF VRF1
Level-1
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
Level-2
Metric style (generate/accept): Wide/Wide
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
IPv6 Unicast VRF VRF1
Level-1
Metric: 10
ISPF status: Disabled
Level-2
Metric: 10
ISPF status: Disabled
No protocols redistributed
Distance: 115
Advertise Passive Interface Prefixes Only: No
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
Loopback300 is running actively (active in configuration)
GigabitEthernet0/0/0/0.415 is running actively (active in configuration)
GigabitEthernet0/0/0/1.415 is running actively (active in configuration)
IS-IS Router: test2
VRF context: VRF1
System Id: 0000.0000.0000 (Not configured, protocol disabled)
Instance Id: 0
IS Levels: level-1-2
Manual area address(es):
Routing for area address(es):
Non-stop forwarding: Disabled
Most recent startup mode: Cold Restart
TE connection status: Down
Topologies supported by IS-IS:
none
SRLB not allocated
SRGB not allocated
Interfaces supported by IS-IS:
'''}
def test_empty_output(self):
device = Mock(**self.empty_output)
obj = ShowIsisProtocol(device=device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_parsed_output_1(self):
device = Mock(**self.golden_output_1)
obj = ShowIsisProtocol(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_golden_parsed_output_2(self):
device = Mock(**self.golden_output_2)
obj = ShowIsisProtocol(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
class TestShowIsisLspLog(unittest.TestCase):
    ''' Unit tests for commands/parsers:
* show isis lsp-log / ShowIsisLspLog
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
"instance": {
"TEST": {
"lsp_log": {
1: {
"count": 1,
"level": 2,
"triggers": "IPEXT",
"received_timestamp": "Thu Sep 26 2019 09:39:16.648",
},
2: {
"count": 1,
"level": 2,
"triggers": "IPEXT",
"received_timestamp": "Thu Sep 26 2019 10:29:02.303",
},
3: {
"count": 1,
"level": 2,
"triggers": "IPEXT",
"received_timestamp": "Mon Sep 30 2019 00:00:17.274",
},
4: {
"count": 1,
"level": 2,
"triggers": "IPEXT",
"received_timestamp": "Mon Sep 30 2019 00:02:25.263",
},
5: {
"count": 2,
"level": 2,
"interface": "Bundle-Ether2",
"triggers": "DELADJ",
"received_timestamp": "Fri Oct 4 2019 16:10:11.734",
},
6: {
"count": 2,
"level": 2,
"interface": "Bundle-Ether2",
"triggers": "ADJSIDADD",
"received_timestamp": "Fri Oct 4 2019 16:17:45.821",
},
}
}
}
}
golden_output_1 = {'execute.return_value': '''
#show isis lsp-log
Tue Oct 8 17:38:16.254 EDT
IS-IS TEST Level 2 LSP log
When Count Interface Triggers
--- Thu Sep 26 2019 ---
09:39:16.648 1 IPEXT
10:29:02.303 1 IPEXT
--- Mon Sep 30 2019 ---
00:00:17.274 1 IPEXT
00:02:25.263 1 IPEXT
--- Fri Oct 4 2019 ---
16:10:11.734 2 BE2 DELADJ
16:17:45.821 2 BE2 ADJSIDADD
'''}
golden_parsed_output_2 = {
"instance": {
"isp": {
"lsp_log": {
1: {
"count": 1,
"level": 1,
"received_timestamp": "00:02:36"},
2: {
"count": 1,
"level": 1,
"triggers": "LSPREGEN",
"received_timestamp": "00:02:31",
},
3: {
"count": 1,
"level": 1,
"interface": "Port-channel4/1",
"triggers": "NEWADJ",
"received_timestamp": "00:02:24",
},
4: {
"count": 1,
"level": 1,
"interface": "GigabitEthernet5/0",
"triggers": "DIS",
"received_timestamp": "00:02:23",
},
5: {
"count": 1,
"level": 1,
"interface": "Loopback0",
"triggers": "IPUP",
"received_timestamp": "00:01:12",
},
6: {
"count": 1,
"level": 2,
"received_timestamp": "00:02:36"},
7: {
"count": 1,
"level": 2,
"triggers": "LSPREGEN",
"received_timestamp": "00:02:30",
},
8: {
"count": 1,
"level": 2,
"interface": "GigabitEthernet5/0",
"triggers": "DIS",
"received_timestamp": "00:02:23",
},
9: {
"count": 1,
"level": 2,
"interface": "Loopback0",
"triggers": "IPUP",
"received_timestamp": "00:01:12",
},
}
}
}
}
# From asr9k docs
golden_output_2 = {'execute.return_value': '''
# show isis lsp-log
ISIS isp Level 1 LSP log
When Count Interface Triggers
00:02:36 1
00:02:31 1 LSPREGEN
00:02:24 1 PO4/1 NEWADJ
00:02:23 1 Gi5/0 DIS
00:01:12 1 Lo0 IPUP
ISIS isp Level 2 LSP log
When Count Interface Triggers
00:02:36 1
00:02:30 1 LSPREGEN
00:02:23 1 Gi5/0 DIS
00:01:12 1 Lo0 IPUP
'''}
golden_parsed_output_3 = {
"instance": {
"": {
"lsp_log": {
1: {
"count": 3,
"level": 1,
"triggers": "CONFIG NEWADJ DIS",
"received_timestamp": "07:05:18",
},
2: {
"count": 2,
"level": 1,
"interface": "Ethernet0",
"triggers": "NEWADJ DIS",
"received_timestamp": "07:05:13",
},
3: {
"count": 2,
"level": 2,
"triggers": "CONFIG NEWADJ",
"received_timestamp": "07:05:24",
},
4: {
"count": 1,
"level": 2,
"interface": "Ethernet0",
"triggers": "NEWADJ",
"received_timestamp": "07:05:23",
},
5: {
"count": 3,
"level": 2,
"interface": "Loopback0",
"triggers": "CONFIG DELADJ",
"received_timestamp": "07:01:39",
},
}
}
}
}
# From ncs6k docs
golden_output_3 = {'execute.return_value': '''
Router# show isis lsp-log
Level 1 LSP log
When Count Interface Triggers
07:05:18 3 CONFIG NEWADJ DIS
07:05:13 2 Ethernet0 NEWADJ DIS
Level 2 LSP log
When Count Interface Triggers
07:05:24 2 CONFIG NEWADJ
07:05:23 1 Ethernet0 NEWADJ
07:01:39 3 Loopback0 CONFIG DELADJ
'''}
golden_parsed_output_4 = {
"instance": {
"isp": {
"lsp_log": {
1: {
"count": 1,
"level": 1,
"received_timestamp": "00:02:36"},
2: {
"count": 1,
"level": 1,
"triggers": "LSPREGEN",
"received_timestamp": "00:02:31",
},
3: {
"count": 1,
"level": 1,
"interface": "Port-channel4/1",
"triggers": "DELADJ",
"received_timestamp": "00:02:26",
},
4: {
"count": 1,
"level": 1,
"interface": "Port-channel4/1",
"triggers": "NEWADJ",
"received_timestamp": "00:02:24",
},
5: {
"count": 1,
"level": 2,
"received_timestamp": "00:02:36"},
6: {
"count": 1,
"level": 2,
"triggers": "LSPREGEN",
"received_timestamp": "00:02:30",
},
7: {
"count": 1,
"level": 2,
"interface": "Port-channel4/1",
"triggers": "DELADJ",
"received_timestamp": "00:02:26",
},
8: {
"count": 1,
"level": 2,
"interface": "Loopback0",
"triggers": "IPUP",
"received_timestamp": "00:01:12",
},
}
}
}
}
# from ncs5k docs
golden_output_4 = {'execute.return_value': '''
#show isis lsp-log
ISIS isp Level 1 LSP log
When Count Interface Triggers
00:02:36 1
00:02:31 1 LSPREGEN
00:02:26 1 PO4/1 DELADJ
00:02:24 1 PO4/1 NEWADJ
ISIS isp Level 2 LSP log
When Count Interface Triggers
00:02:36 1
00:02:30 1 LSPREGEN
00:02:26 1 PO4/1 DELADJ
00:01:12 1 Lo0 IPUP
'''}
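    # Note that the parser expands the abbreviated interface names from the CLI
    # (e.g. BE2 -> Bundle-Ether2, PO4/1 -> Port-channel4/1,
    # Gi5/0 -> GigabitEthernet5/0, Lo0 -> Loopback0), as seen in the golden
    # outputs above.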
def test_empty_output(self):
device = Mock(**self.empty_output)
obj = ShowIsisLspLog(device=device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
device = Mock(**self.golden_output_1)
obj = ShowIsisLspLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_golden_output_2(self):
device = Mock(**self.golden_output_2)
obj = ShowIsisLspLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
def test_golden_output_3(self):
device = Mock(**self.golden_output_3)
obj = ShowIsisLspLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_3)
def test_golden_output_4(self):
device = Mock(**self.golden_output_4)
obj = ShowIsisLspLog(device=device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_4)
class TestShowIsisInterface(unittest.TestCase):
    ''' Unit tests for command/parser:
* show isis interface -> ShowIsisInterface
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
parsed_output_1 = {
"instance": {
"test": {
"interface": {
"Loopback0": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 0,
"circuit_type": "level-1-2",
"media_type": "Loop",
"circuit_number": 0,
"level": {
1: {
"adjacency_count": 0,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1500},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"forwarding_address": ["0.0.0.0"],
"global_prefix": ["10.36.3.0/24"],
},
"IPv6": {
"state": "Enabled",
"forwarding_address": ["::"],
"global_prefix": ["2001:db8:3:3:3::3/128"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 10,
},
},
"GigabitEthernet0/0/0/0": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 7,
"level": {
1: {
"adjacency_count": 0,
"lan_id": "R3.07",
"priority": {
"local": "64",
"dis": "none (no DIS elected)"},
"next_lan_iih_sec": 5,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 1,
"lan_id": "R3.07",
"priority": {
"local": "64",
"dis": "64"},
"next_lan_iih_sec": 3,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3ee6.6bd7",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"forwarding_address": ["10.2.3.3"],
"global_prefix": ["10.2.3.0/24"],
},
"IPv6": {
"state": "Enabled",
"forwarding_address": ["fe80::f816:3eff:fee6:6bd7"],
"global_prefix": ["2001:db8:10:2::/64"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/1": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 5,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R3.05",
"priority": {
"local": "64",
"dis": "64"},
"next_lan_iih_sec": 2,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lan_id": "R3.05",
"priority": {
"local": "64",
"dis": "none (no DIS elected)"},
"next_lan_iih_sec": 6,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3eb0.d50f",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"forwarding_address": ["10.3.6.3"],
"global_prefix": ["10.3.6.0/24"],
},
"IPv6": {
"state": "Enabled",
"forwarding_address": ["fe80::f816:3eff:feb0:d50f"],
"global_prefix": ["2001:db8:10:3::/64"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/2": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 3,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R3.03",
"priority": {
"local": "64",
"dis": "64"},
"next_lan_iih_sec": 1,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 0,
"lan_id": "R3.03",
"priority": {
"local": "64",
"dis": "none (no DIS elected)"},
"next_lan_iih_sec": 6,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3ead.2906",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"forwarding_address": ["10.3.4.3"],
"global_prefix": ["10.3.4.0/24"],
},
"IPv6": {
"state": "Enabled",
"forwarding_address": ["fe80::f816:3eff:fead:2906"],
"global_prefix": ["None (No global addresses are configured)"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
"GigabitEthernet0/0/0/3": {
"state": "Enabled",
"adjacency_formation": "Enabled",
"prefix_advertisement": "Enabled",
"ipv4_bfd": False,
"ipv6_bfd": False,
"bfd_min_interval": 150,
"bfd_multiplier": 3,
"bandwidth": 1000000,
"circuit_type": "level-1-2",
"media_type": "LAN",
"circuit_number": 1,
"level": {
1: {
"adjacency_count": 1,
"lan_id": "R5.01",
"priority": {
"local": "64",
"dis": "64"},
"next_lan_iih_sec": 3,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
2: {
"adjacency_count": 1,
"lan_id": "R5.01",
"priority": {
"local": "64",
"dis": "64"},
"next_lan_iih_sec": 2,
"lsp_pacing_interval_ms": 33,
"psnp_entry_queue_size": 0,
"hello_interval_sec": 10,
"hello_multiplier": 3,
},
},
"clns_io": {
"protocol_state": "Up",
"mtu": 1497,
"snpa": "fa16.3e1c.d826",
"layer2_mcast_groups_membership": {
"all_level_1_iss": "Yes",
"all_level_2_iss": "Yes",
},
},
"topology": {
"ipv4 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
"ipv6 unicast": {
"state": "Enabled",
"adjacency_formation": "Running",
"prefix_advertisement": "Running",
"metric": {
"level": {
1: 10,
2: 10}},
"weight": {
"level": {
1: 0,
2: 0}},
"mpls": {
"mpls_max_label_stack": "1/3/10 (PRI/BKP/SRTE)",
"ldp_sync": {
"level": {
1: "Disabled",
2: "Disabled"}},
},
"frr": {
"level": {
1: {
"state": "Not Enabled",
"type": "None"},
2: {
"state": "Not Enabled",
"type": "None"},
}
},
},
},
"address_family": {
"IPv4": {
"state": "Enabled",
"forwarding_address": ["10.3.5.3"],
"global_prefix": ["10.3.5.0/24"],
},
"IPv6": {
"state": "Enabled",
"forwarding_address": ["fe80::f816:3eff:fe1c:d826"],
"global_prefix": ["None (No global addresses are configured)"],
},
},
"lsp": {
"transmit_timer_expires_ms": 0,
"transmission_state": "idle",
"lsp_transmit_back_to_back_limit_window_msec": 0,
"lsp_transmit_back_to_back_limit": 9,
},
},
}
}
}
}
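    # Raw 'show isis interface' output corresponding to parsed_output_1 above.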
    golden_output_1 = {'execute.return_value': '''
IS-IS test Interfaces
Loopback0 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 0
Circuit Type: level-1-2
Media Type: Loop
Circuit Number: 0
Level-1
Adjacency Count: 0
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1500
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 0.0.0.0
Global Prefix(es): 10.36.3.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): ::
Global Prefix(es): 2001:db8:3:3:3::3/128
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 10 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/0 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 7
Level-1
Adjacency Count: 0
LAN ID: R3.07
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 5 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 1
LAN ID: R3.07
Priority (Local/DIS): 64/64
Next LAN IIH in: 3 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3ee6.6bd7
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.2.3.3
Global Prefix(es): 10.2.3.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:fee6:6bd7
Global Prefix(es): 2001:db8:10:2::/64
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/1 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 5
Level-1
Adjacency Count: 1
LAN ID: R3.05
Priority (Local/DIS): 64/64
Next LAN IIH in: 2 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LAN ID: R3.05
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 6 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3eb0.d50f
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.6.3
Global Prefix(es): 10.3.6.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:feb0:d50f
Global Prefix(es): 2001:db8:10:3::/64
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/2 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 3
Level-1
Adjacency Count: 1
LAN ID: R3.03
Priority (Local/DIS): 64/64
Next LAN IIH in: 1 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 0
LAN ID: R3.03
Priority (Local/DIS): 64/none (no DIS elected)
Next LAN IIH in: 6 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3ead.2906
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.4.3
Global Prefix(es): 10.3.4.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:fead:2906
Global Prefix(es): None (No global addresses are configured)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
GigabitEthernet0/0/0/3 Enabled
Adjacency Formation: Enabled
Prefix Advertisement: Enabled
IPv4 BFD: Disabled
IPv6 BFD: Disabled
BFD Min Interval: 150
BFD Multiplier: 3
Bandwidth: 1000000
Circuit Type: level-1-2
Media Type: LAN
Circuit Number: 1
Level-1
Adjacency Count: 1
LAN ID: R5.01
Priority (Local/DIS): 64/64
Next LAN IIH in: 3 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
Level-2
Adjacency Count: 1
LAN ID: R5.01
Priority (Local/DIS): 64/64
Next LAN IIH in: 2 s
LSP Pacing Interval: 33 ms
PSNP Entry Queue Size: 0
Hello Interval: 10 s
Hello Multiplier: 3
CLNS I/O
Protocol State: Up
MTU: 1497
SNPA: fa16.3e1c.d826
Layer-2 MCast Groups Membership:
All Level-1 ISs: Yes
All Level-2 ISs: Yes
IPv4 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv6 Unicast Topology: Enabled
Adjacency Formation: Running
Prefix Advertisement: Running
Metric (L1/L2): 10/10
Weight (L1/L2): 0/0
MPLS Max Label Stack: 1/3/10 (PRI/BKP/SRTE)
MPLS LDP Sync (L1/L2): Disabled/Disabled
FRR (L1/L2): L1 Not Enabled L2 Not Enabled
FRR Type: None None
IPv4 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): 10.3.5.3
Global Prefix(es): 10.3.5.0/24
IPv6 Address Family: Enabled
Protocol State: Up
Forwarding Address(es): fe80::f816:3eff:fe1c:d826
Global Prefix(es): None (No global addresses are configured)
LSP transmit timer expires in 0 ms
LSP transmission is idle
Can send up to 9 back-to-back LSPs in the next 0 ms
'''}
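    # The golden output above exercises both the loopback circuit (Media Type:
    # Loop) and LAN circuits, including per-level LAN ID, priority, and DIS
    # details.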
def test_empty_output(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisInterface(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
obj.parse()
def test_golden_output_1(self):
        self.device = Mock(**self.golden_output_1)
obj = ShowIsisInterface(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.parsed_output_1)
class TestShowIsisDatabaseDetail(unittest.TestCase):
''' Unit tests for commands/parser:
* show isis database detail / ShowIsisDatabaseDetail
'''
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_parsed_output_1 = {
"instance": {
"test": {
"level": {
1: {
"lspid": {
"R3.00-00": {
"lsp": {
"seq_num": "0x0000000d",
"checksum": "0x0476",
"local_router": True,
"holdtime": 578,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.36.3.3",
"extended_ipv4_reachability": {
"10.36.3.0/24": {
"ip_prefix": "10.36.3.0",
"prefix_length": "24",
"metric": 10,
},
"10.2.3.0/24": {
"ip_prefix": "10.2.3.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R3",
"ipv6_address": "2001:db8:3:3:3::3",
"mt_ipv6_reachability": {
"2001:db8:3:3:3::3/128": {
"ip_prefix": "2001:db8:3:3:3::3",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:2::/64": {
"ip_prefix": "2001:db8:10:2::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
},
"extended_is_neighbor": {
"R3.03": {"metric": 10},
"R5.01": {"metric": 10},
},
"mt_is_neighbor": {
"R3.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
},
"R3.03-00": {
"lsp": {
"seq_num": "0x00000007",
"checksum": "0x8145",
"local_router": False,
"holdtime": 988,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R4.00": {
"metric": 0},
},
},
"R3.05-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x7981",
"local_router": False,
"holdtime": 600,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R6.00": {
"metric": 0},
},
},
"R4.00-00": {
"lsp": {
"seq_num": "0x0000000c",
"checksum": "0x5c39",
"local_router": False,
"holdtime": 1115,
"received": 1200,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"extended_is_neighbor": {
"R3.03": {
"metric": 10},
"R4.01": {
"metric": 10},
},
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.64.4.4",
"extended_ipv4_reachability": {
"10.64.4.4/32": {
"ip_prefix": "10.64.4.4",
"prefix_length": "32",
"metric": 10,
},
"10.3.4.0/24": {
"ip_prefix": "10.3.4.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R4",
"mt_is_neighbor": {
"R3.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R4.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ipv6_address": "2001:db8:4:4:4::4",
"mt_ipv6_reachability": {
"2001:db8:4:4:4::4/128": {
"ip_prefix": "2001:db8:4:4:4::4",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
},
"R4.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0xf9a0",
"local_router": False,
"holdtime": 616,
"received": 1200,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R4.00": {
"metric": 0},
"R5.00": {
"metric": 0},
},
},
"R5.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x09f9",
"local_router": False,
"holdtime": 980,
"received": 1199,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R5",
"extended_is_neighbor": {
"R5.01": {
"metric": 10},
"R4.01": {
"metric": 10},
},
"mt_is_neighbor": {
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R4.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ip_address": "10.100.5.5",
"extended_ipv4_reachability": {
"10.100.5.5/32": {
"ip_prefix": "10.100.5.5",
"prefix_length": "32",
"metric": 10,
},
"10.3.5.0/24": {
"ip_prefix": "10.3.5.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:5:5:5::5",
"mt_ipv6_reachability": {
"2001:db8:5:5:5::5/128": {
"ip_prefix": "2001:db8:5:5:5::5",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R5.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x4ac5",
"local_router": False,
"holdtime": 521,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R3.00": {
"metric": 0},
},
},
"R5.03-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x3c38",
"local_router": False,
"holdtime": 1023,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R6.00-00": {
"lsp": {
"seq_num": "0x00000008",
"checksum": "0x1869",
"local_router": False,
"holdtime": 923,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.144.6.6",
"ip_address": "10.144.6.6",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R6",
"mt_is_neighbor": {
"R7.02": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R3.05": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R7.02": {
"metric": 40},
"R3.05": {
"metric": 40},
},
"extended_ipv4_reachability": {
"10.144.6.0/24": {
"ip_prefix": "10.144.6.0",
"prefix_length": "24",
"metric": 1,
},
"10.6.7.0/24": {
"ip_prefix": "10.6.7.0",
"prefix_length": "24",
"metric": 40,
},
"10.3.6.0/24": {
"ip_prefix": "10.3.6.0",
"prefix_length": "24",
"metric": 40,
},
},
"mt_ipv6_reachability": {
"2001:db8:6:6:6::6/128": {
"ip_prefix": "2001:db8:6:6:6::6",
"prefix_length": "128",
"metric": 1,
},
"2001:db8:10:6::/64": {
"ip_prefix": "2001:db8:10:6::",
"prefix_length": "64",
"metric": 40,
},
},
},
"R7.00-00": {
"lsp": {
"seq_num": "0x00000008",
"checksum": "0xaba8",
"local_router": False,
"holdtime": 965,
"received": 1198,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.196.7.7",
"ip_address": "10.196.7.7",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R7",
"mt_is_neighbor": {
"R7.02": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R5.03": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R7.02": {
"metric": 40},
"R5.03": {
"metric": 40},
},
"ip_interarea": {
"10.7.8.0/24": {
"address_family": {
"ipv4 unicast": {
"metric": 40},
"IPv6 Unicast": {
"metric": 40},
}
}
},
"extended_ipv4_reachability": {
"10.196.7.7/32": {
"ip_prefix": "10.196.7.7",
"prefix_length": "32",
"metric": 1,
},
"10.7.9.0/24": {
"ip_prefix": "10.7.9.0",
"prefix_length": "24",
"metric": 40,
},
},
"mt_ipv6_reachability": {
"2fc00:db20:35b:7399::5/128": {
"ip_prefix": "2001:db8:7:7:7::7",
"prefix_length": "128",
"metric": 1,
}
},
},
"R7.02-00": {
"lsp": {
"seq_num": "0x00000005",
"checksum": "0x8c3d",
"local_router": False,
"holdtime": 884,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R6.00": {"metric": 0},
"R7.00": {"metric": 0},
},
},
},
"total_lsp_count": 11,
"local_lsp_count": 1,
},
2: {
"lspid": {
"R2.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x5188",
"local_router": False,
"holdtime": 1082,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0001",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R2",
"extended_is_neighbor": {
"R3.07": {
"metric": 10}},
"mt_is_neighbor": {
"R3.07": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.16.2.2",
"extended_ipv4_reachability": {
"10.16.2.2/32": {
"ip_prefix": "10.16.2.2",
"prefix_length": "32",
"metric": 10,
},
"10.1.2.0/24": {
"ip_prefix": "10.1.2.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:2:2:2::2",
"mt_ipv6_reachability": {
"2001:db8:2:2:2::2/128": {
"ip_prefix": "2001:db8:2:2:2::2",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:1::/64": {
"ip_prefix": "2001:db8:10:1::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R3.00-00": {
"lsp": {
"seq_num": "0x00000011",
"checksum": "0x4c4c",
"local_router": True,
"holdtime": 979,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"extended_is_neighbor": {
"R3.07": {
"metric": 10},
"R5.01": {
"metric": 10},
},
"nlpid": ["0xcc", "0x8e"],
"ip_address": "10.36.3.3",
"extended_ipv4_reachability": {
"10.36.3.0/24": {
"ip_prefix": "10.36.3.0",
"prefix_length": "24",
"metric": 10,
},
"10.2.3.0/24": {
"ip_prefix": "10.2.3.0",
"prefix_length": "24",
"metric": 10,
},
},
"hostname": "R3",
"mt_is_neighbor": {
"R3.07": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ipv6_address": "2001:db8:3:3:3::3",
"mt_ipv6_reachability": {
"2001:db8:3:3:3::3/128": {
"ip_prefix": "2001:db8:3:3:3::3",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:2::/64": {
"ip_prefix": "2001:db8:10:2::",
"prefix_length": "64",
"metric": 10,
},
},
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
},
"R3.07-00": {
"lsp": {
"seq_num": "0x00000007",
"checksum": "0x652a",
"local_router": False,
"holdtime": 604,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R3.00": {
"metric": 0},
"R2.00": {
"metric": 0},
},
},
"R5.00-00": {
"lsp": {
"seq_num": "0x0000000b",
"checksum": "0x93bc",
"local_router": False,
"holdtime": 903,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R5",
"extended_is_neighbor": {
"R5.01": {
"metric": 10},
"R5.03": {
"metric": 10},
},
"mt_is_neighbor": {
"R5.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
"R5.03": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"},
},
"ip_address": "10.100.5.5",
"extended_ipv4_reachability": {
"10.100.5.5/32": {
"ip_prefix": "10.100.5.5",
"prefix_length": "32",
"metric": 10,
},
"10.3.5.0/24": {
"ip_prefix": "10.3.5.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:5:5:5::5",
"mt_ipv6_reachability": {
"2001:db8:5:5:5::5/128": {
"ip_prefix": "2001:db8:5:5:5::5",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:3::/64": {
"ip_prefix": "2001:db8:10:3::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R5.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x6236",
"local_router": False,
"holdtime": 426,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R3.00": {
"metric": 0},
},
},
"R5.03-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x54a8",
"local_router": False,
"holdtime": 965,
"received": 1199,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R5.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R7.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x7d78",
"local_router": False,
"holdtime": 766,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0002",
"nlpid": ["0xcc", "0x8e"],
"router_id": "10.196.7.7",
"ip_address": "10.196.7.7",
"mt_entries": {
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"Standard (IPv4 Unicast)": {},
},
"hostname": "R7",
"mt_is_neighbor": {
"R9.01": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
"R8.01": {
"metric": 40,
"mt_id": "MT (IPv6 Unicast)"},
},
"extended_is_neighbor": {
"R9.01": {
"metric": 40},
"R8.01": {
"metric": 40},
},
"extended_ipv4_reachability": {
"10.6.7.0/24": {
"ip_prefix": "10.6.7.0",
"prefix_length": "24",
"metric": 40,
},
"10.196.7.7/32": {
"ip_prefix": "10.196.7.7",
"prefix_length": "32",
"metric": 1,
},
},
"mt_ipv6_reachability": {
"2001:db8:10:6::/64": {
"ip_prefix": "2001:db8:10:6::",
"prefix_length": "64",
"metric": 40,
},
"2001:db8:7:7:7::7/128": {
"ip_prefix": "2001:db8:7:7:7::7",
"prefix_length": "128",
"metric": 1,
},
},
},
"R8.00-00": {
"lsp": {
"seq_num": "0x00000005",
"checksum": "0x1309",
"local_router": False,
"holdtime": 453,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0003",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R8",
"extended_is_neighbor": {
"R8.01": {
"metric": 10}},
"mt_is_neighbor": {
"R8.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.1.8.8",
"extended_ipv4_reachability": {
"10.1.8.8/32": {
"ip_prefix": "10.1.8.8",
"prefix_length": "32",
"metric": 10,
},
"10.7.8.0/24": {
"ip_prefix": "10.7.8.0",
"prefix_length": "24",
"metric": 10,
},
},
"ipv6_address": "2001:db8:8:8:8::8",
"mt_ipv6_reachability": {
"2001:db8:8:8:8::8/128": {
"ip_prefix": "2001:db8:8:8:8::8",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:7::/64": {
"ip_prefix": "2001:db8:10:7::",
"prefix_length": "64",
"metric": 10,
},
},
},
"R8.01-00": {
"lsp": {
"seq_num": "0x00000004",
"checksum": "0x9503",
"local_router": False,
"holdtime": 1143,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R8.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
"R9.00-00": {
"lsp": {
"seq_num": "0x00000006",
"checksum": "0xfd4e",
"local_router": False,
"holdtime": 800,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49.0004",
"nlpid": ["0xcc", "0x8e"],
"mt_entries": {
"Standard (IPv4 Unicast)": {},
"IPv6 Unicast": {
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
},
"hostname": "R9",
"extended_is_neighbor": {
"R9.01": {
"metric": 10}},
"mt_is_neighbor": {
"R9.01": {
"metric": 10,
"mt_id": "MT (IPv6 Unicast)"}
},
"ip_address": "10.69.9.9",
"extended_ipv4_reachability": {
"10.69.9.9/32": {
"ip_prefix": "10.69.9.9",
"prefix_length": "32",
"metric": 10,
},
"10.7.9.0/24": {
"ip_prefix": "10.7.9.0",
"prefix_length": "24",
"metric": 10,
},
"10.9.10.0/24": {
"ip_prefix": "10.9.10.0",
"prefix_length": "24",
"metric": 10,
},
"10.10.10.10/32": {
"ip_prefix": "10.10.10.10",
"prefix_length": "32",
"metric": 20,
},
},
"ipv6_address": "2001:db8:9:9:9::9",
"mt_ipv6_reachability": {
"2001:db8:9:9:9::9/128": {
"ip_prefix": "2001:db8:9:9:9::9",
"prefix_length": "128",
"metric": 10,
},
"2001:db8:10:7::/64": {
"ip_prefix": "2001:db8:10:7::",
"prefix_length": "64",
"metric": 10,
},
},
"ipv6_reachability": {
"2001:2:2:2::2/128": {
"ip_prefix": "2001:2:2:2::2",
"prefix_length": "128",
"metric": "10",
}
},
},
"R9.01-00": {
"lsp": {
"seq_num": "0x00000003",
"checksum": "0xfdce",
"local_router": False,
"holdtime": 706,
"received": 1198,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"extended_is_neighbor": {
"R9.00": {
"metric": 0},
"R7.00": {
"metric": 0},
},
},
},
"total_lsp_count": 11,
"local_lsp_count": 1,
},
}
}
}
}
golden_output_1 = {'execute.return_value': '''
RP/0/RP0/CPU0:R3#show isis database detail
Wed Jan 30 22:07:52.759 UTC
IS-IS test (Level-1) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime/Rcvd ATT/P/OL
R3.00-00 * 0x0000000d 0x0476 578 /* 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.36.3.3
Metric: 10 IP-Extended 10.36.3.0/24
Metric: 10 IP-Extended 10.2.3.0/24
Hostname: R3
IPv6 Address: 2001:db8:3:3:3::3
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:3:3:3::3/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:2::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 1/0/0
Metric: 10 IS-Extended R3.03
Metric: 10 IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
R3.03-00 0x00000007 0x8145 988 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R4.00
R3.05-00 0x00000004 0x7981 600 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R6.00
R4.00-00 0x0000000c 0x5c39 1115 /1200 0/0/0
Area Address: 49.0002
Metric: 10 IS-Extended R3.03
Metric: 10 IS-Extended R4.01
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.64.4.4
Metric: 10 IP-Extended 10.64.4.4/32
Metric: 10 IP-Extended 10.3.4.0/24
Hostname: R4
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R4.01
IPv6 Address: 2001:db8:4:4:4::4
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:4:4:4::4/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
R4.01-00 0x00000004 0xf9a0 616 /1200 0/0/0
Metric: 0 IS-Extended R4.00
Metric: 0 IS-Extended R5.00
R5.00-00 0x00000009 0x09f9 980 /1199 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 1/0/0
Hostname: R5
Metric: 10 IS-Extended R5.01
Metric: 10 IS-Extended R4.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R4.01
IP Address: 10.100.5.5
Metric: 10 IP-Extended 10.100.5.5/32
Metric: 10 IP-Extended 10.3.5.0/24
IPv6 Address: 2001:db8:5:5:5::5
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:5:5:5::5/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
R5.01-00 0x00000004 0x4ac5 521 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R3.00
R5.03-00 0x00000004 0x3c38 1023 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R7.00
R6.00-00 0x00000008 0x1869 923 /1199 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.144.6.6
IP Address: 10.144.6.6
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R6
Metric: 40 MT (IPv6 Unicast) IS-Extended R7.02
Metric: 40 MT (IPv6 Unicast) IS-Extended R3.05
Metric: 40 IS-Extended R7.02
Metric: 40 IS-Extended R3.05
Metric: 1 IP-Extended 10.144.6.0/24
Metric: 40 IP-Extended 10.6.7.0/24
Metric: 40 IP-Extended 10.3.6.0/24
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:6:6:6::6/128
Metric: 40 MT (IPv6 Unicast) IPv6 2001:db8:10:6::/64
R7.00-00 0x00000008 0xaba8 965 /1198 1/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.196.7.7
IP Address: 10.196.7.7
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R7
Metric: 40 MT (IPv6 Unicast) IS-Extended R7.02
Metric: 40 MT (IPv6 Unicast) IS-Extended R5.03
Metric: 40 IS-Extended R7.02
Metric: 40 IS-Extended R5.03
Metric: 40 IP-Extended-Interarea 10.7.8.0/24
Metric: 1 IP-Extended 10.196.7.7/32
Metric: 40 IP-Extended 10.7.9.0/24
Metric: 40 MT (IPv6 Unicast) IPv6-Interarea 2001:db8:10:7::/64
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:7:7:7::7/128
R7.02-00 0x00000005 0x8c3d 884 /1198 0/0/0
Metric: 0 IS-Extended R6.00
Metric: 0 IS-Extended R7.00
Total Level-1 LSP count: 11 Local Level-1 LSP count: 1
IS-IS test (Level-2) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime/Rcvd ATT/P/OL
R2.00-00 0x00000009 0x5188 1082 /1199 0/0/0
Area Address: 49.0001
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R2
Metric: 10 IS-Extended R3.07
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.07
IP Address: 10.16.2.2
Metric: 10 IP-Extended 10.16.2.2/32
Metric: 10 IP-Extended 10.1.2.0/24
IPv6 Address: 2001:db8:2:2:2::2
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:2:2:2::2/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:1::/64
R3.00-00 * 0x00000011 0x4c4c 979 /* 0/0/0
Area Address: 49.0002
Metric: 10 IS-Extended R3.07
Metric: 10 IS-Extended R5.01
NLPID: 0xcc
NLPID: 0x8e
IP Address: 10.36.3.3
Metric: 10 IP-Extended 10.36.3.0/24
Metric: 10 IP-Extended 10.2.3.0/24
Hostname: R3
Metric: 10 MT (IPv6 Unicast) IS-Extended R3.07
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
IPv6 Address: 2001:db8:3:3:3::3
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:3:3:3::3/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:2::/64
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
R3.07-00 0x00000007 0x652a 604 /* 0/0/0
Metric: 0 IS-Extended R3.00
Metric: 0 IS-Extended R2.00
R5.00-00 0x0000000b 0x93bc 903 /1199 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R5
Metric: 10 IS-Extended R5.01
Metric: 10 IS-Extended R5.03
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R5.03
IP Address: 10.100.5.5
Metric: 10 IP-Extended 10.100.5.5/32
Metric: 10 IP-Extended 10.3.5.0/24
IPv6 Address: 2001:db8:5:5:5::5
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:5:5:5::5/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:3::/64
R5.01-00 0x00000004 0x6236 426 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R3.00
R5.03-00 0x00000004 0x54a8 965 /1199 0/0/0
Metric: 0 IS-Extended R5.00
Metric: 0 IS-Extended R7.00
R7.00-00 0x00000009 0x7d78 766 /1198 0/0/0
Area Address: 49.0002
NLPID: 0xcc
NLPID: 0x8e
Router ID: 10.196.7.7
IP Address: 10.196.7.7
MT: IPv6 Unicast 0/0/0
MT: Standard (IPv4 Unicast)
Hostname: R7
Metric: 40 MT (IPv6 Unicast) IS-Extended R9.01
Metric: 40 MT (IPv6 Unicast) IS-Extended R8.01
Metric: 40 IS-Extended R9.01
Metric: 40 IS-Extended R8.01
Metric: 40 IP-Extended 10.6.7.0/24
Metric: 1 IP-Extended 10.196.7.7/32
Metric: 40 MT (IPv6 Unicast) IPv6 2001:db8:10:6::/64
Metric: 1 MT (IPv6 Unicast) IPv6 2001:db8:7:7:7::7/128
R8.00-00 0x00000005 0x1309 453 /1198 0/0/0
Area Address: 49.0003
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R8
Metric: 10 IS-Extended R8.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R8.01
IP Address: 10.1.8.8
Metric: 10 IP-Extended 10.1.8.8/32
Metric: 10 IP-Extended 10.7.8.0/24
IPv6 Address: 2001:db8:8:8:8::8
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:8:8:8::8/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:7::/64
R8.01-00 0x00000004 0x9503 1143 /1198 0/0/0
Metric: 0 IS-Extended R8.00
Metric: 0 IS-Extended R7.00
R9.00-00 0x00000006 0xfd4e 800 /1198 0/0/0
Area Address: 49.0004
NLPID: 0xcc
NLPID: 0x8e
MT: Standard (IPv4 Unicast)
MT: IPv6 Unicast 0/0/0
Hostname: R9
Metric: 10 IS-Extended R9.01
Metric: 10 MT (IPv6 Unicast) IS-Extended R9.01
IP Address: 10.69.9.9
Metric: 10 IP-Extended 10.69.9.9/32
Metric: 10 IP-Extended 10.7.9.0/24
Metric: 10 IP-Extended 10.9.10.0/24
Metric: 20 IP-Extended 10.10.10.10/32
IPv6 Address: 2001:db8:9:9:9::9
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:9:9:9::9/128
Metric: 10 MT (IPv6 Unicast) IPv6 2001:db8:10:7::/64
Metric: 10 IPv6 2001:2:2:2::2/128
R9.01-00 0x00000003 0xfdce 706 /1198 0/0/0
Metric: 0 IS-Extended R9.00
Metric: 0 IS-Extended R7.00
Total Level-2 LSP count: 11 Local Level-2 LSP count: 1
'''}
golden_parsed_output_2 = {
"instance": {
"isp": {
"level": {
1: {
"lspid": {
"router-5.00-00": {
"lsp": {
"seq_num": "0x00000003",
"checksum": "0x8074460",
"local_router": False,
"holdtime": 457,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49",
"nlpid": ["0xcc"],
"hostname": "router-5",
"ip_address": "172.16.186.5",
"ip_neighbor": {
"172.16.115.0/24": {
"ip_prefix": "172.16.115.0",
"prefix_length": "24",
"metric": 0,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
},
"is_neighbor": {
"router-11.00": {
"metric": 10},
"router-11.01": {
"metric": 10},
},
},
"router-11.00-00": {
"lsp": {
"seq_num": "0x0000000b",
"checksum": "0x8074460",
"local_router": True,
"holdtime": 1161,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49",
"nlpid": ["0xcc"],
"hostname": "router-11",
"ip_address": "172.16.196.11",
"ip_neighbor": {
"172.16.76.0/24": {
"ip_prefix": "172.16.76.0",
"prefix_length": "24",
"metric": 0,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
},
"is_neighbor": {
"router-11.01": {
"metric": 10},
"router-5.00": {
"metric": 10},
},
},
"router-11.01-00": {
"lsp": {
"seq_num": "0x00000001",
"checksum": "0x80770ec",
"local_router": True,
"holdtime": 457,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"is_neighbor": {
"router-11.00": {
"metric": 0},
"router-5.00": {
"metric": 0},
},
},
},
"total_lsp_count": 3,
"local_lsp_count": 2,
},
2: {
"lspid": {
"router-5.00-00": {
"lsp": {
"seq_num": "0x00000005",
"checksum": "0x807997c",
"local_router": False,
"holdtime": 457,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49",
"nlpid": ["0xcc"],
"hostname": "router-5",
"ip_address": "172.16.166.5",
"ip_neighbor": {
"172.16.115.0/24": {
"ip_prefix": "172.16.115.0",
"prefix_length": "24",
"metric": 0,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.94.0/24": {
"ip_prefix": "172.16.94.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.21.0/24": {
"ip_prefix": "172.16.21.0",
"prefix_length": "24",
"metric": 10,
},
},
"is_neighbor": {
"router-11.00": {
"metric": 10},
"router-11.01": {
"metric": 10},
},
},
"router-11.00-00": {
"lsp": {
"seq_num": "0x0000000d",
"checksum": "0x807997c",
"local_router": True,
"holdtime": 1184,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "49",
"nlpid": ["0xcc"],
"hostname": "router-11",
"ip_address": "172.28.111.111",
"ip_neighbor": {
"172.16.21.0/24": {
"ip_prefix": "172.16.21.0",
"prefix_length": "24",
"metric": 0,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.166.0/24": {
"ip_prefix": "172.16.166.0",
"prefix_length": "24",
"metric": 10,
},
"172.16.115.0/24": {
"ip_prefix": "172.16.115.0",
"prefix_length": "24",
"metric": 10,
},
},
"is_neighbor": {
"router-11.01": {
"metric": 10},
"router-5.00": {
"metric": 10},
},
},
"router-gsr11.01-00": {
"lsp": {
"seq_num": "0x00000001",
"checksum": "0x80770ec",
"local_router": True,
"holdtime": 457,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"is_neighbor": {
"router-11.00": {
"metric": 0},
"router-5.00": {
"metric": 0},
},
},
},
"total_lsp_count": 3,
"local_lsp_count": 2,
},
}
}
}
}
# asr9k
golden_output_2 = {'execute.return_value': '''
router# show isis database detail
IS-IS isp (Level-1) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
router-5.00-00 0x00000003 0x8074460 457 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-5
IP Address: 172.16.186.5
Metric: 0 IP 172.16.115.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.00
Metric: 10 IS router-11.01
router-11.00-00 * 0x0000000b 0x8074460 1161 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-11
IP Address: 172.16.196.11
Metric: 0 IP 172.16.76.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.01
Metric: 10 IS router-5.00
router-11.01-00 * 0x00000001 0x80770ec 457 0/0/0
Metric: 0 IS router-11.00
Metric: 0 IS router-5.00
Total LSP count: 3 (L1: 3, L2 0, local L1: 2, local L2 0)
IS-IS isp (Level-2) Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
router-5.00-00 0x00000005 0x807997c 457 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-5
IP Address: 172.16.166.5
Metric: 0 IP 172.16.115.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.94.0/24
Metric: 10 IS router-11.00
Metric: 10 IS router-11.01
Metric: 10 IP 172.16.21.0/24
router-11.00-00 * 0x0000000d 0x807997c 1184 0/0/0
Area Address: 49
NLPID: 0xcc
Hostname: router-11
IP Address: 172.28.111.111
Metric: 0 IP 172.16.21.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IP 172.16.166.0/24
Metric: 10 IS router-11.01
Metric: 10 IS router-5.00
Metric: 10 IP 172.16.115.0/24
router-gsr11.01-00 * 0x00000001 0x80770ec 457 0/0/0
Metric: 0 IS router-11.00
Metric: 0 IS router-5.00
Total LSP count: 3 (L1: 0, L2 3, local L1: 0, local L2 2)
'''}
golden_parsed_output_3 = {
"instance": {
"": {
"level": {
1: {
"lspid": {
"0000.0C00.0C35.00-00": {
"lsp": {
"seq_num": "0x0000000C",
"checksum": "0x5696",
"local_router": False,
"holdtime": 325,
"attach_bit": 0,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "39.0001",
"is_neighbor": {
"0000.0C00.62E6.03": {
"metric": 10}},
"es_neighbor": {
"0000.0C00.0C35": {
"metric": 0}},
},
"0000.0C00.40AF.00-00": {
"lsp": {
"seq_num": "0x00000009",
"checksum": "0x8452",
"local_router": True,
"holdtime": 608,
"attach_bit": 1,
"p_bit": 0,
"overload_bit": 0,
},
"area_address": "47.0004.004D.0001",
"topology": ["IPv4 (0x0)", "IPv6 (0x2)"],
"nlpid": ["0x8E"],
"ip_address": "172.16.21.49",
"is_neighbor": {
"0800.2B16.24EA.01": {
"metric": 10},
"0000.0C00.62E6.03": {
"metric": 10},
"cisco.03": {
"metric": 10},
},
"es_neighbor": {
"0000.0C00.40AF": {
"metric": 0}},
"ipv6_address": "2001:0DB8::/32",
"ipv6_reachability": {
"2001:0DB8::/64": {
"ip_prefix": "2001:0DB8::",
"prefix_length": "64",
"metric": "10",
}
},
"extended_is_neighbor": {
"cisco.03": {
"metric": 5},
"cisco1.03": {
"metric": 10},
},
},
}
}
}
}
}
}
# ncs5k
golden_output_3 = {'execute.return_value': '''
IS-IS Level-1 Link State Database
LSPID LSP Seq Num LSP Checksum LSP Holdtime ATT/P/OL
0000.0C00.0C35.00-00 0x0000000C 0x5696 325 0/0/0
Area Address: 47.0004.004D.0001
Area Address: 39.0001
Metric: 10 IS 0000.0C00.62E6.03
Metric: 0 ES 0000.0C00.0C35
0000.0C00.40AF.00-00* 0x00000009 0x8452 608 1/0/0
Area Address: 47.0004.004D.0001
Topology: IPv4 (0x0) IPv6 (0x2)
NLPID: 0xCC 0x8E
IP Address: 172.16.21.49
Metric: 10 IS 0800.2B16.24EA.01
Metric: 10 IS 0000.0C00.62E6.03
Metric: 0 ES 0000.0C00.40AF
IPv6 Address: 2001:0DB8::/32
Metric: 10 IPv6 (MT-IPv6) 2001:0DB8::/64
Metric: 5 IS-Extended cisco.03
Metric: 10 IS-Extended cisco1.03
Metric: 10 IS (MT-IPv6) cisco.03
'''}
def test_empty_output(self):
self.device = Mock(**self.empty_output)
obj = ShowIsisDatabaseDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_output_1(self):
self.device = Mock(**self.golden_output_1)
obj = ShowIsisDatabaseDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_output_2(self):
self.device = Mock(**self.golden_output_2)
obj = ShowIsisDatabaseDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_2)
def test_output_3(self):
self.device = Mock(**self.golden_output_3)
obj = ShowIsisDatabaseDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_3)
if __name__ == '__main__':
unittest.main()
```
|
{
"source": "jeremyschulman/idreamofgenie",
"score": 2
}
|
#### File: idreamofgenie/idreamofgenie/basic.py
```python
import re
from first import first
# unicon is part of genie framework
from unicon.core.errors import SubCommandFailure
__all__ = [
'find_os_name',
'find_cdp_neighbor',
'find_ipaddr_by_arp',
'find_macaddr_by_arp',
'find_macaddr_via_iface',
'find_portchan_members',
'ping'
]
def ping(device, target):
try:
device.ping(target)
return True
except SubCommandFailure:
return False
def find_macaddr_by_arp(dev, ipaddr):
""" '10.9.2.171 00:13:37 0050.abcd.de17 Vlan18' """
cli_text = dev.execute(f'show ip arp {ipaddr} | inc {ipaddr}')
if not cli_text or 'Invalid' in cli_text:
return None
found_ipaddr, timestamp, macaddr, ifname = re.split(r'\s+', cli_text)
return {
'macaddr': macaddr,
'interface': ifname
}
def find_ipaddr_by_arp(dev, macaddr):
""" '10.9.2.171 00:13:37 0050.abcd.de17 Vlan18' """
cli_text = dev.execute(f'show ip arp | inc {macaddr}')
if not cli_text or 'Invalid' in cli_text:
return None
ipaddr, timestamp, macaddr, ifname = re.split(r'\s+', cli_text)
return {
'ipaddr': ipaddr,
'interface': ifname
}
def find_macaddr_via_iface(dev, macaddr):
""" '* 17 0050.abcd.de17 dynamic 0 F F Po1' """
cli_text = dev.execute(f'show mac address-table | inc {macaddr}')
if not cli_text:
return None
# the last item is the interface name
return cli_text.split()[-1]
def find_portchan_members(dev, ifname):
""" '1 Po1(SU) Eth LACP Eth2/1(P) Eth2/2(P)' """
cli_text = dev.execute(f'show port-channel summary interface {ifname} | inc {ifname}')
if not cli_text:
return None
members = re.split(r'\s+', cli_text)[4:]
return [member.split('(')[0] for member in members]
def find_os_name(dev=None, content=None):
if not content:
content = dev.execute('show version')
# look for specific Cisco OS names. If one is not found, it means that the
# CDP neighbor is not a recognized device, and return None. If it is
# recognized then the re will return a list, for which we need to extract
# the actual found NOS name; thus using the first() function twice.
os_name = first(re.findall('(IOSXE)|(NX-OS)|(IOS)', content, re.M))
if not os_name:
return None
os_name = first(os_name)
# convert OS name from show output to os name required by genie, if the OS
# is not found, then return None
return {'IOSXE': 'iosxe', 'NX-OS': 'nxos', 'IOS': 'ios'}[os_name]
def find_cdp_neighbor(dev, ifname):
if dev.os == 'nxos':
cli_text = dev.execute(f'show cdp neighbor interface {ifname} detail')
if not cli_text or 'Invalid' in cli_text:
return None
else:
cli_text = dev.execute(f'show cdp neighbors {ifname} detail')
if "Total cdp entries displayed : 0" in cli_text:
return None
device = first(re.findall('Device ID:(.*)$', cli_text, re.M))
if device and '.' in device:
device = first(device.split('.'))
platform = first(re.findall('Platform: (.*),', cli_text, re.M))
os_name = find_os_name(content=cli_text)
return {
'device': device,
'platform': platform,
'os_name': os_name
}
```
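For orientation, a short usage sketch showing how these helpers can be chained together; it assumes `dev` is an already-connected Genie/Unicon device handle, and the function and values below are invented for illustration.

```python
# Illustrative only: assumes `dev` is a connected Genie/Unicon device handle.
from idreamofgenie.basic import (
    find_macaddr_by_arp,
    find_portchan_members,
    find_cdp_neighbor,
)


def trace_host(dev, ipaddr):
    """Follow an IP address to its switchport and CDP neighbor, if any."""
    arp = find_macaddr_by_arp(dev, ipaddr)
    if arp is None:
        return None

    ifname = arp['interface']

    # expand a port-channel into its member interfaces, otherwise keep as-is
    members = (find_portchan_members(dev, ifname)
               if ifname.lower().startswith('po') else [ifname])

    return {
        'macaddr': arp['macaddr'],
        'interface': ifname,
        'members': members,
        'neighbor': find_cdp_neighbor(dev, ifname),
    }
```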
|
{
"source": "jeremyschulman/ipf-netbox",
"score": 2
}
|
#### File: ipf_netbox/cli/__main__.py
```python
from importlib import metadata
import os
import click
import httpx
import uvloop
from parsimonious.exceptions import ParseError
from ipf_netbox.config import load_config_file
from ipf_netbox import consts
VERSION = metadata.version("ipf-netbox")
@click.group()
@click.version_option(VERSION)
@click.option(
"--config",
"-C",
type=click.File(),
is_eager=True,
default=lambda: os.environ.get(consts.ENV_CONFIG_FILE, consts.DEFAULT_CONFIG_FILE),
callback=lambda ctx, param, value: load_config_file(filepath=value),
)
def cli(**kwargs):
""" IP Fabric - Netbox Utility"""
pass
@cli.command("test")
def test():
from code import interact
interact(local=globals())
def script():
uvloop.install()
try:
cli()
except ParseError as exc:
print(f"FAIL: Invalid filter expression: '{exc.text}'")
except httpx.HTTPStatusError as exc:
print(f"FAIL: HTTP error {exc.response.text}")
except (httpx.ReadTimeout, httpx.PoolTimeout) as exc:
print(f"FAIL: HTTP read timeout on URL: {exc.request.url}")
print(f"BODY: {exc.request.stream._body}")
```
#### File: ipf-netbox/ipf_netbox/filtering.py
```python
from typing import List, AnyStr, Optional, Callable, Dict, Sequence
import re
import operator
__all__ = ["create_filter"]
value_pattern = r"(?P<value>\S+)$"
wordsep_re = re.compile(r"\s+|,")
def mk_op_filter(_reg, _fieldn):
""" create a single op filter """
def op_filter(rec):
""" using the regular expression match call """
return _reg.match(rec[_fieldn])
op_filter.__doc__ = f"limit_{_fieldn}({_reg.pattern})"
op_filter.__name__ = op_filter.__doc__
op_filter.__qualname__ = op_filter.__doc__
return op_filter
def create_filter_function(op_filters, optest_fn):
""" create a filtering functions based on the operational test """
def filter_fn(rec):
""" for each of the filters """
for op_fn in op_filters:
if optest_fn(op_fn(rec)):
return False
return True
return filter_fn
def create_filter(
constraints: List[AnyStr],
field_names: Sequence[AnyStr],
include: Optional[bool] = True,
) -> Callable[[Dict], bool]:
"""
This function returns a function that is used to filter source_records records.
Parameters
----------
constraints:
A list of constraint expressions that are in the form "<field-name>=<value>".
field_names:
A list of known field names
include:
When True, the filter function will match when the constraint is true,
for example if the constraint is "os_name=eos", then it would match
records that have os_name field equal to "eos".
When False, the filter function will match when the constraint is not
true. For example if the constraint is "os_name=eos", then the filter
function would match records that have os_name fields not equal to
"eos".
Returns
-------
The returned filter function expects a source_records record as the single
input parameter, and returns True/False on match.
"""
fieldn_pattern = "^(?P<keyword>" + "|".join(fieldn for fieldn in field_names) + ")"
field_value_reg = re.compile(fieldn_pattern + "=" + value_pattern)
op_filters = list()
for filter_expr in constraints:
# next check for keyword=value filtering use-case
if (mo := field_value_reg.match(filter_expr)) is None:
raise ValueError(f"Invalid filter expression: {filter_expr}")
fieldn, value = mo.groupdict().values()
try:
value_reg = re.compile(f"^{value}$", re.IGNORECASE)
except re.error as exc:
raise ValueError(
f"Invalid filter regular-expression: {filter_expr}: {str(exc)}"
)
op_filters.append(mk_op_filter(value_reg, fieldn))
optest_fn = operator.not_ if include else operator.truth
filter_fn = create_filter_function(op_filters, optest_fn)
filter_fn.op_filters = op_filters
filter_fn.constraints = constraints
return filter_fn
```
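To make the filtering behavior concrete, here is a small self-contained sketch of `create_filter` in use; the records, field names, and constraint values are invented for illustration.

```python
# Hypothetical records used only to illustrate create_filter().
from ipf_netbox.filtering import create_filter

records = [
    {"hostname": "sw01", "os_name": "eos"},
    {"hostname": "rtr01", "os_name": "iosxe"},
]
fields = ["hostname", "os_name"]

# include=True (default): keep records whose os_name matches "eos"
keep_eos = create_filter(constraints=["os_name=eos"], field_names=fields)
assert [r["hostname"] for r in filter(keep_eos, records)] == ["sw01"]

# include=False: keep records whose os_name does NOT match "eos"
drop_eos = create_filter(
    constraints=["os_name=eos"], field_names=fields, include=False
)
assert [r["hostname"] for r in filter(drop_eos, records)] == ["rtr01"]
```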
#### File: ipf-netbox/ipf_netbox/mappings.py
```python
from typing import Callable
from functools import lru_cache, partial
import re
from ipf_netbox.config import get_config
@lru_cache()
def _expander_os() -> Callable[[str], str]:
config = get_config()
cfg_map = config.maps["interfaces"]
mapper = re.compile(r"|".join(list(cfg_map)))
def _expander(ifname):
return mapper.sub(lambda mo: cfg_map[mo.group(0)], ifname)
return _expander
def expand_interface(ifname: str) -> str:
return _expander_os()(ifname)
@lru_cache()
def domain_remover():
cfg_obj = get_config()
any_domain = "|".join(
re.escape(f".{domain}") for domain in cfg_obj.defaults.domain_names
)
return partial(re.compile(any_domain).sub, repl="")
def normalize_hostname(hostname):
return domain_remover()(string=hostname.lower())
```
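A minimal sketch of how the mapping helpers behave, assuming a configuration file has already been loaded; the interface map and domain name shown in the comments are hypothetical values, not the project defaults.

```python
# Assumes load_config_file() has been called and the loaded config contains,
# hypothetically, maps["interfaces"] = {"Eth": "Ethernet", "Po": "Port-Channel"}
# and defaults.domain_names = ["example.net"].
from ipf_netbox.mappings import expand_interface, normalize_hostname

print(expand_interface("Eth49/1"))             # -> Ethernet49/1 (with the map above)
print(expand_interface("Po10"))                # -> Port-Channel10

# hostnames are lower-cased and any configured domain suffix is stripped
print(normalize_hostname("SW01.example.net"))  # -> sw01
```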
#### File: ipf_netbox/netbox/interfaces.py
```python
from typing import Dict, Optional
import asyncio
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from ipf_netbox.collection import Collector, CollectionCallback
from ipf_netbox.collections.interfaces import InterfaceCollection
from ipf_netbox.netbox.source import NetboxSource, NetboxClient
# -----------------------------------------------------------------------------
# Exports
# -----------------------------------------------------------------------------
__all__ = ["NetboxInterfaceCollection"]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
class NetboxInterfaceCollection(Collector, InterfaceCollection):
source_class = NetboxSource
async def fetch(self, hostname, **params):
"""
fetch interfaces must be done on a per-device (hostname) basis.
fetch args are Netbox API specific.
"""
self.source_records.extend(
await self.source.client.paginate(
url="/dcim/interfaces/", filters=dict(device=hostname, **params)
)
)
async def fetch_keys(self, keys: Dict):
await asyncio.gather(
*(
self.fetch(hostname=rec["hostname"], name=rec["interface"])
for rec in keys.values()
)
)
def fingerprint(self, rec: Dict) -> Dict:
return dict(
hostname=rec["device"]["name"],
interface=rec["name"],
description=rec["description"],
)
async def create_missing(
self, missing, callback: Optional[CollectionCallback] = None
):
client: NetboxClient = self.source.client
device_records = await client.fetch_devices(
hostname_list=(rec["hostname"] for rec in missing.values()), key="name"
)
def _create_task(key, fields):
hostname, if_name = key
if hostname not in device_records:
print(f"ERROR: device {hostname} missing.")
return None
# TODO: set the interface type correctly based on some kind of mapping definition.
# for now, use this name-basis for loopback, vlan, port-channel.
if_type = {
"v": "virtual", # vlan
"l": "virtual", # loopback
"p": "lag", # port-channel
}.get(if_name[0].lower(), "other")
return client.post(
url="/dcim/interfaces/",
json=dict(
device=device_records[hostname]["id"],
name=if_name,
description=fields["description"],
type=if_type,
),
)
await self.source.update(
updates=missing, callback=callback, creator=_create_task
)
async def update_changes(
self, changes: Dict, callback: Optional[CollectionCallback] = None
):
# Presently the only field to update is description; so we don't need to put
# much logic into this post body process. Might need to in the future.
client = self.source.client
def _create_task(key, fields):
if_id = self.source_record_keys[key]["id"]
return client.patch(
url=f"/dcim/interfaces/{if_id}/",
json=dict(description=fields["description"]),
)
await self.source.update(changes, callback, _create_task)
```
#### File: ipf_netbox/tasks/devices.py
```python
import asyncio
from operator import itemgetter
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
from tabulate import tabulate
from httpx import Response
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from ipf_netbox.collection import get_collection
from ipf_netbox.diff import diff, DiffResults
from ipf_netbox.netbox.devices import NetboxDeviceCollection
from ipf_netbox.ipfabric.devices import IPFabricDeviceCollection
from ipf_netbox.tasks.tasktools import with_sources
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
@with_sources
async def ensure_devices(ipf, netbox, **params) -> IPFabricDeviceCollection:
"""
Ensure Netbox contains devices found in IP Fabric.
Parameters
----------
ipf: IPFabric Source instance
netbox: Netbox Source instance
Other Parameters
----------------
dry_run: bool
Determines dry-run mode
devices: List[str]
List of device to use as basis for action
filters: str
The IPF device inventory filter expression to use
as basis for action.
Returns
-------
IPFabricDeviceCollection:
The IP Fabric device collection, that can be used by later processes
that need to cross reference this information.
"""
print("\nEnsure Devices.")
print("Fetching from IP Fabric ... ", flush=True, end="")
ipf_col: IPFabricDeviceCollection = get_collection( # noqa
source=ipf, name="devices"
)
filters = params["filters"]
await ipf_col.fetch(filters=filters)
ipf_col.make_keys()
print(f"{len(ipf_col)} items.", flush=True)
if not len(ipf_col.source_records):
print(f"Done. No source_records matching filter:\n\t{filters}")
return ipf_col
print("Fetching from Netbox ... ", flush=True, end="")
netbox_col: NetboxDeviceCollection = get_collection( # noqa
source=netbox, name="devices"
)
await netbox_col.fetch()
netbox_col.make_keys()
print(f"{len(netbox_col)} items.", flush=True)
diff_res = diff(
source_from=ipf_col,
sync_to=netbox_col,
fields_cmp={
"model": lambda f: True # TODO: do not consider model for diff right now
},
)
if diff_res is None:
print("No changes required.")
return ipf_col
_report_proposed_changes(netbox_col, diff_res)
if params.get("dry_run", False) is True:
return ipf_col
updates = list()
if diff_res.missing:
updates.append(_execute_create(ipf_col, netbox_col, diff_res.missing))
if diff_res.changes:
updates.append(_execute_changes(params, ipf_col, netbox_col, diff_res.changes))
await asyncio.gather(*updates)
return ipf_col
def _report_proposed_changes(nb_col: NetboxDeviceCollection, diff_res: DiffResults):
if diff_res.missing:
print("\nNetbox Missing Devices")
tabular_data = sorted(
map(
itemgetter("hostname", "site", "ipaddr", "vendor", "model"),
diff_res.missing.values(),
),
key=itemgetter(0, 1),
)
print(
tabulate(
tabular_data=tabular_data,
headers=["Hostname", "Site", "IP address", "Vendor", "Model"],
),
end="\n\n",
)
if diff_res.changes:
print("\nDifferences:\tNetbox -> IP Fabric", end="\n")
for key, ch_fields in diff_res.changes.items():
fields = nb_col.inventory[key]
hostname = fields["hostname"]
kv_pairs = ", ".join(
f"{k_}: {fields[k_] or '(empty)'} -> {v_}"
for k_, v_ in ch_fields.items()
)
print(f" {hostname}: {kv_pairs}")
print("\n")
async def _ensure_primary_ipaddrs(
ipf_col: IPFabricDeviceCollection, nb_col: NetboxDeviceCollection, missing: dict
):
ipf_col_ipaddrs = get_collection(source=ipf_col.source, name="ipaddrs")
ipf_col_ifaces = get_collection(source=ipf_col.source, name="interfaces")
# -------------------------------------------------------------------------
# we need to fetch all of the IPF ipaddr records so that we can bind the
# management IP address to the Netbox device record. We use the **IPF**
# collection as the basis for the missing records so that the filter values
# match. This is done to avoid any mapping changes that happened via the
# collection intake process. This code is a bit of 'leaky-abstraction',
# so TODO: cleanup.
# -------------------------------------------------------------------------
await asyncio.gather(
*(
ipf_col_ipaddrs.fetch(
filters=f"and(hostname = {_item['hostname']}, ip = '{_item['loginIp']}')"
)
for _item in [ipf_col.source_record_keys[key] for key in missing.keys()]
)
)
ipf_col_ipaddrs.make_keys()
# -------------------------------------------------------------------------
# now we need to gather the IPF interface records so we have any _fields that
# need to be stored into Netbox (e.g. description)
# -------------------------------------------------------------------------
await asyncio.gather(
*(
ipf_col_ifaces.fetch(
filters=f"and(hostname = {_item['hostname']}, intName = {_item['intName']})"
)
for _item in ipf_col_ipaddrs.source_record_keys.values()
)
)
ipf_col_ifaces.make_keys()
# -------------------------------------------------------------------------
# At this point we have the IPF collections for the needed 'interfaces' and
# 'ipaddrs'. We need to ensure these same entities exist in the Netbox
# collections. We will first attempt to find all the existing records in
# Netbox using the `fetch_keys` method.
# -------------------------------------------------------------------------
nb_col_ifaces = get_collection(source=nb_col.source, name="interfaces")
nb_col_ipaddrs = get_collection(source=nb_col.source, name="ipaddrs")
await nb_col_ifaces.fetch_keys(keys=ipf_col_ifaces.inventory)
await nb_col_ipaddrs.fetch_keys(keys=ipf_col_ipaddrs.inventory)
nb_col_ipaddrs.make_keys()
nb_col_ifaces.make_keys()
diff_ifaces = diff(source_from=ipf_col_ifaces, sync_to=nb_col_ifaces)
diff_ipaddrs = diff(source_from=ipf_col_ipaddrs, sync_to=nb_col_ipaddrs)
def _report_iface(item, _res: Response):
_key, _fields = item
hname, iname = _fields["hostname"], _fields["interface"]
if _res.is_error:
print(f"CREATE:FAIL: interface {hname}, {iname}: {_res.text}")
return
print(f"CREATE:OK: interface {hname}, {iname}.")
nb_col_ifaces.source_records.append(_res.json())
def _report_ipaddr(item, _res: Response):
_key, _fields = item
hname, iname, ipaddr = (
_fields["hostname"],
_fields["interface"],
_fields["ipaddr"],
)
ident = f"ipaddr {hname}, {iname}, {ipaddr}"
if _res.is_error:
print(f"CREATE:FAIL: {ident}: {_res.text}")
return
nb_col_ipaddrs.source_records.append(_res.json())
print(f"CREATE:OK: {ident}.")
if diff_ifaces:
await nb_col_ifaces.create_missing(
missing=diff_ifaces.missing, callback=_report_iface
)
if diff_ipaddrs:
await nb_col_ipaddrs.create_missing(
missing=diff_ipaddrs.missing, callback=_report_ipaddr
)
nb_col.make_keys()
nb_col_ifaces.make_keys()
nb_col_ipaddrs.make_keys()
# TODO: Note that I am passing the cached collections of interfaces and ipaddress
# To the device collection to avoid duplicate lookups for record
# indexes. Will give this approach some more thought.
nb_col.cache["interfaces"] = nb_col_ifaces
nb_col.cache["ipaddrs"] = nb_col_ipaddrs
async def _execute_create(
ipf_col: IPFabricDeviceCollection, nb_col: NetboxDeviceCollection, missing: dict
):
# -------------------------------------------------------------------------
# Now create each of the device records. Once the device records are
# created, then go back and add the primary interface and ipaddress values
# using the other collections.
# -------------------------------------------------------------------------
def _report_device(update, _res: Response):
key, item = update
if _res.is_error:
print(f"FAIL: create device {item['hostname']}: {_res.text}")
return
print(f"CREATE:OK: device {item['hostname']} ... creating primary IP ... ")
nb_col.source_records.append(_res.json())
await nb_col.create_missing(missing=missing, callback=_report_device)
await _ensure_primary_ipaddrs(ipf_col=ipf_col, nb_col=nb_col, missing=missing)
# -------------------------------------------------------------------------
# for each of the missing device records perform a "change request" on the
# 'ipaddr' field. so that the primary IP will be assigned.
# -------------------------------------------------------------------------
changes = {
key: {"ipaddr": ipf_col.inventory[key]["ipaddr"]} for key in missing.keys()
}
def _report_primary(item, _res): # noqa
key, fields = item
rec = nb_col.inventory[key]
ident = f"device {rec['hostname']} assigned primary-ip4"
if _res.is_error:
print(f"CREATE:FAIL: {ident}: {_res.text}")
return
print(f"CREATE:OK: {ident}.")
await nb_col.update_changes(changes, callback=_report_primary)
async def _execute_changes(
params: dict,
ipf_col: IPFabricDeviceCollection,
nb_col: NetboxDeviceCollection,
changes,
):
print("\nExaminging changes ... ", flush=True)
def _report(change, res: Response):
ch_key, ch_fields = change
ch_rec = nb_col.inventory[ch_key]
ident = f"device {ch_rec['hostname']}"
print(
f"CHANGE:FAIL: {ident}, {res.text}"
if res.is_error
else f"CHANGE:OK: {ident}"
)
actual_changes = dict()
missing_pri_ip = dict()
for key, key_change in changes.items():
rec = nb_col.inventory[key]
if (ipaddr := key_change.pop("ipaddr", None)) is not None:
if any(
(
rec["ipaddr"] == "",
(ipaddr and (params["force_primary_ip"] is True)),
)
):
key_change["ipaddr"] = ipaddr
missing_pri_ip[key] = key_change
if len(key_change):
actual_changes[key] = key_change
if missing_pri_ip:
await _ensure_primary_ipaddrs(
ipf_col=ipf_col, nb_col=nb_col, missing=missing_pri_ip
)
if not actual_changes:
print("No required changes.")
return
print("Processing changes ... ")
await nb_col.update_changes(actual_changes, callback=_report)
print("Done.\n")
```
#### File: ipf_netbox/tasks/ipaddrs.py
```python
import asyncio
from typing import List
from httpx import Response
from ipf_netbox.collection import get_collection
from ipf_netbox.diff import diff, DiffResults
from ipf_netbox.tasks.tasktools import with_sources
from ipf_netbox.ipfabric.ipaddrs import IPFabricIPAddrCollection
from ipf_netbox.netbox.ipaddrs import NetboxIPAddrCollection
@with_sources
async def ensure_ipaddrs(ipf, nb, **params) -> List[str]:
"""
Ensure Netbox contains device interface IP addresses found in IP Fabric.
Parameters
----------
ipf: IPFabric Source instance
nb: Netbox Source instance
Other Parameters
----------------
dry_run: bool
Determines dry-run mode
devices: List[str]
List of device to use as basis for action
filters: str
The IPF device inventory filter expression to use
as basis for action.
Returns
-------
List[str]
The list of IPF device hostnames found in the IPF collection. Can
be used as a basis for other collection activities.
"""
print("\nEnsure Device Interface IP Address.")
print("Fetching from IP Fabric ... ", flush=True, end="")
ipf_col: IPFabricIPAddrCollection = get_collection( # noqa
source=ipf, name="ipaddrs"
)
if (filters := params.get("filters")) is not None:
await ipf_col.fetch(filters=filters)
elif (ipf_device_list := params.get("devices")) is not None:
print(f"{len(ipf_device_list)} devices ... ", flush=True, end="")
await asyncio.gather(
*(
ipf_col.fetch(filters=f"hostname = {hostname}")
for hostname in ipf_device_list
)
)
else:
raise RuntimeError("FAIL: No parameters to fetch ipaddrs")
ipf_col.make_keys()
print(f"{len(ipf_col)} items.")
if not len(ipf_col.source_records):
return []
# create the IPF hostname specific device list for return purposes.
ipf_device_list = [rec["hostname"] for rec in ipf_col.source_records]
# -------------------------------------------------------------------------
# Need to fetch from Netbox on a per-device basis.
# -------------------------------------------------------------------------
print("Fetching from Netbox ... ", flush=True, end="")
nb_col: NetboxIPAddrCollection = get_collection(source=nb, name="ipaddrs") # noqa
col_device_list = {rec["hostname"] for rec in ipf_col.inventory.values()}
print(f"{len(col_device_list)} devices ... ", flush=True, end="")
nb.client.timeout = 120
await asyncio.gather(
*(nb_col.fetch(hostname=hostname) for hostname in col_device_list)
)
nb_col.make_keys()
print(f"{len(nb_col)} items.", flush=True)
# -------------------------------------------------------------------------
# check for differences and process accordingly.
# -------------------------------------------------------------------------
diff_res = diff(source_from=ipf_col, sync_to=nb_col)
if not diff_res:
print("No changes required.")
return ipf_device_list
_diff_report(diff_res)
if params.get("dry_run", False) is True:
return ipf_device_list
tasks = list()
if diff_res.missing:
tasks.append(_diff_create(nb_col, diff_res.missing))
if diff_res.changes:
tasks.append(_diff_update(nb_col, diff_res.changes))
await asyncio.gather(*tasks)
return ipf_device_list
def _diff_report(diff_res: DiffResults):
print("\nDiff Report")
print(f" Missing: count {len(diff_res.missing)}")
print(f" Needs Update: count {len(diff_res.changes)}")
print("\n")
async def _diff_update(nb_col: NetboxIPAddrCollection, changes):
def _done(_item, res: Response):
_key, _changes = _item
_hostname, _ifname = _key
res.raise_for_status()
print(f"UPDATE:OK: ipaddr {_hostname}, {_ifname}", flush=True)
await nb_col.update_changes(changes=changes, callback=_done)
async def _diff_create(nb_col: NetboxIPAddrCollection, missing):
def _done(_item, _res: Response):
_key, _fields = _item
_res.raise_for_status()
ident = (
f"ipaddr {_fields['hostname']}, {_fields['interface']}, {_fields['ipaddr']}"
)
print(
f"CREATE:OK: {ident}", flush=True,
)
await nb_col.create_missing(missing, callback=_done)
```
#### File: ipf-netbox/tests/ipf-testdrive.py
```python
import asyncio
from ipf_netbox.ipfabric import IPFabricDeviceCollection
from ipf_netbox.ipfabric import get_client
loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop)
get_client()
ipf = IPFabricDeviceCollection()
loop.run_until_complete(ipf.fetch())
def exclude_lap(fp):
return False if fp["os_name"] == "lap" else True
ipf.make_fingerprints(with_filter=exclude_lap)
ipf.make_keys()
```
|
{
"source": "jeremyschulman/junospyez-ossh-server",
"score": 2
}
|
#### File: junospyez-ossh-server/junospyez_ossh_server/log.py
```python
from junospyez_ossh_server import about
import logging
# TODO: add logging.conf for debug purposes ...
# import logging
# logging_conf_path = os.path.normpath(os.path.join(os.path.dirname(__file__), '../logging.conf'))
# logging.config.fileConfig(logging_conf_path)
# logger = logging.getLogger(__name__)
logger = logging.getLogger(about.package_name)
def basic():
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
```
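A minimal usage sketch; the log message is illustrative.

```python
from junospyez_ossh_server.log import basic, logger

basic()                                    # set INFO level and attach a StreamHandler
logger.info("outbound-ssh server starting")
```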
|
{
"source": "jeremyschulman/lxml-xpath-ipaddress",
"score": 3
}
|
#### File: lxml-xpath-ipaddress/lxml_xpath_ipaddress/func_namespace.py
```python
from functools import wraps
from lxml.etree import FunctionNamespace
from lxml_xpath_ipaddress.in_subnet import in_subnet
from lxml_xpath_ipaddress.ip4or6 import *
NAMESPACE = 'https://github.com/jeremyschulman/lxml-xpath-ipaddress'
# register this namespace into the lxml system
# the caller must use the NAMESPACE value when calling xpath with the namespace= argument
_ns_ext = FunctionNamespace(NAMESPACE)
# define a dictionary namespace for 'easy use' for default purposes. There is no
# obligation to use this. This dictionary would be used for example:
#
# items = config.xpath('//*[ip:ip4-net(.)]', namespaces=ns)
ns = {'ip': NAMESPACE}
# -----------------------------------------------------------------------------------------------------------------
# These functions are bound into the LXML namespace. See extension documentation for details
# https://lxml.de/1.3/extensions.html
# -----------------------------------------------------------------------------------------------------------------
def make_nsf(func):
@wraps(func)
def wrapper(dummy, ele):
try:
return func(ele[0].text)
except:
return False
return wrapper
def nsf_in_subnet(dummy, ele, subnet):
"""
lxml extension function wrapping in_subnet
Parameters
----------
dummy
Not used
ele : Element
The lxml element to check
subnet : str
The subnet string value
Returns
-------
bool
True if the given element text value is an IP thing and is within the given subnet value
False otherwise
"""
try:
value = ele[0].text
return in_subnet(value, subnet)
except:
return False
# -----------------------------------------------------------------------------------------------------------------
# Bind functions into LXML namespace object
# -----------------------------------------------------------------------------------------------------------------
_ns_ext['ip-any'] = make_nsf(is_any_ip)
_ns_ext['ip-host'] = make_nsf(is_host_ip)
_ns_ext['ip-net'] = make_nsf(is_net_ip)
_ns_ext['ip6-any'] = make_nsf(is_any_ip6)
_ns_ext['ip6-net'] = make_nsf(is_net_ip6)
_ns_ext['ip6-host'] = make_nsf(is_host_ip6)
_ns_ext['ip4-any'] = make_nsf(is_any_ip4)
_ns_ext['ip4-net'] = make_nsf(is_net_ip4)
_ns_ext['ip4-host'] = make_nsf(is_host_ip4)
_ns_ext['in-subnet'] = nsf_in_subnet
```
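A small end-to-end sketch of calling these extension functions from lxml XPath; the XML content and the subnet value are invented for illustration, and the exact matches depend on the underlying ip4or6/in_subnet helpers.

```python
# Illustrative only: the XML document and subnet below are made up.
from lxml import etree
from lxml_xpath_ipaddress.func_namespace import ns   # {'ip': NAMESPACE}

doc = etree.XML(
    "<config>"
    "<address><ip>192.0.2.10</ip></address>"
    "<address><ip>198.51.100.25</ip></address>"
    "<address><ip>198.51.100.0/24</ip></address>"
    "</config>"
)

# addresses whose <ip> text is an IPv4 host value
hosts = doc.xpath("//address[ip:ip4-host(ip)]", namespaces=ns)

# addresses whose <ip> text falls within the given subnet
members = doc.xpath("//address[ip:in-subnet(ip, '198.51.100.0/24')]", namespaces=ns)

print([e.findtext("ip") for e in hosts])
print([e.findtext("ip") for e in members])
```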
|
{
"source": "jeremyschulman/netbox-pyswagger",
"score": 2
}
|
#### File: jeremyschulman/netbox-pyswagger/setup.py
```python
from setuptools import setup
import netbox_pyswagger
def read(fname):
with open(fname) as fp:
content = fp.read()
return content
setup(
name='netbox-pyswagger',
version=netbox_pyswagger.__version__,
description='Swagger client for Netbox',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/jeremyschulman/netbox-pyswagger',
packages=['netbox_pyswagger'],
license='MIT',
zip_safe=False,
install_requires=[
'halutz>=0.3.0',
'requests',
'six'
],
keywords=('netbox', 'rest', 'json', 'api',
'network', 'automation'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
]
)
```
|
{
"source": "jeremyschulman/netcad-demo-meraki-1",
"score": 2
}
|
#### File: netcad-demo-meraki-1/netcad_demo_meraki1/design.py
```python
from operator import itemgetter
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
from netcad.design_services import Design
from netcad.topology import TopologyDesignService
from netcad.device import DeviceCatalog
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from .device_roles import MS220p8, MR52, MX65, MR42
from .profiles import AccessVlan1
def create_design(design: Design) -> Design:
aliases = design.config["alias"] = dict()
aliases["sw01"] = MS220p8(name="ms01-dl1")
aliases["sw02"] = MS220p8(name="ms01-dl2")
aliases["sw03"] = MS220p8(name="ms01-dl3")
aliases["ap01"] = MR52("ap01-dl1")
aliases["ap02"] = MR42("ap01-dl2")
aliases["ap03"] = MR52("ap01-dl3")
aliases["mx01"] = MX65(name="mx01-dl1")
aliases["mx02"] = MX65(name="mx01-dl2")
all_devs = list(aliases.values())
design.add_devices(*all_devs)
design.add_services(
TopologyDesignService(topology_name=design.name, devices=all_devs)
)
cable_devices(design)
design.update()
return design
def cable_devices(design: Design):
aliasses: DeviceCatalog = design.config["alias"]
sw01, sw02, sw03 = itemgetter("sw01", "sw02", "sw03")(aliasses)
ap01, ap02, ap03 = itemgetter("ap01", "ap02", "ap03")(aliasses)
mx01, mx02 = itemgetter("mx01", "mx02")(aliasses)
cable_id = 1
# -------------------------------------------------------------------------
# Cable Access-Points to Switches
# -------------------------------------------------------------------------
# ap01.0 --- sw03.1
with ap01.interfaces["wired0"] as ap_w0, sw03.interfaces["1"] as sw03_1:
ap_w0.profile = AccessVlan1()
sw03_1.profile = AccessVlan1()
ap_w0.cable_id = sw03_1.cable_id = f"cable_{cable_id}"
cable_id += 1
# ap02.0 --- sw01.2
with ap02.interfaces["wired0"] as ap_w0, sw01.interfaces["2"] as sw_iface:
ap_w0.profile = AccessVlan1()
sw_iface.profile = AccessVlan1()
ap_w0.cable_id = sw_iface.cable_id = f"cable_{cable_id}"
cable_id += 1
# ap03.0 -- sw02.2
with ap03.interfaces["wired0"] as ap_w0, sw02.interfaces["2"] as sw_iface:
ap_w0.profile = AccessVlan1()
sw_iface.profile = AccessVlan1()
ap_w0.cable_id = sw_iface.cable_id = f"cable_{cable_id}"
cable_id += 1
# -------------------------------------------------------------------------
# Cable Switches to Appliance
# -------------------------------------------------------------------------
# sw01.1 -- mx-office (not in design yet)
# sw02.1 -- mx02.3
with sw02.interfaces["1"] as sw_iface, mx02.interfaces["3"] as mx_iface:
mx_iface.profile = AccessVlan1()
sw_iface.profile = AccessVlan1()
mx_iface.cable_id = sw_iface.cable_id = f"cable_{cable_id}"
cable_id += 1
# sw03.2 -- mx01.3
with sw03.interfaces["2"] as sw_iface, mx01.interfaces["3"] as mx_iface:
mx_iface.profile = AccessVlan1()
sw_iface.profile = AccessVlan1()
mx_iface.cable_id = sw_iface.cable_id = f"cable_{cable_id}"
cable_id += 1
```
|
{
"source": "jeremyschulman/netcam-aiomeraki",
"score": 2
}
|
#### File: netcam_aiomeraki/switch/meraki_switch_check_switchports.py
```python
from typing import TYPE_CHECKING
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
from netcad.checks import check_result_types as tr
from netcad.vlans.checks.check_switchports import (
SwitchportCheckCollection,
SwitchportAccessExpectation,
SwitchportTrunkExpectation,
)
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
if TYPE_CHECKING:
from .meraki_switch_dut import MerakiSwitchDeviceUnderTest
from netcad.helpers import range_string
# -----------------------------------------------------------------------------
# Exports
# -----------------------------------------------------------------------------
__all__ = ["meraki_switch_check_switchports"]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
async def meraki_switch_check_switchports(
self, check_collection: SwitchportCheckCollection
) -> tr.CheckResultsCollection:
"""
Validate the device switchport configuration against the design
expectations.
"""
dut: MerakiSwitchDeviceUnderTest = self
device = dut.device
ports_config = await dut.get_port_config()
map_ports_config = {rec["portId"]: rec for rec in ports_config}
results = list()
for check in check_collection.checks:
expd_status = check.expected_results
if_name = check.check_id()
# if the interface from the design does not exist on the device, then
# report this error and go to next test-case.
if not (msrd_port := map_ports_config.get(if_name)):
results.append(tr.CheckFailNoExists(device=device, check=check))
continue
# check the switchport mode value. If they do not match, then we report
# the error and continue to the next test-case.
expd_mode = expd_status.switchport_mode
msrd_mode = msrd_port["type"]
if expd_mode != msrd_mode:
results.append(
tr.CheckFailFieldMismatch(
device=device,
check=check,
field="switchport_mode",
measurement=msrd_mode,
)
)
continue
mode_handler = {
"access": _check_access_switchport,
"trunk": _check_trunk_switchport,
}.get(expd_mode)
results.extend(mode_handler(dut, check, expd_status, msrd_port))
return results
def _check_access_switchport(
dut, check, expd_status: SwitchportAccessExpectation, msrd_status: dict
) -> tr.CheckResultsCollection:
"""
Only one check for now, that is to validate that the configured VLAN on the
access port matches the test case.
"""
device = dut.device
vl_id = expd_status.vlan.vlan_id
results = list()
if vl_id and (msrd_vl_id := msrd_status["vlan"]) != vl_id:
results.append(
tr.CheckFailFieldMismatch(
device=device,
check=check,
field="vlan",
expected=vl_id,
measurement=msrd_vl_id,
)
)
return results
def _check_trunk_switchport(
dut, check, expd_status: SwitchportTrunkExpectation, msrd_status: dict
) -> tr.CheckResultsCollection:
"""
Check one interface that is a TRUNK port.
"""
device = dut.device
results = list()
# if there is a native vlan expected, then validate the match.
n_vl_id = expd_status.native_vlan.vlan_id if expd_status.native_vlan else None
if n_vl_id and (msrd_vl_id := msrd_status["vlan"]) != n_vl_id:
results.append(
tr.CheckFailFieldMismatch(
device=device,
check=check,
field="native_vlan",
expected=n_vl_id,
measurement=msrd_vl_id,
)
)
# the trunk is either "all" or a CSV of vlans
msrd_allowd_vlans = msrd_status["allowedVlans"]
# if all, then done checking; really should not be using "all", so log an info.
if msrd_allowd_vlans == "all":
if not results:
results.append(
tr.CheckPassResult(device=device, check=check, measurement=msrd_status)
)
results.append(
tr.CheckInfoLog(
device=device,
check=check,
measurement="trunk port allows 'all' vlans",
)
)
return results
e_tr_allowed_vids = sorted(
[vlan.vlan_id for vlan in expd_status.trunk_allowed_vlans]
)
    # convert the list of vlan-ids to a range string for string comparison
# purposes.
e_tr_alwd_vstr = range_string(e_tr_allowed_vids)
if e_tr_alwd_vstr != msrd_allowd_vlans:
results.append(
tr.CheckFailFieldMismatch(
device=device,
check=check,
field="trunk_allowed_vlans",
expected=e_tr_alwd_vstr,
measurement=msrd_allowd_vlans,
)
)
if not results:
results = [
tr.CheckPassResult(device=device, check=check, measurement=msrd_status)
]
return results
```
#### File: netcam_aiomeraki/switch/meraki_switch_dut.py
```python
from typing import Optional, List, Dict
from functools import singledispatchmethod
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from netcam_aiomeraki.meraki_dut import (
MerakiDeviceUnderTest,
CheckCollection,
CheckResultsCollection,
)
class MerakiSwitchDeviceUnderTest(MerakiDeviceUnderTest):
"""
    Support the Meraki switch devices, product models that begin with "MS".
"""
async def get_port_config(self) -> List[Dict]:
"""
Obtain the switch port configuration. The API content is cached.
"""
return await self.api_cache_get(
key="ports_config",
call="switch.getDeviceSwitchPorts",
serial=self.serial,
)
async def get_port_status(self) -> dict:
"""
Obtain the switch port status information. The API content is cached.
"""
return await self.api_cache_get(
key="ports_status",
call="switch.getDeviceSwitchPortsStatuses",
serial=self.serial,
)
# -------------------------------------------------------------------------
#
# DUT Methods
#
# -------------------------------------------------------------------------
@singledispatchmethod
async def execute_checks(
self, testcases: CheckCollection
) -> Optional["CheckResultsCollection"]:
"""
        If this DUT does not explicitly implement a test-case, then try the
superclass.
"""
return await super().execute_checks(testcases)
# -------------------------------------------------------------------------
# Support the 'cabling' testcases
# -------------------------------------------------------------------------
from .meraki_switch_check_cabling import meraki_switch_check_cabling
execute_checks.register(meraki_switch_check_cabling)
# -------------------------------------------------------------------------
# Support the 'interfaces' testcases
# -------------------------------------------------------------------------
from .meraki_switch_check_interfaces import meraki_switch_check_interfaces
execute_checks.register(meraki_switch_check_interfaces)
# -------------------------------------------------------------------------
# Support the 'switchports' testcases
# -------------------------------------------------------------------------
from .meraki_switch_check_switchports import meraki_switch_check_switchports
execute_checks.register(meraki_switch_check_switchports)
# -------------------------------------------------------------------------
# Support the 'vlans' testcases
# -------------------------------------------------------------------------
from .meraki_switch_tc_vlans import meraki_switch_check_vlans
execute_checks.register(meraki_switch_check_vlans)
```
#### File: jeremyschulman/netcam-aiomeraki/tasks.py
```python
from invoke import task
@task
def precheck(ctx):
ctx.run("black .")
ctx.run("pre-commit run -a")
ctx.run("interrogate -c pyproject.toml", pty=True)
@task
def clean(ctx):
ctx.run("python setup.py clean")
ctx.run("rm -rf netcfgbu.egg-info")
ctx.run("rm -rf .pytest_cache .pytest_tmpdir .coverage")
ctx.run("rm -rf htmlcov")
```
|
{
"source": "jeremyschulman/netcfgbu",
"score": 2
}
|
#### File: netcfgbu/cli/root.py
```python
from importlib import metadata
from pathlib import Path
import click
from functools import reduce
from first import first
import netcfgbu
from netcfgbu import config as _config
from netcfgbu import inventory as _inventory
from netcfgbu import jumphosts
VERSION = metadata.version(netcfgbu.__package__)
# -----------------------------------------------------------------------------
#
# CLI Custom Click Commands
#
# -----------------------------------------------------------------------------
class WithConfigCommand(click.Command):
def invoke(self, ctx):
try:
ctx.obj["app_cfg"] = _config.load(fileio=ctx.params["config"])
super().invoke(ctx)
except Exception as exc:
ctx.fail(str(exc))
class WithInventoryCommand(click.Command):
def invoke(self, ctx):
try:
app_cfg = ctx.obj["app_cfg"] = _config.load(fileio=ctx.params["config"])
if debug_ssh_lvl := ctx.params.get("debug_ssh"): # pragma: no cover
from asyncssh import logging as assh_lgr
import logging
assh_lgr.set_log_level(logging.DEBUG)
assh_lgr.set_debug_level(debug_ssh_lvl)
if ctx.params["inventory"]:
ctx.obj["app_cfg"].defaults.inventory = ctx.params["inventory"]
inv = ctx.obj["inventory_recs"] = _inventory.load(
app_cfg=app_cfg,
limits=ctx.params["limit"],
excludes=ctx.params["exclude"],
)
if not inv:
raise RuntimeError(
f"No inventory matching limits in: {app_cfg.defaults.inventory}"
)
            # if there is jump host configuration then prepare for later use.
if app_cfg.jumphost:
jumphosts.init_jumphosts(jumphost_specs=app_cfg.jumphost, inventory=inv)
super().invoke(ctx)
except Exception as exc:
ctx.fail(str(exc))
# -----------------------------------------------------------------------------
#
# CLI Options
#
# -----------------------------------------------------------------------------
def get_spec_nameorfirst(spec_list, spec_name=None):
if not spec_list:
return None
if not spec_name:
return first(spec_list)
return first(spec for spec in spec_list if getattr(spec, "name", "") == spec_name)
def check_for_default(ctx, opt, value):
if value or Path("netcfgbu.toml").exists():
return value
return None
opt_config_file = click.option(
"-C",
"--config",
envvar="NETCFGBU_CONFIG",
type=click.File(),
callback=check_for_default
# required=True,
# default="netcfgbu.toml",
)
# -----------------------------------------------------------------------------
# Inventory Options
# -----------------------------------------------------------------------------
opt_inventory = click.option(
"--inventory", "-i", help="Inventory file-name", envvar="NETCFGBU_INVENTORY"
)
opt_limits = click.option(
"--limit",
"-l",
"--include",
multiple=True,
help="limit/include in inventory",
)
opt_excludes = click.option(
"--exclude",
"-e",
multiple=True,
help="exclude from inventory",
)
def opts_inventory(in_fn_deco):
return reduce(
lambda _d, fn: fn(_d), [opt_inventory, opt_limits, opt_excludes], in_fn_deco
)
opt_batch = click.option(
"--batch",
"-b",
type=click.IntRange(1, 500),
help="inevntory record processing batch size",
)
opt_timeout = click.option(
"--timeout", "-t", help="timeout(s)", type=click.IntRange(0, 5 * 60)
)
opt_debug_ssh = click.option(
"--debug-ssh", help="enable SSH debugging", type=click.IntRange(1, 3)
)
@click.group()
@click.version_option(version=VERSION)
def cli():
pass # pragma: no cover
```
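The `opts_inventory` helper above folds the three inventory-related option decorators onto a command with `reduce`. The sketch below shows how a subcommand might reuse these helpers, assuming it is defined in the same module and that the group is invoked with a dict `obj`; the command name and body are hypothetical, not taken from netcfgbu.
```python
# Hypothetical sketch (not part of netcfgbu): a subcommand declared with the
# option helpers defined above. Command name and body are made up.
@cli.command(name="inventory-count", cls=WithInventoryCommand)
@opt_config_file
@opts_inventory
@opt_debug_ssh
@click.pass_context
def cli_inventory_count(ctx, **cli_opts):
    """Report how many inventory records matched the --limit/--exclude filters."""
    # WithInventoryCommand.invoke() loads the config and inventory before this
    # body runs, storing the matching records in ctx.obj["inventory_recs"].
    print(f"{len(ctx.obj['inventory_recs'])} inventory records matched")
```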
|
{
"source": "jeremyschulman/phpipam-pyez",
"score": 2
}
|
#### File: phpipam-pyez/phpipampyez/search.py
```python
import json
from requests.cookies import create_cookie
from bs4 import BeautifulSoup
from phpipampyez.utils import expand_ids
DEFAULT_SEARCH_OPTIONS = [
'addresses', 'subnets', 'vlans', 'vrf'
]
SEARCH_OPTIONS = DEFAULT_SEARCH_OPTIONS + ['pstn', 'circuits']
def extracto_subnets(soup):
found = soup.find_all('tr', attrs={'class': 'subnetSearch'})
return [item.attrs['subnetid'] for item in found]
def extracto_addresses(soup):
found = soup.find_all('tr', attrs={'class': 'ipSearch'})
return [item.attrs['id'] for item in found]
def extracto_vlans(soup):
text = 'Search results (VLANs):'
anchor = soup.find('h4', text=text)
if not anchor:
return []
table = anchor.find_next_sibling('table')
items = table.find_all('a', attrs={'data-action': 'edit'})
return [item['data-vlanid'] for item in items]
def extracto_vrfs(soup):
text = 'Search results (VRFs):'
anchor = soup.find('h4', text=text)
if not anchor:
return []
table = anchor.find_next_sibling('table')
items = table.find_all('a', attrs={'data-action': 'edit'})
return [item['data-vrfid'] for item in items]
def search(client, find, search_options, expand=False):
"""
Executes the "search" tool found on the WebUI and returns back structured results.
See the same method defined in the PhpIpamClient class.
"""
search_url = client.api.phpipam_host + f'/tools/search/{find}'
# determine the search options based on whether or not the caller provided
# them. If they did not, then use the defaults; i.e. those listed in
# DEFAULT_SEARCH_OPTIONS. Options specified will be turned 'on' in the
# search; 'off' otherwise.
opt_settings = search_options or DEFAULT_SEARCH_OPTIONS
opt_dict = {opt: ('off', 'on')[opt in opt_settings]
for opt in SEARCH_OPTIONS}
# the search options are specified as a cookie called 'search_parameters';
# found this by introspecting the WebUI network calls. The cookie is a
# dict, so we must dump to JSON for the purpose of HTTP usage.
client.webui.cookies.set_cookie(create_cookie('search_parameters', json.dumps(opt_dict)))
    # The search is invoked as an HTTP GET call, and then we need to parse the
    # HTML results using the BeautifulSoup package.
res = client.webui.get(search_url)
res.raise_for_status()
soup = BeautifulSoup(res.content, 'html.parser')
# Store each set of results in separate keys that correspond to the search
# option keys. For any option that is either not specified as part of the
# search, or if the results yield no values, the list will be an empty list
# ([]), and not `None`.
results = dict()
results['subnets'] = extracto_subnets(soup)
results['addresses'] = extracto_addresses(soup)
results['vlans'] = extracto_vlans(soup)
results['vrfs'] = extracto_vrfs(soup)
# TODO: add pstn and circuits
# If the caller did not request the ID values to be expanded into data
# dictionaries, then we are all done, and can return the results now.
if not expand:
return results
# If we are here, then we need to transform the list of IDs to list of dicts
results['subnets'] = expand_ids(client.subnets, results['subnets'])
results['addresses'] = expand_ids(client.addresses, results['addresses'])
results['vlans'] = expand_ids(client.vlans, results['vlans'])
results['vrfs'] = expand_ids(client.vrfs, results['vrfs'])
return results
```
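A brief usage sketch of the `search()` function follows. The construction of the client is not shown here; it is assumed to be a PhpIpamClient instance with the `api`, `webui`, and collection attributes that `search()` and `expand_ids()` rely on, and the search term is arbitrary.
```python
# Hedged usage sketch; `client` is assumed to be a configured PhpIpamClient.
from phpipampyez.search import search

def find_office_networks(client):
    # restrict the WebUI search to subnets and vlans only
    results = search(client, find="office", search_options=["subnets", "vlans"])
    print(results["subnets"], results["vlans"])

    # expand=True converts the raw ID lists into full API record dictionaries
    return search(client, find="office", search_options=["subnets"], expand=True)
```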
|
{
"source": "jeremyschulman/pyzayo",
"score": 2
}
|
#### File: pyzayo/pyzayo/api.py
```python
from httpx import AsyncClient
# -----------------------------------------------------------------------------
# Module Exports
# -----------------------------------------------------------------------------
__all__ = ["ZayoAPI"]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
class ZayoAPI(AsyncClient):
"""
Zayo API asyncio HTTPx client class. Each Zayo API functional area client,
    maintenance for example, should define an attribute, `api` for example, that
uses this class to access the Zayo API system.
"""
def __init__(self, base_url, access_token, **kwargs):
""" init the client with the acess token and set content for JSON """
super().__init__(base_url=base_url, **kwargs)
self.headers["Authorization"] = access_token
self.headers["content-type"] = "application/json"
```
#### File: pyzayo/pyzayo/base_client.py
```python
from typing import Optional, List, Dict
import math
from os import getenv
import asyncio
from itertools import chain
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
import httpx
from tenacity import retry, wait_random_exponential, retry_if_exception
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from pyzayo import consts
from pyzayo.api import ZayoAPI
# -----------------------------------------------------------------------------
# Module Exports
# -----------------------------------------------------------------------------
__all__ = ["ZayoClientBase"]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
class ZayoClientBase(object):
"""
This is a base class for any Zayo Client. This class provides the common
functionality that would be used by subclassed clients such as the Zayo
maintenance client, ZayoMatenanceMixin.
"""
def __init__(self):
""" authorize to the ZAYO API and setup for the mainteance functioanl area """
self._auth_payload: Optional[dict] = None
self.authenticate()
self.api = ZayoAPI(base_url=consts.ZAYO_URL_SM, access_token=self.access_token)
@property
def access_token(self):
""" returns the current access token value """
return self._auth_payload["access_token"]
def authenticate(self):
"""
This method is used to authenticate to the Zayo API system using the
client-id and client-secret values obtained from the environment.
This method is called during instance initialization and the access
token can be obtained via the `access_token` property.
Notes
-----
According to the Zayo API documentation, a token is valid for 1hr. Plan
accordingly.
"""
client_id = getenv("ZAYO_CLIENT_ID")
client_secret = getenv("ZAYO_CLIENT_SECRET")
payload = {
"client_id": client_id,
"client_secret": client_secret,
"grant_type": "client_credentials",
"scope": "openid",
}
res = httpx.post(url=consts.ZAYO_URL_AUTH, data=payload)
res.raise_for_status()
self._auth_payload = res.json()
def get_records_count(self, url, **params) -> int:
"""
This function will return the total number of records that match the
        request criteria defined by `params`. If `params` is not provided, then
        the count of all records for the given URL is returned.
Parameters
----------
url: str
            The route endpoint providing records, for example the
value defined in the `consts.ZAYO_SM_ROUTE_MTC_CASES`.
Other Parameters
----------------
The other `params` are key-values as defined by the Zayo API
spec; and these are specific to the URL used.
Returns
-------
        The number of records matching the criteria (or all)
"""
loop = asyncio.get_event_loop()
# do not request any records to be returned, just need the record count
# from the metadata response.
payload = params.copy()
payload["paging"] = {"top": 0}
res = loop.run_until_complete(self.api.post(url, json=payload))
res.raise_for_status()
return res.json()["data"]["metadata"]["totalRecordCount"]
def paginate_records(self, url, **params) -> List[Dict]:
"""
        This function will return all records matching the request criteria
        determined by `params`, or all records if no criteria are given.
Parameters
----------
url: str
The API route endpoint
Other Parameters
----------------
        key-values specific to the url being used, determining the request
criteria.
Notes
-----
The API is limited to return a maximum of 100 records per the "top" and
"step" fields of the paging criteria. In the event there are more than
100 records, only the first 100 records will be returned. The Caller
could use the `get_records_count` to determine the total value before
        calling this function to create a better `params` criteria (perhaps by
date range) to avoid > 100 records.
Returns
-------
List of records, each dict schema is specific to the url.
"""
if params:
paging = params.setdefault("paging", {})
page_sz = paging.setdefault("top", consts.MAX_TOP_COUNT)
else:
page_sz = consts.MAX_TOP_COUNT
params = dict(paging=dict(top=page_sz, skip=0))
loop = asyncio.get_event_loop()
total_recs = self.get_records_count(url=url, **params)
# TODO: limit to 100? API seems to allow to page beyond skip=50 ...
# max_recs = min(total_recs, consts.MAX_PAGED_RECORDS)
max_recs = total_recs
total_pages = math.ceil(max_recs / page_sz)
tasks = list()
# print(f"paging total records {max_recs} pages {total_pages} of size {page_sz}")
@retry(
retry=retry_if_exception(httpx.ReadTimeout),
wait=wait_random_exponential(multiplier=1, max=10),
)
async def get_page(payload):
""" get a page and retry if exception """
return await self.api.post(url, json=payload)
for page in range(total_pages):
task_params = params.copy()
task_params["paging"] = {"top": page_sz, "skip": (page * page_sz)}
tasks.append(get_page(task_params))
http_res_list = loop.run_until_complete(
asyncio.gather(*tasks, return_exceptions=True)
)
return list(
chain.from_iterable(
resp.json()["data"]["records"]
for resp in http_res_list
if resp.is_error is False
)
)
```
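As a sketch of how the base class above is meant to be subclassed, the following hypothetical functional-area client exposes the maintenance-case records. It assumes the `ZAYO_CLIENT_ID` and `ZAYO_CLIENT_SECRET` environment variables are set and uses the `consts.ZAYO_SM_ROUTE_MTC_CASES` route mentioned in the docstrings; the class and method names are illustrative only.
```python
# Hedged sketch of a functional-area client built on ZayoClientBase.
from pyzayo import consts
from pyzayo.base_client import ZayoClientBase

class ZayoCasesClient(ZayoClientBase):
    """Example subclass exposing maintenance-case records (illustrative only)."""

    def count_cases(self) -> int:
        # metadata-only request; no records are returned
        return self.get_records_count(url=consts.ZAYO_SM_ROUTE_MTC_CASES)

    def all_cases(self) -> list:
        # one POST per page, flattened into a single list of record dicts
        return self.paginate_records(url=consts.ZAYO_SM_ROUTE_MTC_CASES)
```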
#### File: pyzayo/cli/cli_services.py
```python
from typing import List, Dict
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
import click
from rich.console import Console
from rich.table import Table, Text
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from pyzayo import ZayoClient
from .cli_root import cli
from pyzayo.consts import InventoryStatusOption
# -----------------------------------------------------------------------------
#
# TABLE CODE BEGINS
#
# -----------------------------------------------------------------------------
def colorize_status(status):
""" colorize the service.status field """
return Text(
status,
style={
InventoryStatusOption.active: "bright_green",
InventoryStatusOption.pending_change: "bright_yellow",
}.get(status),
)
def make_services_table(services: List[Dict]) -> Table:
"""
Create a Rich.Table of service inventory records.
Parameters
----------
services: List[Dict]
Service inventory records in the form of the API dict.
Returns
-------
The Table ready for console rendering.
"""
count = len(services)
title = f"Services ({count})" if count > 1 else "Service"
table = Table(
title=Text(title, style="bright_white", justify="left"),
show_header=True,
header_style="bold magenta",
show_lines=True,
)
table.add_column("Name")
table.add_column("Status")
table.add_column("Product")
table.add_column("Circuit Id")
table.add_column("Bandwidth")
table.add_column("Location A")
table.add_column("Location Z")
def make_location(_loc):
""" create address from location fields """
return f"{_loc['name']}\n{_loc['city']}, {_loc['state']} {_loc['postalCode']}"
for rec in services:
comps = rec["components"][0]
table.add_row(
rec["serviceName"],
colorize_status(rec["status"]),
f"{rec['productGroup']}\n{rec['productCategory']}",
comps["circuitId"],
comps["bandwidth"],
make_location(comps["locations"][0]),
make_location(comps["locations"][1]),
)
return table
# -----------------------------------------------------------------------------
#
# CLI CODE BEGINS
#
# -----------------------------------------------------------------------------
@cli.group("services")
def svc():
""" Inventory Service commands. """
pass
@svc.command(name="list")
def cli_svc_inventory_list():
"""
List service inventory.
"""
zapi = ZayoClient()
svc_list = zapi.get_services()
console = Console()
console.print(make_services_table(services=svc_list))
@svc.command(name="circuit")
@click.argument("circuit_id")
def cli_svc_by_circuit(circuit_id):
"""
Show service record for given circuit ID.
"""
zapi = ZayoClient()
cir_rec = zapi.get_service_by_circuit_id(circuit_id)
if not cir_rec:
print(f"Circuit not found: {circuit_id}")
return
console = Console()
console.print(make_services_table(services=[cir_rec]))
```
#### File: jeremyschulman/pyzayo/setup.py
```python
from setuptools import setup, find_packages
package_name = "pyzayo"
package_version = open("VERSION").read().strip()
def requirements(filename="requirements.txt"):
return open(filename.strip()).readlines()
with open("README.md", "r") as fh:
long_description = fh.read()
# -----------------------------------------------------------------------------
#
# Main Setup
#
# -----------------------------------------------------------------------------
setup(
name=package_name,
version=package_version,
description="Zayo API python client",
long_description=long_description,
long_description_content_type="text/markdown",
author="<NAME>",
packages=find_packages(),
include_package_data=True,
install_requires=requirements(),
entry_points={"console_scripts": ["zayocli = pyzayo.cli.__main__:main"]},
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: Customer Service",
"Intended Audience :: Information Technology",
"Programming Language :: Python :: 3.8",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Networking",
"License :: OSI Approved :: Apache Software License",
],
)
```
|
{
"source": "jeremyschulman/slackapptk3",
"score": 2
}
|
#### File: slackapptk3/slackapptk3/cli_click.py
```python
from typing import Coroutine
import asyncio
from functools import wraps
from contextvars import ContextVar
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
import click
from click import decorators
from click import Command, Option, Group
from slackapptk3.response import Response
from slackapptk3.request.command import CommandRequest
from first import first
import pyee
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
def version_option(version: str, *param_decls, **attrs):
"""
The Click version_option decorator used to support asyncio Slack
applications.
Parameters
----------
version:
The version string; required.
param_decls:
The list of parameter declarations as they are documented in
        Click.version_option. These are optional; if not given, "--version" is
        added just as it would be with the Click package.
attrs
The keyword arguments supported by Click.version_option; used in the
same manner as Click.
Returns
-------
The decorator to be used as one would normally use @click.version_option(...)
"""
async def send_version(ctx, message):
rqst: CommandRequest = ctx.obj["rqst"]
await Response(rqst).send(text=message)
def decorator(f):
prog_name = attrs.pop("prog_name", None)
message = attrs.pop("message", "%(prog)s, version %(version)s")
def callback(ctx, param, value): # noqa
if not value or ctx.resilient_parsing:
return
prog = prog_name
if prog is None:
prog = ctx.find_root().info_name
asyncio.create_task(
send_version(ctx, (message % {"prog": prog, "version": version}))
)
ctx.exit()
attrs.setdefault("is_flag", True)
attrs.setdefault("expose_value", False)
attrs.setdefault("is_eager", True)
attrs.setdefault("help", "Show the version and exit.")
attrs["callback"] = callback
return decorators.option(*(param_decls or ("--version",)), **attrs)(f)
return decorator
class SlackClickHelper(Command):
def __init__(self, *vargs, **kwargs):
self.event_id = kwargs.get("name")
super(SlackClickHelper, self).__init__(*vargs, **kwargs)
@staticmethod
def format_slack_usage_help(rqst: CommandRequest, ctx: click.Context, errmsg: str):
"""
This function returns a dictionary formatted with the Slack message
that will be sent to the User upon any command usage error. As a
Developer you may want to change the response content/format for this
type of help.
Parameters
----------
rqst: CommandRequest
The origin Slack command request that provides information about the command
requested. The rqst.rqst_data['command'] contains the slash-command used
and the rqst.rqst_data['text'] contains the command parameters as entered by the
User. The rqst.user_id contains the Slack UserId that originated the request.
            For more information about the CommandRequest class attributes, refer to that
class definition.
ctx: click.Context
The Click context processing the User command.
errmsg: str
            The specific usage error message, generally produced by the Click package depending
on the offending User input.
Returns
-------
dict
The Slack message body dictionary that will be returned to the Slack User.
"""
help_text = ctx.get_help()
msg_body = dict()
atts = msg_body["attachments"] = list()
try_cmd = f"{rqst.rqst_data['command']} {rqst.rqst_data['text']}"
user_id = rqst.user_id
atts.append(
dict(
color="#FF0000", # red
pretext=f"Hi <@{user_id}>, I could not run your command",
text=f"```{try_cmd}```",
fallback=try_cmd,
)
)
atts.append(dict(text=f"```{errmsg}```", fallback=errmsg))
atts.append(
dict(pretext="Command help", text=f"```{help_text}```", fallback=help_text)
)
return msg_body
@staticmethod
def format_slack_help(ctx: click.Context):
help_text = ctx.get_help()
return dict(text=f"*Command help:*\n```{help_text}```", fallback=help_text)
def get_help_option(self, ctx):
help_options = self.get_help_option_names(ctx)
if not help_options or not self.add_help_option:
return
def slack_show_help(_ctx: click.Context, param, value): # noqa
if value and not _ctx.resilient_parsing:
payload = self.format_slack_help(_ctx)
resp = Response(_ctx.obj["rqst"])
asyncio.create_task(resp.send(**payload))
_ctx.exit()
return Option(
help_options,
is_flag=True,
is_eager=True,
expose_value=False,
callback=slack_show_help,
help="Show this message and exit.",
)
def make_context(self, info_name, args, parent=None, **extra):
ctx = super(SlackClickHelper, self).make_context(
info_name=info_name, args=args, parent=parent, **extra
)
g_click_context.set(ctx)
return ctx
async def run(self, rqst, **_extras):
args = rqst.rqst_data["text"].split()
ctx_obj = dict(rqst=rqst, args=args)
try:
# Call the Click main method for this Command/Group instance. The
# result will either be that a handler returned a coroutine for
# async handling, or there is an Exception that needs to be
# handled.
cli_coro = self.main(
args=args, prog_name=self.name, obj=ctx_obj, standalone_mode=False
)
if isinstance(cli_coro, Coroutine):
return await cli_coro
except click.exceptions.UsageError as exc:
ctx = (
exc.ctx
or g_click_context.get()
or self.make_context(self.name, args, obj=ctx_obj)
)
payload = self.format_slack_usage_help(
rqst, ctx, errmsg=exc.format_message()
)
resp = Response(rqst)
await resp.send(**payload)
return
except click.exceptions.Exit:
return
class SlackClickCommand(SlackClickHelper, Command):
pass
class SlackClickGroup(SlackClickHelper, Group):
def __init__(self, *vargs, **kwargs):
self.ic = pyee.EventEmitter()
kwargs["invoke_without_command"] = True
super(SlackClickGroup, self).__init__(*vargs, **kwargs)
@staticmethod
def as_async_group(f):
orig_callback = f.callback
@wraps(f)
def new_callback(*vargs, **kwargs):
ctx = get_current_context()
if ctx.invoked_subcommand:
return
# presume the orig_callback was defined as an async def. Therefore
# defer the execution of the coroutine to the calling main.
return orig_callback(*vargs, **kwargs)
f.callback = new_callback
return f
def add_command(self, cmd, name=None):
# need to wrap Groups in async handler since the underlying Click code
# is assuming sync processing.
cmd.event_id = f"{self.event_id}.{name or cmd.name}"
if isinstance(cmd, SlackClickGroup):
cmd = self.as_async_group(cmd)
super(SlackClickGroup, self).add_command(cmd, name)
def command(self, *args, **kwargs):
kwargs["cls"] = SlackClickCommand
return super().command(*args, **kwargs)
def group(self, *args, **kwargs):
kwargs["cls"] = SlackClickGroup
return super().group(*args, **kwargs)
def on(self, cmd: SlackClickCommand):
def wrapper(f):
self.ic.on(cmd.event_id, f)
return wrapper
async def emit(self, rqst, event):
handler = first(self.ic.listeners(event))
if handler is None:
rqst.app.log.critical(f"No handler for command option '{event}'")
return
return await handler(rqst)
# -----------------------------------------------------------------------------
# WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!
# -----------------------------------------------------------------------------
# Monkey-Patching Click for Asyncio Support
# -----------------------------------------------------------------------------
# the click context "context var" is used to support asyncio environments; and
# the following private function _contextvar_get_current_context is
# monkeypatched into the Click package do avoid the use of the threading.local
# stack (as implemented in Click).
g_click_context = ContextVar("click_context")
def get_current_context(silent=False):
"""Returns the current click context. This can be used as a way to
access the current context object from anywhere. This is a more implicit
alternative to the :func:`pass_context` decorator. This function is
primarily useful for helpers such as :func:`echo` which might be
interested in changing its behavior based on the current context.
To push the current context, :meth:`Context.scope` can be used.
.. versionadded:: 5.0
:param silent: if set to `True` the return value is `None` if no context
is available. The default behavior is to raise a
:exc:`RuntimeError`.
"""
try:
return g_click_context.get()
except LookupError:
if not silent:
raise RuntimeError("There is no active click context.")
click.decorators.get_current_context = get_current_context
```
#### File: slackapptk3/slackapptk3/messenger.py
```python
from collections import UserDict
from typing import Any, Optional
from slack_sdk.errors import SlackApiError
from slack_sdk.web.async_client import AsyncWebClient
__all__ = ["Messenger"]
class Messenger(UserDict):
"""
The Messenger class is used to create an object that can respond back to the User
with the context of a received Request message. This use is suitable in contexts
such as code running in a background thread.
"""
def __init__(
self,
app, # SlackApp
response_url: Optional[str] = None,
channel: Optional[str] = None,
thread_ts: Optional[str] = None,
):
"""
Creates an instance of a Messenger based on the provided SlackAPp.
Parameters
----------
app: SlackApp
The app context
response_url: Optional[str]
If provided, this becomes the default response URL in use with the
send() method.
channel: Optional[str]
If provided, this becomes the default channel value in use with the
send_channel() method.
thread_ts: Optional[str]
If provided, this becomes the default thread timestamp to use,
and messages will be threaded.
"""
super(Messenger, self).__init__()
self.app = app
self.response_url = response_url
self.channel = channel
if thread_ts:
self["thread_ts"] = thread_ts
self.client = AsyncWebClient(self.app.config.token)
async def send_response(
self, response_url: Optional[str] = None, **kwargs: Optional[Any]
):
"""
        This method is used to send a message via the response_url rather
than using the api.slack.com endpoints.
Parameters
----------
response_url: str
The message will be POST to this URL; originates from a message received
from api.slack.com
Other Parameters
----------------
Any other kwargs are passed as content into the message.
Raises
------
SlackApiError upon error sending; HTTP status code other
than 200.
Returns
-------
True if the message was sent without error (HTTP code 200).
Notes
-----
Ideally this method should be a part of the `slackclient` BaseClient class to avoid
using the internals of the client instance. TODO: open issue with that repo.
"""
req_args = dict(
# contents of messenger[UserDict]
**self,
# any other API fields
**kwargs,
)
api_url = response_url or self.response_url
res = await self.client._request( # noqa
http_verb="POST", api_url=api_url, req_args=dict(json=req_args)
)
status = res["status_code"]
if status != 200:
raise SlackApiError(
message="Failed to send response_url: {}: status={}".format(
api_url, status
),
response=res,
)
return True
async def send(self, channel=None, **kwargs):
"""
Send a message to the User.
Parameters
----------
channel: str
Direct the message to channel, rather than original channel value
from instance initialization.
Other Parameters
----------------
user: str
send a private message (via postEphemeral) to user
"""
if "user" in kwargs:
api_call = self.client.chat_postEphemeral
else:
api_call = self.client.chat_postMessage
return await api_call(
channel=channel or self.channel,
# contents of messenger[UserDict]
**self,
# any other API fields provided by Caller
**kwargs,
)
```
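A minimal usage sketch of the `Messenger` class follows, assuming `app` is a SlackApp instance with a valid bot token in `app.config.token` and that the channel ID and response URL were captured from an earlier request; both values are placeholders.
```python
# Hedged usage sketch; `app`, the channel ID, and the response URL are placeholders.
from slackapptk3.messenger import Messenger

async def notify(app, channel_id: str, response_url: str):
    msgr = Messenger(app, response_url=response_url, channel=channel_id)

    # post into the channel via the Web API (chat.postMessage)
    await msgr.send(text="Background job finished :tada:")

    # or reply through the response_url captured from the original request
    await msgr.send_response(text="Background job finished :tada:")
```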
#### File: slackapptk3/request/outmoded.py
```python
from typing import Dict
import json
from slackapptk3.request.any import AnyRequest
__all__ = ["AnyRequest", "DialogRequest", "InteractiveMessageRequest"]
class DialogRequest(AnyRequest):
def __init__(self, app, payload: Dict):
"""
Inbound request originated from a Dialog user interaction.
Parameters
----------
app: SlackApp
payload: Dict
The request form 'payload' dict
Notes
-----
        Dialog requests are considered to be deprecated in favor of using Modals
and Blocks.
"""
super().__init__(
app=app,
rqst_type=payload["type"],
rqst_data=payload,
user_id=payload["user"]["id"],
)
self.state = json.loads(payload.get("state") or "{}")
class InteractiveMessageRequest(AnyRequest):
def __init__(self, app, payload: Dict):
"""
        Interactive Message attachments are an outmoded approach using Message Attachments.
The new approach is to use Blocks.
Parameters
----------
app
payload: Dict
Notes
-----
https://api.slack.com/docs/outmoded-messaging
"""
super().__init__(
app=app,
rqst_type=payload["type"],
rqst_data=payload,
user_id=payload["user"]["id"],
)
self.user_name = payload["user"]["name"]
self.channel = payload["channel"]["id"]
```
|
{
"source": "jeremyschulman/slackapptk",
"score": 2
}
|
#### File: slackapptk/request/view_inputs.py
```python
__all__ = ['get_input_value']
VIEW_INPUT_TYPE_VALUE = {
'plain_text_input': lambda e: e.get('value'),
'datepicker': lambda e: e.get('selected_date'),
# single select elements:
'static_select': lambda e: e.get('selected_option', {}).get('value'),
'external_select': lambda e: e.get('selected_option', {}).get('value'),
'users_select': lambda e: e.get('selected_user'),
'conversations_select': lambda e: e.get('selected_conversation'),
'channels_select': lambda e: e.get('selected_channel'),
'radio_buttons': lambda e: e.get('selected_option', {}).get('value'),
# multi-select elements
'multi_static_select': lambda e: [i['value'] for i in e.get('selected_options', {})],
'multi_external_select': lambda e: [i['value'] for i in e.get('selected_options', {})],
'multi_users_select': lambda e: e.get('selected_users'),
'multi_conversations_select': lambda e: e.get('selected_conversations'),
'multi_channels_select': lambda e: e.get('selected_channel'),
'checkboxes': lambda e: [i['value'] for i in e.get('selected_options', {})]
}
def get_input_value(ele):
value_type = ele['type']
return VIEW_INPUT_TYPE_VALUE[value_type](ele)
```
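To illustrate how `get_input_value()` is applied, the sketch below walks a Slack `view_submission` state dictionary; the block and action IDs are made up, but the payload shape follows Slack's standard view `state` structure.
```python
# Hedged sketch: extracting values from a view_submission "state" payload.
from slackapptk.request.view_inputs import get_input_value

def collect_view_values(view: dict) -> dict:
    values = view["state"]["values"]
    return {
        block_id: get_input_value(element)
        for block_id, block in values.items()
        for element in block.values()
    }

example_view = {
    "state": {
        "values": {
            "name_block": {"name_input": {"type": "plain_text_input", "value": "sw01"}},
            "when_block": {"when": {"type": "datepicker", "selected_date": "2021-06-01"}},
        }
    }
}
# collect_view_values(example_view) -> {"name_block": "sw01", "when_block": "2021-06-01"}
```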
|
{
"source": "jeremyschulman/slack-click",
"score": 2
}
|
#### File: example/clicker/command_click.py
```python
import click
from slack_bolt.request.async_request import AsyncBoltRequest as Request
from slack_click.async_click import click_async, version_option, AsyncSlackClickGroup
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from .app_data import app
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Define a Click group handler for the Slack Slash-Command
#
# Notes:
# @click_async decorator must be used for asyncio mode
# @click.pass_obj inserts the click.Context obj into the callback parameters
# By default the obj is the Slack-Bolt request instance; see the
# @app.command code further down.
# -----------------------------------------------------------------------------
@click.group(name="/click", cls=AsyncSlackClickGroup)
@version_option(version="0.1.0")
@click.pass_obj
@click_async
async def cli_click_group(request: Request):
"""
This is the Clicker /click command group
"""
say = request.context["say"]
await say("`/click` command invoked without any commands or options.")
# -----------------------------------------------------------------------------
# Register the command with Slack-Bolt
# -----------------------------------------------------------------------------
@app.command(cli_click_group.name)
async def on_clicker(request: Request, ack, say):
await ack()
await say("Got it.")
return await cli_click_group(prog_name=cli_click_group.name, obj=request)
# -----------------------------------------------------------------------------
# Define Click group commands; at this point everything is the same as writing
# any Click command decorator stack.
# -----------------------------------------------------------------------------
@cli_click_group.command("hello")
@click.pass_obj
async def click_hello_command(request: Request):
await request.context.say(f"Hi there <@{request.context.user_id}> :eyes:")
@cli_click_group.command("goodbye")
@click.option("--name", help="who dis?", required=True)
@click.pass_obj
async def click_goodby_command(request: Request, name: str):
await request.context.say(f"Good-bye {name} :wave:")
```
#### File: slack-click/slack_click/async_click.py
```python
from typing import Coroutine, Callable, Any
import asyncio
from functools import wraps
from contextvars import ContextVar
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
import click
from click import decorators
from click import Command, Option, Group
import pyee
from slack_bolt.request.async_request import AsyncBoltRequest as Request
from first import first
# -----------------------------------------------------------------------------
# Exports
# -----------------------------------------------------------------------------
__all__ = [
"version_option",
"click_async",
"AsyncSlackClickGroup",
"AsyncSlackClickCommand",
]
# -----------------------------------------------------------------------------
#
# CODE BEGINS
#
# -----------------------------------------------------------------------------
def click_async(callback=None):
def wrapped(func):
@wraps(func)
def new_callback(*vargs, **kwargs):
ctx = g_click_context.get()
if ctx.invoked_subcommand:
return
# presume the orig_callback was defined as an async def. Therefore
# defer the execution of the coroutine to the calling main.
return func(*vargs, **kwargs)
return new_callback
return wrapped if not callback else wrapped(callback)
def version_option(version: str, *param_decls, **attrs):
"""
The Click version_option decorator used to support asyncio Slack
applications.
Parameters
----------
version:
The version string; required.
param_decls:
The list of parameter declarations as they are documented in
        Click.version_option. These are optional; if not given, "--version" is
        added just as it would be with the Click package.
attrs
The keyword arguments supported by Click.version_option; used in the
same manner as Click.
Returns
-------
The decorator to be used as one would normally use @click.version_option(...)
"""
async def send_version(ctx: click.Context, message: str):
slack_cmd: SlackClickHelper = ctx.command
request = slack_cmd.obj_slack_request(ctx.obj)
await request.context["say"](f"```{message}```")
def decorator(func):
prog_name = attrs.pop("prog_name", None)
message = attrs.pop("message", "%(prog)s, version %(version)s")
def callback(ctx, param, value): # noqa
if not value or ctx.resilient_parsing:
return
prog = prog_name
if prog is None:
prog = ctx.find_root().info_name
asyncio.create_task(
send_version(ctx, (message % {"prog": prog, "version": version}))
)
ctx.exit()
attrs.setdefault("is_flag", True)
attrs.setdefault("expose_value", False)
attrs.setdefault("is_eager", True)
attrs.setdefault("help", "Show the version and exit.")
attrs["callback"] = callback
return decorators.option(*(param_decls or ("--version",)), **attrs)(func)
return decorator
class SlackClickHelper(Command):
def __init__(self, *vargs, **kwargs):
self.event_id = kwargs.get("name")
self.obj_slack_request: Callable[[Any], Request] = kwargs.pop(
"slack_request", self._slack_request_is_obj
)
super(SlackClickHelper, self).__init__(*vargs, **kwargs)
@staticmethod
def _slack_request_is_obj(obj):
return obj
@staticmethod
def slack_format_usage_help(command: dict, ctx: click.Context, errmsg: str):
"""
This function returns a dictionary formatted with the Slack message
that will be sent to the User upon any command usage error. As a
Developer you may want to change the response content/format for this
type of help.
Parameters
----------
command: dict
The command data from the Slack request
ctx: click.Context
The Click context processing the User command.
errmsg: str
            The specific usage error message, generally produced by the Click package depending
on the offending User input.
Returns
-------
dict
The Slack message body dictionary that will be returned to the Slack User.
"""
help_text = ctx.get_help()
msg_body = dict()
atts = msg_body["attachments"] = list()
try_cmd = f"{command['command']} {command['text']}"
user_id = command["user_id"]
atts.append(
dict(
color="#FF0000", # red
pretext=f"Hi <@{user_id}>, I could not run your command",
text=f"```{try_cmd}```",
fallback=try_cmd,
)
)
atts.append(dict(text=f"```{errmsg}```", fallback=errmsg))
atts.append(
dict(pretext="Command help", text=f"```{help_text}```", fallback=help_text)
)
return msg_body
@staticmethod
def slack_format_help(ctx: click.Context):
help_text = ctx.get_help()
return dict(text=f"*Command help:*\n```{help_text}```", fallback=help_text)
def get_help_option(self, ctx):
help_options = self.get_help_option_names(ctx)
if not help_options or not self.add_help_option:
return
def slack_show_help(_ctx: click.Context, param, value): # noqa
if value and not _ctx.resilient_parsing:
payload = self.slack_format_help(_ctx)
slack_cmd: SlackClickHelper = _ctx.command
request = slack_cmd.obj_slack_request(ctx.obj)
asyncio.create_task(request.context["say"](**payload))
_ctx.exit()
return Option(
help_options,
is_flag=True,
is_eager=True,
expose_value=False,
callback=slack_show_help,
help="Show this message and exit.",
)
def make_context(self, info_name, args, parent=None, **extra):
ctx = super(SlackClickHelper, self).make_context(
info_name=info_name, args=args, parent=parent, **extra
)
g_click_context.set(ctx)
return ctx
async def __call__(self, *vargs, **kwargs):
await self.main(*vargs, **kwargs)
async def main(
self,
args=None,
prog_name=None,
complete_var=None,
standalone_mode=False,
**extra,
):
if (obj := extra.get("obj")) is None:
raise ValueError("Missing obj to contain Slack-Bolt request, required.")
request = self.obj_slack_request(obj)
if not isinstance(request, Request):
raise ValueError(
"obj missing expected Slack-Bolt request instance, required."
)
# if args are not explicitly provided, then examine the slack command
# request 'text' field in the payload body.
if not args:
args = request.body.get("text", "").split()
try:
# Call the Click main method for this Command/Group instance. The
# result will either be that a handler returned a coroutine for
# async handling, or there is an Exception that needs to be
# handled.
cli_coro = super(SlackClickHelper, self).main(
args, prog_name, complete_var, standalone_mode, **extra
)
if isinstance(cli_coro, Coroutine):
return await cli_coro
except click.exceptions.UsageError as exc:
ctx = (
exc.ctx
or g_click_context.get()
or self.make_context(self.name, args, obj=obj)
)
payload = self.slack_format_usage_help(
request.body, ctx, errmsg=exc.format_message()
)
await request.context["say"](**payload)
return
except click.exceptions.Exit:
return
class AsyncSlackClickCommand(SlackClickHelper, Command):
pass
class AsyncSlackClickGroup(SlackClickHelper, Group):
def __init__(self, *vargs, **kwargs):
self.ic = pyee.EventEmitter()
kwargs.setdefault("invoke_without_command", True)
super(AsyncSlackClickGroup, self).__init__(*vargs, **kwargs)
@staticmethod
def as_async_group(func):
orig_callback = func.callback
@wraps(func)
def new_callback(*vargs, **kwargs):
ctx = _contextvar_get_current_context()
if ctx.invoked_subcommand:
return
# presume the orig_callback was defined as an async def. Therefore
# defer the execution of the coroutine to the calling main.
return orig_callback(*vargs, **kwargs)
func.callback = new_callback
return func
def add_command(self, cmd, name=None):
# need to wrap Groups in async handler since the underlying Click code
# is assuming sync processing.
cmd.event_id = f"{self.event_id}.{name or cmd.name}"
if isinstance(cmd, AsyncSlackClickGroup):
cmd = self.as_async_group(cmd)
super(AsyncSlackClickGroup, self).add_command(cmd, name)
def command(self, *args, **kwargs):
kwargs.setdefault("cls", AsyncSlackClickCommand)
return super().command(*args, **kwargs)
def group(self, *args, **kwargs):
kwargs.setdefault("cls", AsyncSlackClickGroup)
return super().group(*args, **kwargs)
# -------------------------------------------------------------------------
# Used for interactive workflows such as button-press so that the
# associated click groups/command is invoked.
# -------------------------------------------------------------------------
def on(self, cmd: AsyncSlackClickCommand):
def wrapper(func):
self.ic.on(cmd.event_id, func)
return wrapper
async def emit(self, request: Request, event: str):
handler = first(self.ic.listeners(event))
if handler is None:
log = request.context.logger
log.critical(f"No handler for command option '{event}'")
return
return await handler(request)
# -----------------------------------------------------------------------------
# WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!
# -----------------------------------------------------------------------------
# Monkey-Patching Click for Asyncio Support
# -----------------------------------------------------------------------------
# the click context "context var" is used to support asyncio environments; and
# the following private function _contextvar_get_current_context is
# monkeypatched into the Click package do avoid the use of the threading.local
# stack (as implemented in Click).
g_click_context = ContextVar("click_context")
def _contextvar_get_current_context(silent=False):
"""Returns the current click context. This can be used as a way to
access the current context object from anywhere. This is a more implicit
alternative to the :func:`pass_context` decorator. This function is
primarily useful for helpers such as :func:`echo` which might be
interested in changing its behavior based on the current context.
To push the current context, :meth:`Context.scope` can be used.
.. versionadded:: 5.0
:param silent: if set to `True` the return value is `None` if no context
is available. The default behavior is to raise a
:exc:`RuntimeError`.
"""
try:
return g_click_context.get()
except LookupError:
if not silent:
raise RuntimeError("There is no active click context.")
click.decorators.get_current_context = _contextvar_get_current_context
```
|
{
"source": "jeremyschulman/slack-scim-rbac",
"score": 2
}
|
#### File: example/rbacker/app_listeners.py
```python
import re
import json
# -----------------------------------------------------------------------------
# Public Imports
# -----------------------------------------------------------------------------
from slack_sdk.web.async_client import AsyncWebClient
from slack_sdk.models.views import View
from slack_sdk.models.blocks import SectionBlock, MarkdownTextObject as MdText
from slack_bolt.async_app import (
AsyncBoltContext as Context,
)
from slack_bolt.async_app import AsyncSay as Say
from slack_bolt.context.ack.async_ack import AsyncAck as Ack
from slack_scim_rbac.middleware import AsyncSlackScimRBAC
# -----------------------------------------------------------------------------
# Private Imports
# -----------------------------------------------------------------------------
from .app_data import app
@app.event("app_mention")
async def app_mention(context: Context, say: Say):
await say(f"You mentioned me <@{context.user_id}>?")
# -----------------------------------------------------------------------
# Messages to the app in the app specific channel
# -----------------------------------------------------------------------
@app.message(re.compile("hi", re.I))
async def app_say_ohai(context: Context, say: Say):
await say(f"Ohai <@{context.user_id}>")
@app.message(re.compile("show port", re.I))
async def app_show_port(context: Context, say: Say):
await say(f"getting port status for you <@{context.user_id}> ... standby")
async def no_your_cant(context: Context, say: Say):
await say(
f"Nope, sorry <@{context.user_id}>, you don't have permission to do that."
)
@app.message(
re.compile("bounce port", re.I),
middleware=[
AsyncSlackScimRBAC(
app_name=app.name, groups={"ChatOps-bozo"},
error_response=no_your_cant
)
],
)
async def app_bounce_port(context: Context, say: Say):
await say(f"bouncing port for you <@{context.user_id}> ... standby")
# -----------------------------------------------------------------------
# Slash commands
# -----------------------------------------------------------------------
async def is_bounce_port_command(command: dict):
return "bounce port" in command["text"]
async def modal_no_you_cant(client: AsyncWebClient, body, context: Context):
msg = f"Nope! Sorry <@{<EMAIL>id}> but you cannot do that!"
view = View(title="Permission Denied!", type="modal", close="Bummer")
view.blocks = [SectionBlock(text=MdText(text=msg))]
await client.views_open(trigger_id=body["trigger_id"], view=view)
@app.command(
command="/rbacker",
matchers=[is_bounce_port_command],
middleware=[
AsyncSlackScimRBAC(
app_name=app.name,
groups={"ChatOps-bozo"},
error_response=modal_no_you_cant,
)
],
)
async def slash_rbacker_bounce_port(ack: Ack, say: Say, context: Context):
await ack()
await say(
f"Already then, <@{context.user_id}>, let's get to bouncing that port for ya!"
)
# -----------------------------------------------------------------------
# Unhandled messages
# -----------------------------------------------------------------------
@app.event("message")
async def handle_message_events(body, logger):
logger.info(json.dumps(body, indent=3))
@app.command("/rbacker")
async def handle_some_command(ack, body, logger):
await ack()
logger.info(json.dumps(body, indent=3))
```
|
{
"source": "jeremyselan/OpenColorIO",
"score": 3
}
|
#### File: pyglue/DocStrings/CDLTransform.py
```python
class CDLTransform:
"""
CDLTransform
"""
def __init__(self):
pass
def CreateFromFile(self, src, cccid):
pass
def equals(self):
pass
def getXML(self):
pass
def setXML(self, xmltext):
pass
def getSlope(self):
pass
def getOffset(self):
pass
def getPower(self):
pass
def getSOP(self):
pass
def getSat(self):
pass
def setSlope(self, slope):
"""
setSlope(pyData)
Sets the slope ('S' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
        :param pyData: list of three floats
:type pyData: object
"""
pass
def setOffset(self, offset):
"""
setOffset(pyData)
Sets the offset ('O' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
:param pyData: list of three floats
:type pyData: object
"""
pass
def setPower(self, power):
"""
setPower(pyData)
Sets the power ('P' part of SOP) in :py:class:`PyOpenColorIO.CDLTransform`.
:param pyData: list of three floats
:type pyData: object
"""
pass
def setSOP(self, sop):
"""
setSOP(pyData)
Sets SOP in :py:class:`PyOpenColorIO.CDLTransform`.
:param pyData: list of nine floats
:type pyData: object
"""
pass
def setSat(self, sat):
"""
        setSat(pyData)
Sets SAT (saturation) in :py:class:`PyOpenColorIO.CDLTransform`.
:param pyData: saturation
:type pyData: float
"""
pass
def getSatLumaCoefs(self):
"""
        getSatLumaCoefs()
Returns the SAT (saturation) and luma coefficients in :py:class:`CDLTransform`.
:return: saturation and luma coefficients
:rtype: list of floats
"""
pass
def getID(self):
"""
getID()
Returns the ID from :py:class:`PyOpenColorIO.CDLTransform`.
:return: ID
:rtype: string
"""
pass
def setID(self, id):
"""
setID(str)
Sets the ID in :py:class:`PyOpenColorIO.CDLTransform`.
:param str: ID
:type str: string
"""
pass
def getDescription(self):
"""
getDescription()
Returns the description of :py:class:`PyOpenColorIO.CDLTransform`.
:return: description
:rtype: string
"""
pass
def setDescription(self, desc):
"""
setDescription(str)
Sets the description of :py:class:`PyOpenColorIO.CDLTransform`.
:param str: description
:type str: string
"""
pass
```
|
{
"source": "jeremyselan/OpenShadingLanguage",
"score": 3
}
|
#### File: OpenShadingLanguage/testsuite/runtest.py
```python
import os
import glob
import sys
import platform
import subprocess
from optparse import OptionParser
# Handy functions...
def osl_app (app):
# when we use Visual Studio, built applications are stored
# in the app/$(OutDir)/ directory, e.g., Release or Debug.
# In that case the special token "$<CONFIGURATION>" which is replaced by
# the actual configuration if one is specified. "$<CONFIGURATION>" works
# because on Windows it is a forbidden filename due to the "<>" chars.
if (platform.system () == 'Windows'):
return app + "/$<CONFIGURATION>/" + app + " "
return path + "/" + app + "/" + app + " "
# Construct a command that will compile the shader file, appending output to
# the file "out.txt".
def oslc (args) :
return (osl_app("oslc") + args + " >> out.txt 2>&1 ;\n")
# Construct a command that will run testshade on the compiled shader, appending output to
# the file "out.txt".
def testshade (args) :
return (osl_app("testshade") + args + " >> out.txt 2>&1 ;\n")
def runtest (command, outputs, failureok=0, failthresh=0, failpercent=0) :
parser = OptionParser()
parser.add_option("-p", "--path", help="add to executable path",
action="store", type="string", dest="path", default="")
parser.add_option("--devenv-config", help="use a MS Visual Studio configuration",
action="store", type="string", dest="devenv_config", default="")
parser.add_option("--solution-path", help="MS Visual Studio solution path",
action="store", type="string", dest="solution_path", default="")
(options, args) = parser.parse_args()
# print ("working dir = " + tmpdir)
os.chdir (srcdir)
open ("out.txt", "w").close() # truncate out.txt
if options.path != "" :
sys.path = [options.path] + sys.path
print "command = " + command
if (platform.system () == 'Windows'):
        # Replace the /$<CONFIGURATION>/ component added in osl_app
oiio_app_replace_str = "/"
if options.devenv_config != "":
oiio_app_replace_str = '/' + options.devenv_config + '/'
command = command.replace ("/$<CONFIGURATION>/", oiio_app_replace_str)
test_environ = None
if (platform.system () == 'Windows') and (options.solution_path != "") and \
(os.path.isdir (options.solution_path)):
test_environ = os.environ
libOIIO_path = options.solution_path + "\\libOpenImageIO\\"
if options.devenv_config != "":
libOIIO_path = libOIIO_path + '\\' + options.devenv_config
test_environ["PATH"] = libOIIO_path + ';' + test_environ["PATH"]
for sub_command in command.split(';'):
cmdret = subprocess.call (sub_command, shell=True, env=test_environ)
if cmdret != 0 and failureok == 0 :
print "#### Error: this command failed: ", sub_command
print "FAIL"
return (1)
err = 0
for out in outputs :
ok = 0
# We will first compare out to ref/out, and if that fails, we will
# compare it to everything else in the ref directory. That allows us
# to have multiple matching variants for different platforms, etc.
for testfile in (["ref/"+out] + glob.glob (os.path.join ("ref", "*"))) :
#print ("comparing " + out + " to " + testfile)
extension = os.path.splitext(out)[1]
if extension == ".tif" or extension == ".exr" :
# images -- use idiff
cmpcommand = (os.path.join (os.environ['OPENIMAGEIOHOME'], "bin", "idiff")
+ " -fail 0"
+ " -failpercent " + str(failpercent)
+ " -hardfail " + str(failthresh)
+ " -warn " + str(2*failthresh)
+ " " + out + " " + testfile)
else :
# anything else, mainly text files
if (platform.system () == 'Windows'):
diff_cmd = "fc "
else:
diff_cmd = "diff "
cmpcommand = (diff_cmd + out + " " + testfile)
print "cmpcommand = " + cmpcommand
cmpresult = os.system (cmpcommand)
if cmpresult == 0 :
print ("PASS: " + out + " matches " + testfile)
ok = 1
break # we're done
if ok == 0:
err = 1
print "NO MATCH for " + out
print "FAIL " + out
return (err)
##########################################################################
#
# Get standard testsuite test arguments: srcdir exepath
#
srcdir = "."
tmpdir = "."
path = "../.."
if len(sys.argv) > 1 :
srcdir = sys.argv[1]
srcdir = os.path.abspath (srcdir) + "/"
os.chdir (srcdir)
if len(sys.argv) > 2 :
path = sys.argv[2]
tmpdir = "."
tmpdir = os.path.abspath (tmpdir)
refdir = "ref/"
parent = "../../../../../"
outputs = [ "out.txt" ] # default
command = ""
failureok = 0
failthresh = 0.004
failpercent = 0.02
#print ("srcdir = " + srcdir)
#print ("tmpdir = " + tmpdir)
#print ("path = " + path)
#print ("refdir = " + refdir)
#
# Read the individual run.py file for this test, which will define
# command and outputs.
#
if os.path.exists("run.py") :
execfile ("run.py")
# Force out.txt to be in the outputs
if "out.txt" not in outputs :
outputs.append ("out.txt")
# Force any local shaders to compile automatically, prepending the
# compilation onto whatever else the individual run.py file requested.
compiles = ""
for testfile in glob.glob ("*.osl") :
compiles += oslc (testfile)
command = compiles + command
# If either out.exr or out.tif is in the reference directory but somehow
# is not in the outputs list, put it there anyway!
if (os.path.exists("ref/out.exr") and ("out.exr" not in outputs)) :
outputs.append ("out.exr")
if (os.path.exists("ref/out.tif") and ("out.tif" not in outputs)) :
outputs.append ("out.tif")
# Run the test and check the outputs
ret = runtest (command, outputs, failureok=failureok,
failthresh=failthresh, failpercent=failpercent)
sys.exit (ret)
```
|
{
"source": "JeremySilvaSilva/Django-Rest-Framework-User-Template",
"score": 2
}
|
#### File: api/user/controller.py
```python
from app.user.models import User
from .values import UserValues
class UserController:
def __init__(self):
self.all_user = UserValues.all_user
def getUsers(self):
user = User.objects.values(
*self.all_user
)
return user
userController = UserController()
```
#### File: app/core/views.py
```python
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from django.shortcuts import render
def email_send(request):
from utils.emailback import send_email
    '''Bootstrapemail parses HTML so it can be rendered in email clients'''
#https://bootstrapemail.com
send_email()
return render(request,'email/activate.html')
@api_view(['GET'])
def api_root(request, format=None):
return Response({
'User': reverse('user:user_api_root', request=request, format=format),
})
```
|
{
"source": "JeremySilverTongue/trundl",
"score": 3
}
|
#### File: src/algorithms/Navigation.py
```python
import math
import sys
from Queue import PriorityQueue
def heuristic_cost_estimate(location, goal):
return math.sqrt((location.x - goal.x) ** 2 + (location.y - goal.y) ** 2)
UP = "up"
DOWN = "down"
LEFT = "left"
RIGHT = "right"
UP_LEFT = "up_left"
UP_RIGHT = "up_right"
DOWN_LEFT = "down_left"
DOWN_RIGHT = "down_right"
GOAL = "goal"
#
directions = {
RIGHT: (0, 1),
UP_RIGHT: (-1, 1),
UP: (-1, 0),
UP_LEFT: (-1, -1),
LEFT: (0, -1),
DOWN_LEFT: (1, -1),
DOWN: (1, 0),
DOWN_RIGHT: (1, 1),
GOAL: (0, 0)
}
def process_grid(goal, occupancy, danger_grid, danger_weight=1):
# visited = [(sys.maxint, None) for _ in xrange(occupancy.shape[0]) for _ in xrange(occupancy.shape[1])]
visited = [[(sys.maxint, None) for _ in xrange(occupancy.shape[1])] for _ in xrange(occupancy.shape[0])]
visited[goal[0]][goal[1]] = (0, GOAL)
frontier = PriorityQueue()
frontier.put((0, goal))
while not frontier.empty():
cost_so_far, position = frontier.get()
for direction, delta in directions.iteritems():
dy, dx = -delta[0], -delta[1]
y, x = neighbor_position = position[0] + dy, position[1] + dx
if 0 <= y < occupancy.shape[0] and 0 <= x < occupancy.shape[1]:
danger = danger_grid[y][x]
if occupancy[y][x] == 1:
danger = 1000000
new_cost = cost_so_far + math.sqrt(dx ** 2 + dy ** 2) + danger * danger_weight
if visited[y][x][1] is None:
frontier.put((new_cost, neighbor_position))
visited[y][x] = min((new_cost, direction), visited[y][x])
return visited
def get_path(start, plan):
path = [start]
y, x = start
while plan[y][x][1] != GOAL:
dy, dx = directions[plan[y][x][1]]
y += dy
x += dx
path.append((y, x))
return path
```
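The planner above explores outward from the goal with a priority queue and then walks the stored directions back from the start. A minimal usage sketch, assuming NumPy arrays for the occupancy and danger grids; the tiny 3x3 map below is invented for illustration and is not part of the original file:

```python
# Hypothetical demo of process_grid/get_path; the map and numpy arrays are assumptions.
# from algorithms.Navigation import process_grid, get_path  # if used outside this module
import numpy as np

occupancy = np.array([[0, 0, 0],
                      [0, 1, 0],   # one blocked cell in the middle
                      [0, 0, 0]])
danger = np.zeros(occupancy.shape)   # no soft obstacles
goal = (0, 2)                        # (y, x) of the goal cell
plan = process_grid(goal, occupancy, danger, danger_weight=1)
print(get_path((2, 0), plan))        # list of (y, x) steps from (2, 0) to the goal
```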
#### File: src/algorithms/ParticleFilter.py
```python
import random
class ParticleFilter:
def __init__(self, prior_distribution, particle_count=1000):
self.particle_count = particle_count
self.particles = [prior_distribution() for _ in xrange(particle_count)]
def move(self, movement, movement_model):
self.particles = [movement_model(particle, movement) for particle in self.particles]
def measure(self, measurement, measurement_likelihood):
""" Algorithm from table 4.3 of Probabilistic Robotics by <NAME> """
weights = [measurement_likelihood(measurement, particle) for particle in self.particles]
if sum(weights) > 0:
weights = [weight / sum(weights) for weight in weights]
self.low_variance_sampler(self.particles, weights)
else:
print "We're totally off the scent"
def low_variance_sampler(self, new_particles, weights):
""" Algorithm from table 4.4 of Probabilistic Robotics by my boy Sebastian"""
self.particles = []
r = random.random() / self.particle_count
c = weights[0]
i = 0
for m in xrange(self.particle_count):
u = r + 1.0 * m / self.particle_count
while u > c:
i += 1
c += weights[i]
self.particles.append(new_particles[i])
```
#### File: src/gui/DrawingUtils.py
```python
import math
BOT_TAG = "bots"
MEASUREMENT_TAG = "measurement"
GOAL_TAG = "goal"
def clear_all(canvas):
clear_bots(canvas)
clear_measurement(canvas)
clear_goal(canvas)
def clear_bots(canvas):
canvas.delete(BOT_TAG)
def draw_bots(canvas, real_bot, guesses=None, scale=3, bot_radius=5, guess_radius=1):
if guesses is None:
guesses = []
clear_bots(canvas)
for guess in guesses:
canvas.create_oval(
scale * guess[1] - guess_radius,
scale * guess[0] - guess_radius,
scale * guess[1] + guess_radius,
scale * guess[0] + guess_radius,
tags=BOT_TAG,
fill="yellow")
canvas.create_oval(scale * real_bot[1] - bot_radius,
scale * real_bot[0] - bot_radius,
scale * real_bot[1] + bot_radius,
scale * real_bot[0] + bot_radius,
tags=BOT_TAG,
fill="green")
def clear_goal(canvas):
canvas.delete(GOAL_TAG)
def draw_goal(canvas, location, scale=3, radius=5):
    clear_goal(canvas)
canvas.create_oval(scale * location[1] - radius,
scale * location[0] - radius,
scale * location[1] + radius,
scale * location[0] + radius,
tags=GOAL_TAG,
fill="red")
def clear_measurement(canvas):
canvas.delete(MEASUREMENT_TAG)
def draw_measurement(canvas, bot, measurement, scale=3):
clear_measurement(canvas)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1] + scale * measurement[0],
scale * bot[0],
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1] + scale * measurement[1] / math.sqrt(2),
scale * bot[0] - scale * measurement[1] / math.sqrt(2) + scale,
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1],
scale * bot[0] - scale * measurement[2] + scale,
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
                       scale * bot[1] - scale * measurement[3] / math.sqrt(2) + scale,
                       scale * bot[0] - scale * measurement[3] / math.sqrt(2) + scale,
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1] - scale * measurement[4] + scale,
scale * bot[0],
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1] - scale * measurement[5] / math.sqrt(2) + scale,
scale * bot[0] + scale * measurement[5] / math.sqrt(2),
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1],
scale * bot[0] + scale * measurement[6],
tags=MEASUREMENT_TAG,
fill="purple"
)
canvas.create_line(scale * bot[1], scale * bot[0],
scale * bot[1] + scale * measurement[7] / math.sqrt(2),
scale * bot[0] + scale * measurement[7] / math.sqrt(2),
tags=MEASUREMENT_TAG,
fill="purple"
)
```
#### File: src/maps/OccupancyGrid.py
```python
import math
import numpy as np
from PIL import Image
from scipy.ndimage.filters import gaussian_filter
class OccupancyGrid:
def __init__(self, file_name, danger_variance=5, preprocess=True):
img = Image.open(file_name)
self.occupancy = 1 - np.array(img.convert("L")) / 255
self.danger = gaussian_filter(1. * self.occupancy, sigma=math.sqrt(danger_variance), mode="constant", cval=1)
self.danger = np.maximum(self.danger, self.occupancy * 100000)
self.measure_memo = {}
if preprocess:
self.preprocess()
def preprocess(self, event=None):
for y in xrange(self.occupancy.shape[0]):
for x in xrange(self.occupancy.shape[1]):
self.eight_way_measurement((y, x))
def eight_way_measurement(self, position, max_range=500, img=None):
"""
Gives the result of eight perfect depth sensors, arranged at the compass points, starting with the positive
x axis and moving counter clockwise. Note that the y-axis is the first coordinate, and points down X_x
"""
position = int(position[0]), int(position[1])
if position not in self.measure_memo:
self.measure_memo[position] = (
self.simple_range_measurement(position, 0, 1, img),
self.simple_range_measurement(position, -1, 1, img),
self.simple_range_measurement(position, -1, 0, img),
self.simple_range_measurement(position, -1, -1, img),
self.simple_range_measurement(position, 0, -1, img),
self.simple_range_measurement(position, 1, -1, img),
self.simple_range_measurement(position, 1, 0, img),
self.simple_range_measurement(position, 1, 1, img)
)
return [min(max_range, measurement) for measurement in self.measure_memo[position]]
def simple_range_measurement(self, position, dy, dx, img=None):
measurement = 0
y, x = position[0], position[1]
step = np.math.sqrt(dy ** 2 + dx ** 2)
while 0 < y < self.occupancy.shape[0] and 0 < x < self.occupancy.shape[1]:
if self.occupancy[y][x] == 1:
break
if img is not None:
img[y, x] = (0, 255, 0)
y += dy
x += dx
measurement += step
return measurement
# def get_range_measurement(self, position, azimuth, max_range=500):
# pass
#
# def pixels_to_check(self, azimuth, max_range):
# first_oct = azimuth % np.math.pi / 4
# xf = max_range * np.math.cos(first_oct)
# yf = max_range * np.math.sin(first_oct)
# error = -1
# delta_err = yf / xf
# y = 0
# first_oct_pix = []
# for x in xrange(int(np.math.ceil(xf))):
# first_oct_pix.append((x, y))
# error += delta_err
# if error >= 0:
# y += 1
# error -= 1
#
# return first_oct_pix
def test():
grid = OccupancyGrid("map.png", 5, 3)
# img = Image.open("map.png").convert("RGB")
#
# pix = img.load()
#
# print pix[50, 23]
# pix[50, 23] = (255, 0, 0)
#
# print grid.eight_way_measurement((50, 23), img=pix)
# img.show()
# print grid.occupancy
# img = Image.fromarray(grid.occupancy)
# img.show()
if __name__ == "__main__":
test()
```
#### File: src/maps/Point.py
```python
import math
import unittest
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def cross_mag(self, other):
return self.x * other.y - self.y * other.x
def __sub__(self, other):
return Point(self.x - other.x, self.y - other.y)
def __add__(self, other):
return Point(self.x + other.x, self.y + other.y)
def __mul__(self, other):
return Point(other * self.x, other * self.y)
def length(self):
return math.sqrt(self.x ** 2 + self.y ** 2)
def distance(self, other):
return (self - other).length()
def with_azimuth_and_range(self, azimuth, r=0):
return Point(self.x + r * math.cos(azimuth), self.y + r * math.sin(azimuth))
def __str__(self):
return "Point({:.2f}, {:.2f})".format(self.x, self.y)
def __repr__(self):
return "Point({}, {})".format(self.x, self.y)
class PointTest(unittest.TestCase):
pass
# def test_parallel_cross_magEasyHit(self):
# wall = Polygon([Point(0, 0), Point(0, 2)])
# expected = 1
# result = wall.range_measurement(Point(1, 1), math.pi, 0, 100)
# self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main()
```
#### File: trundl/src/OneDimensionalContinuousWorld.py
```python
import math
import random
import matplotlib.pyplot as plt
from algorithms.ParticleFilter import ParticleFilter
WORLD_SIZE = 10
MARKERS = [1, 2, 5, 6, 8]
MAX_RANGE = 5
HIT_VARIANCE = .75
MOVEMENT_VARIANCE = 5
def sample_measurement_distribution(actual_range):
measurement = min(random.normalvariate(actual_range, HIT_VARIANCE), MAX_RANGE)
return max(0, measurement)
def true_ranges(position):
left_markers = filter(lambda x: x < position, MARKERS)
right_markers = filter(lambda x: x > position, MARKERS)
true_left_range = MAX_RANGE if len(left_markers) == 0 else position - max(left_markers)
true_right_range = MAX_RANGE if len(right_markers) == 0 else min(right_markers) - position
return true_left_range, true_right_range
def make_measurement(position):
true_left_range, true_right_range = true_ranges(position)
return sample_measurement_distribution(true_left_range), sample_measurement_distribution(true_right_range)
def range_likelihood(true_range, measurement):
return 1 / math.sqrt(2 * math.pi * HIT_VARIANCE) * math.exp(-1 / 2 * (true_range - measurement) ** 2 / HIT_VARIANCE)
def measurement_likelihood(measurement, position):
left_range, right_range = true_ranges(position)
left_measurement, right_measurement = measurement
return range_likelihood(left_range, left_measurement) * range_likelihood(right_range, right_measurement)
def prior_distribution():
return random.random() * WORLD_SIZE
def movement_model(position, intended_movement):
return position + intended_movement * max(random.normalvariate(1, .1), 0)
def show_particles(particles):
plt.hist(particles, bins=100, range=(0, WORLD_SIZE))
plt.show()
def test():
# random.seed(42)
bot = 5.5
print true_ranges(bot)
print make_measurement(bot)
particle_filter = ParticleFilter(prior_distribution)
show_particles(particle_filter.particles)
print MARKERS
for move in [0, 1, 1, -3, -1, 5]:
bot = movement_model(bot, move)
print "Bot moving", move
particle_filter.move(move, movement_model)
show_particles(particle_filter.particles)
print "Measuring with bot at", bot
particle_filter.measure(make_measurement(bot), measurement_likelihood)
show_particles(particle_filter.particles)
# n, bins, patches = plt.hist(particle_filter.particles, 50, normed=1, facecolor='green', alpha=0.75)
# plt.hist([sample_measurement_distribution(2) for _ in xrange(1000)])
# plt.hist([movement_model(2, 2) for _ in xrange(1000)])
# plt.show()
if __name__ == "__main__":
test()
```
|
{
"source": "JeremySimpson/redditstream",
"score": 3
}
|
#### File: JeremySimpson/redditstream/example.py
```python
import logging
from stream import RedditStream
URL_REDDIT_ALL_SUBMISSIONS = 'https://oauth.reddit.com/r/all/new'
def main():
    USERNAME = 'username_here'
PASSWORD = '<PASSWORD>'
CLIENT_ID = 'client_id_here'
CLIENT_SECRET = 'client_secret_here'
USER_AGENT = 'your_user_agent'
logging.basicConfig()
rs = RedditStream(USERNAME, PASSWORD, CLIENT_ID, CLIENT_SECRET, USER_AGENT)
for e in rs.stream_listing(URL_REDDIT_ALL_SUBMISSIONS):
print e
if __name__ == '__main__':
main()
```
|
{
"source": "jeremysintes/cookiecutter-flask",
"score": 2
}
|
#### File: {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/views.py
```python
from {{cookiecutter.project_slug}} import app
from flask import render_template
#--------------------------------------------------------------------------------------------------------------------
# HOME:
# All templates related to the user's home pages
#
#--------------------------------------------------------------------------------------------------------------------
# Home page
@app.route('/')
@app.route('/index')
def index():
return render_template("index.html",
title='Home')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html', title='page not found'), 404
```
|
{
"source": "JeremySkalla/SmashGGPythonWrapper",
"score": 3
}
|
#### File: SmashGGPythonWrapper/pysmashgg/api.py
```python
import time
import requests
from pysmashgg.exceptions import *
# Runs queries
def run_query(query, variables, header, auto_retry):
# This helper function is necessary for TooManyRequestsErrors
def _run_query(query, variables, header, auto_retry, seconds):
json_request = {'query': query, 'variables': variables}
try:
request = requests.post(url='https://api.smash.gg/gql/alpha', json=json_request, headers=header)
if request.status_code == 400:
raise RequestError
elif request.status_code == 429:
raise TooManyRequestsError
elif 400 <= request.status_code < 500:
raise ResponseError
elif 500 <= request.status_code < 600:
raise ServerError
elif 300 <= request.status_code < 400:
raise NoIdeaError
response = request.json()
return response
except RequestError:
print("Error 400: Bad request (probably means your key is wrong)")
return
except TooManyRequestsError:
if auto_retry:
print("Error 429: Sending too many requests right now, trying again in {} seconds".format(seconds))
time.sleep(seconds)
return _run_query(query, variables, header, auto_retry, seconds*2)
else:
print("Error 429: Sending too many requests right now")
return
except ResponseError:
print("Error {}: Unknown request error".format(request.status_code))
return
except ServerError:
print("Error {}: Unknown server error".format(request.status_code))
return
except NoIdeaError:
print("Error {}: I literally have no idea how you got this status code, please send this to me".format(request.status_code))
return
return _run_query(query, variables, header, auto_retry, 10)
```
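For context, `run_query` POSTs a GraphQL document plus variables to the smash.gg endpoint and, when `auto_retry` is enabled, backs off and retries on HTTP 429. A hedged usage sketch follows; the query text, slug and Bearer-token header are illustrative assumptions, not taken from the package:

```python
# Hypothetical caller of run_query; the query, slug and header layout are assumptions.
from pysmashgg.api import run_query

QUERY = """
query TournamentQuery($slug: String) {
  tournament(slug: $slug) { id name }
}
"""
variables = {"slug": "example-tournament"}
header = {"Authorization": "Bearer YOUR_TOKEN"}

response = run_query(QUERY, variables, header, auto_retry=True)
print(response)
```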
|
{
"source": "jeremysprofile/smtpdfix",
"score": 3
}
|
#### File: smtpdfix/smtpdfix/authenticator.py
```python
class Authenticator():
def validate(self, username, password):
raise NotImplementedError() # pragma: no cover
def verify(self, username):
raise NotImplementedError() # pragma: no cover
def get_password(self, username):
raise NotImplementedError() # pragma: no cover
```
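The class above is only an interface; a test fixture supplies the concrete behaviour. A minimal sketch of an implementation, assuming an in-memory dict of users (not the library's own class):

```python
# Illustrative subclass; the dict-backed store is an assumption, not part of smtpdfix.
from smtpdfix.authenticator import Authenticator

class DictAuthenticator(Authenticator):
    def __init__(self, users):
        self._users = users  # e.g. {"user": "password"}

    def validate(self, username, password):
        return self._users.get(username) == password

    def verify(self, username):
        return username in self._users

    def get_password(self, username):
        return self._users.get(username)
```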
#### File: smtpdfix/smtpdfix/lazy.py
```python
from lazy_object_proxy import Proxy
def lazy_class(cls):
class Lazy():
def __new__(cls, *args, **kwargs):
orig_cls = type(cls.__name__, (cls,), {})
orig_cls.__new__ = lambda cl_, *args, **kwargs: object.__new__(cl_)
return Proxy(lambda: orig_cls(*args, **kwargs))
lazy_class = type(cls.__name__, (cls, Lazy), {})
return lazy_class
```
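`lazy_class` swaps the decorated class's construction for a `lazy_object_proxy.Proxy`, so `__init__` only runs when the instance is first touched. A small invented demonstration (not from the test suite):

```python
# Hypothetical demonstration of lazy_class; the Expensive class is made up.
from smtpdfix.lazy import lazy_class

@lazy_class
class Expensive:
    def __init__(self):
        print("building...")  # deferred until the proxy is first used
        self.value = 42

obj = Expensive()   # nothing is constructed yet
print(obj.value)    # triggers construction: prints "building..." and then 42
```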
#### File: smtpdfix/tests/test_fixture.py
```python
import os
from collections import namedtuple
from smtplib import (SMTP, SMTP_SSL, SMTPAuthenticationError,
SMTPResponseException)
from unittest import mock
import pytest
from smtpdfix.handlers import _base64_encode as encode
@pytest.fixture
def user():
user = namedtuple("User", "username, password")
user.username = os.getenv("SMTPD_USERNAME", "user")
user.password = os.getenv("SMTPD_PASSWORD", "password")
return user
def test_init(smtpd):
host = os.getenv("SMTPD_HOST", "127.0.0.1")
port = int(os.getenv("SMTPD_PORT", "8025"))
assert smtpd.hostname == host
assert smtpd.port == port
assert len(smtpd.messages) == 0
def test_init_ssl(mock_smtpd_use_ssl, smtpd, msg):
with SMTP_SSL(smtpd.hostname, smtpd.port) as client:
client.send_message(msg)
assert len(smtpd.messages) == 1
def test_HELO(smtpd):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.helo()
helo = client.helo_resp
assert helo.startswith(b"AUTH")
def test_AUTH_unknown_mechanism(mock_smtpd_use_starttls, smtpd):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
client.ehlo()
code, response = client.docmd("AUTH", args="FAKEMECH")
assert code == 504
def test_AUTH_LOGIN_abort(mock_smtpd_use_starttls, smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
client.ehlo()
code, resp = client.docmd("AUTH", f"LOGIN {encode(user.username)}")
assert code == 334
code, resp = client.docmd("*")
assert code == 501
def test_AUTH_LOGIN_success(mock_smtpd_use_starttls, smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
client.ehlo()
code, resp = client.docmd("AUTH", f"LOGIN {encode(user.username)}")
assert code == 334
assert resp == bytes(encode("Password"), "ascii")
code, resp = client.docmd(f"{encode(user.password)}")
assert code == 235
def test_AUTH_PLAIN(mock_smtpd_use_starttls, smtpd, user):
enc = encode(f"{user.username} {user.password}")
cmd_text = f"PLAIN {enc}"
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
client.ehlo()
(code, resp) = client.docmd("AUTH", args=cmd_text)
assert code == 235
def test_AUTH_PLAIN_no_encryption(smtpd, user):
enc = encode(f"{user.username} {user.password}")
cmd_text = f"PLAIN {enc}"
with SMTP(smtpd.hostname, smtpd.port) as client:
client.ehlo()
(code, resp) = client.docmd("AUTH", args=cmd_text)
assert code == 538
def test_VRFY(smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
code, resp = client.verify(user.username)
assert code == 252
def test_VRFY_failure(smtpd):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.help()
code, resp = client.verify("<EMAIL>")
assert code == 502
def test_alt_port(mock_smtpd_port, smtpd):
assert smtpd.port == 5025
def test_login(mock_smtpd_use_starttls, smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
assert client.login(user.username, user.password)
def test_login_fail(mock_smtpd_use_starttls, smtpd, user):
with pytest.raises(SMTPAuthenticationError) as ex:
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
assert client.login(user.username, user.password[:0:-1])
assert ex.type is SMTPAuthenticationError
def test_login_no_tls(smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
assert client.login(user.username, user.password)
def test_login_already_done(smtpd, user):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.login(user.username, user.password)
        # we need to explicitly get the response from the second AUTH
# command because smtplib doesn't treat it as an error.
code, resp = client.docmd("AUTH")
assert code == 503
assert resp == b"Already authenticated"
def test_no_messages(smtpd):
assert len(smtpd.messages) == 0
def test_send_message(smtpd, msg):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.send_message(msg)
assert len(smtpd.messages) == 1
def test_send_message_logged_in(mock_smtpd_use_starttls, smtpd, user, msg):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.starttls()
client.login(user.username, user.password)
client.send_message(msg)
assert len(smtpd.messages) == 1
def test_send_messaged_auth_not_compete(mock_enforce_auth, smtpd, msg):
with pytest.raises(SMTPResponseException) as er:
with SMTP(smtpd.hostname, smtpd.port) as client:
client.send_message(msg)
assert er.match(r"^\(530")
def test_sendmail(smtpd):
from_addr = "<EMAIL>"
to_addr = "<EMAIL>"
msg = f"From: {from_addr}\r\nTo: {to_addr}\r\nSubject: Foo\r\n\r\nFoo bar"
with SMTP(smtpd.hostname, smtpd.port) as client:
client.sendmail(from_addr, to_addr, msg)
assert len(smtpd.messages) == 1
@mock.patch.dict(os.environ, {"SMTPD_ENFORCE_AUTH": "True"})
def test_mock_patch(smtpd):
with SMTP(smtpd.hostname, smtpd.port) as client:
client.helo()
code, repl = client.docmd("DATA", "")
assert code == 530
assert repl.startswith(b"SMTP authentication is required")
def test_monkeypatch(monkeypatch, smtpd):
monkeypatch.setenv("SMTPD_ENFORCE_AUTH", "True")
with SMTP(smtpd.hostname, smtpd.port) as client:
client.helo()
code, repl = client.docmd("DATA", "")
assert code == 530
assert repl.startswith(b"SMTP authentication is required")
```
|
{
"source": "Jeremy-Stafford/datacli",
"score": 3
}
|
#### File: datacli/datacli/__init__.py
```python
"""A library for building simple CLIs from dataclasses.
DataCLI is based on argparse.
"""
import os
from typing import List, Optional, Tuple
from argparse import ArgumentParser
from contextlib import suppress
from dataclasses import Field, MISSING, fields
__version__ = "0.1.1"
env_var_default_factory_marker = "_datacli_get_env_var"
def get_names(field):
    """Return the CLI argument names for a field."""
with suppress(KeyError):
yield field.metadata["short_name"]
try:
yield field.metadata["long_name"]
except KeyError:
yield "--" + field.name.replace("_", "-")
def from_env_var(env_var_name: str):
"""
Add a default factory to extract the cli argument from env.
Returns an anonymous function and passes metadata that can be used for further processing.
This solves the following problem:
    A default_factory is a zero-parameter function that will be called as a default. As such it has no
    awareness of the field name that it is supposed to fill. The overall intention is to enable error
    logging that reports both the cli parameter and the corresponding env variable that can be used to
    fill a required value. Raising an error inside the default_factory would therefore not let the
    application report the corresponding cli argument without tightly coupling the name of the cli
    argument to that of the env variable. Hence, by attaching meta information recording that this
    default factory reads env variables, together with the name of the variable, the subsequent
    make_parser function can provide more helpful error logging.
"""
factory_function = lambda: os.getenv(env_var_name, "")
factory_function.__qualname__ = env_var_default_factory_marker
factory_function.__name__ = env_var_name
return factory_function
def has_env_default_factory(field: Field) -> bool:
"""
Check if a field can be filled with an environment variable. This utilizes the explicit naming of the
initially anonymous default factory returned by `from_env_var`
"""
default_factory = field.default_factory
return not (default_factory is MISSING) \
and default_factory.__qualname__ == env_var_default_factory_marker
def get_corresponding_env_var(field: Field) -> Optional[str]:
"""
    Utilize the dynamic name assigned to the initially anonymous default factory returned by `from_env_var`.
"""
result = None
if has_env_default_factory(field):
result = field.default_factory.__name__
return result
def make_parser(cls):
"""Create an argument parser from a dataclass."""
parser = ArgumentParser()
for field in fields(cls):
default_factory = field.default_factory
required = (field.default is MISSING
and default_factory is MISSING)
help_text = field.metadata.get("help", "")
env_var_default: Optional[str] = None
corresponding_env_var = get_corresponding_env_var(field)
if corresponding_env_var:
help_text = help_text + \
f", can also be set with environment variable {corresponding_env_var}"
env_var_default = os.getenv(corresponding_env_var)
arg_type = field.metadata.get("arg_type", field.type)
parser.add_argument(*get_names(field),
type=arg_type, # type: ignore
help=help_text,
required=required,
default=env_var_default)
return parser
def check_fields_with_env_defaults(instance):
for field in fields(type(instance)):
field_content = getattr(instance, field.name)
corresponding_env_var = get_corresponding_env_var(field)
if corresponding_env_var and not bool(field_content):
raise ValueError(
                f"{field.name} not set, either supply one of the arguments {list(get_names(field))} "
+ f"or set environment variable {corresponding_env_var}"
)
def datacli(cls, argv=None):
"""Parse command line arguments into a 'cls' object."""
parser = make_parser(cls)
data = {key: val for key, val in vars(parser.parse_args(argv)).items()
if val is not None}
instance = cls(**data)
check_fields_with_env_defaults(instance)
return instance
```
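As a quick illustration of how the pieces fit together, a caller declares a dataclass whose field metadata feeds `make_parser`, optionally using `from_env_var` as a default factory. The `Config` class and `SERVICE_TOKEN` variable below are invented for the example and are not part of the library:

```python
# Hedged usage sketch; Config and SERVICE_TOKEN are illustrative assumptions.
from dataclasses import dataclass, field
from datacli import datacli, from_env_var

@dataclass
class Config:
    url: str = field(metadata={"help": "service URL"})
    token: str = field(default_factory=from_env_var("SERVICE_TOKEN"),
                       metadata={"help": "API token"})

# e.g. `python app.py --url https://example.com --token abc`,
# or export SERVICE_TOKEN and omit --token entirely.
config = datacli(Config)
print(config)
```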
|
{
"source": "jeremystevens/Lay-Z-Lang",
"score": 3
}
|
#### File: Lay-Z-Lang/src/compiler.py
```python
import click
#from lz_parser import LZ_Parser
#from ir_generator import Generator
from ir_generator import Generator
from lz_parser import LZ_Parser
@click.command()
@click.option('--filename',
default="code.lzy",
help="The file from which the pseudocode will be read"
)
@click.option('--output',
default="output.ll",
help="The file which will contain the compiled code"
)
def main(filename, output):
input_file = open(filename)
lines = [line.lstrip() for i, line in enumerate(input_file) if line.strip()]
lines[-1] = lines[-1].rstrip()
input_file.close()
text = ''.join(lines)
ast = LZ_Parser().parse(text)
ir = Generator().generate(ast, output)
output_file = open(output, "w+")
output_file.write(str(ir))
output_file.close()
if __name__ == "__main__":
main()
```
|
{
"source": "jeremystretch/netbox-topology-views",
"score": 2
}
|
#### File: netbox-topology-views/netbox_topology_views/filters.py
```python
import django_filters
from extras.filters import TagFilter
from dcim.models import Device, DeviceRole, Region, Site, Location
from utilities.filters import TreeNodeMultipleChoiceFilter
from django.db.models import Q
class DeviceFilterSet(django_filters.FilterSet):
q = django_filters.CharFilter(
method='search',
label='Search',
)
tag = TagFilter()
device_role_id = django_filters.ModelMultipleChoiceFilter(
field_name='device_role_id',
queryset=DeviceRole.objects.all(),
label='Role (ID)',
)
region_id = TreeNodeMultipleChoiceFilter(
queryset=Region.objects.all(),
field_name='site__region',
lookup_expr='in',
label='Region (ID)',
)
site_id = django_filters.ModelMultipleChoiceFilter(
queryset=Site.objects.all(),
label='Site (ID)',
)
location_id = TreeNodeMultipleChoiceFilter(
queryset=Location.objects.all(),
field_name='location',
lookup_expr='in',
label='Location (ID)',
)
class Meta:
model = Device
fields = ['id', 'name', ]
def search(self, queryset, name, value):
"""Perform the filtered search."""
if not value.strip():
return queryset
qs_filter = (
Q(name__icontains=value)
)
return queryset.filter(qs_filter)
```
|
{
"source": "jeremystryer/WikiEduDashboard",
"score": 2
}
|
#### File: jeremystryer/WikiEduDashboard/setup.py
```python
import platform
import subprocess
print ("WARNING! This is a work in progress script. It has been tested to work for Fedora and debian-based systems. \
There are individual operating system dependent scripts being called from this one. \
You can find them in setup directory. The script for windows is still not complete. \
You can use them as a starting point or as a reference. If you run into any errors while running this script, \
please comment with your issue on https://github.com/WikiEducationFoundation/WikiEduDashboard/issues/1709 \
Please upload your logs for installation with your issue reports. \
The logs can be found in the setup directory. If you can help improve this script, \
We would love your contributions.")
print ("Please install ruby-2.7.1 before running this script.")
def deb_setup():
print ("Your system is found to be debian-based.")
subprocess.run("sudo chmod 775 setup/deb-setup.sh && setup/deb-setup.sh",
shell=True, check=True)
def dnf_setup():
print("Your system is found to be Fedora")
subprocess.run("sudo chmod 775 setup/dnf-setup.sh && setup/dnf-setup.sh",
shell=True, check=True)
def win_setup():
print ("Your system is found to be Windows")
subprocess.run("runas /user:Administrator \"setup\win-setup.bat\"",
shell=True, check=True)
def osx_setup():
print ("Your system is found to be OSX")
subprocess.run("sudo chmod 775 setup/macOS-setup.sh && setup/macOS-setup.sh",
shell=True, check=True)
if platform.platform().lower().find('ubuntu') != -1 \
or platform.platform().lower().find('debian') != -1 \
or platform.platform().lower().find('elementary') != -1:
deb_setup()
elif platform.platform().lower().find('fedora') != -1:
dnf_setup()
elif platform.platform().lower().find('darwin') != -1 \
or platform.platform().lower().find('mac') != -1:
osx_setup()
elif platform.platform().lower().find('windows') != -1:
win_setup()
else:
    print ("Sorry! Your operating system is not supported by this script. Please refer to \
https://github.com/WikiEducationFoundation/WikiEduDashboard/blob/master/ \
docs/setup.md for manual setup instructions.")
```
|
{
"source": "JeremySun1224/Algorithms-and-Data_Structures",
"score": 3
}
|
#### File: JeremySun1224/Algorithms-and-Data_Structures/01.binary_search.py
```python
from cal_time import *
@call_time
def linear_search(li, val):
for idx, i in enumerate(li):
        if i == val:  # must return the index, not the element i itself
            return idx
        # else:
        #     return None  # returning here would stop the search after the first element
else:
return None
@call_time
def binary_search(li, val):
left = 0
    right = len(li) - 1  # Python is zero-indexed, so subtract 1
while left <= right:
mid = (left + right) // 2
if li[mid] == val:
return mid
elif li[mid] < val:
left = mid + 1
else:
right = mid - 1
else:
return None
if __name__ == '__main__':
li = list(range(1000000))
val = 389000
print(linear_search(li=li, val=val))
print(binary_search(li=li, val=val))
"""
linear_search running time is 0.017003774642944336.
389000
binary_search running time is 0.0.
389000
"""
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/11.bucket_sort.py
```python
import random
def bucket_sort(li, n=100, max_num=10000):
    buckets = [[] for _ in range(n)]  # create the buckets
    for val in li:
        i = min(val // (max_num // n), n - 1)  # i is the index of the bucket that val goes into
        buckets[i].append(val)  # put val into its bucket
        for j in range(len(buckets[i]) - 1, 0, -1):  # keep the bucket sorted with an insertion-sort style pass
if buckets[i][j] < buckets[i][j - 1]:
buckets[i][j], buckets[i][j - 1] = buckets[i][j - 1], buckets[i][j]
else:
break
sorted_li = []
for buc in buckets:
sorted_li.extend(buc)
return sorted_li
if __name__ == '__main__':
li = [random.randint(0, 10000) for _ in range(100000)]
    # print(f'before sorting: {li}')
    print(f'after sorting: {bucket_sort(li=li)}')
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/17.file_system.py
```python
class Node(object):
def __init__(self, name, type='dir'):
self.name = name
self.type = type
self.children = []
self.parent = None
def __repr__(self):
return self.name
class FileSystemTree(object):
def __init__(self):
self.root = Node(name='/')
self.now = self.root
def mkdir(self, name):
if name[-1] != '/':
name += '/'
        node = Node(name=name)  # create the directory node
self.now.children.append(node)
node.parent = self.now
def ls(self):
return self.now.children
def cd(self, name):
if name[-1] != '/':
name += '/'
if name == '../':
self.now = self.now.parent
return None
for child in self.now.children:
if child.name == name:
                self.now = child  # change into that directory
return None
raise ValueError('Invalid Dir.')
if __name__ == '__main__':
tree = FileSystemTree()
tree.mkdir('var/')
tree.mkdir('bin/')
tree.mkdir('usr/')
tree.cd('bin/')
tree.mkdir('python/')
tree.cd('../')
print(tree.root.children)
print(tree.ls())
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/18.binary_tree.py
```python
from collections import deque
class BiTreeNode(object):
def __init__(self, data):
self.data = data
        self.lchild = None  # left child
        self.rchild = None  # right child
def pre_order(root):
    """Pre-order traversal."""
    if root:
        print(root.data, end=',')  # visit the root first
        pre_order(root.lchild)  # then recurse into the left subtree
        pre_order(root.rchild)  # finally recurse into the right subtree
def in_order(root):
    """In-order traversal."""
    if root:
        in_order(root.lchild)  # recurse into the left subtree first
        print(root.data, end=',')  # then visit the root
        in_order(root.rchild)  # finally recurse into the right subtree
def post_order(root):
    """Post-order traversal."""
    if root:
        post_order(root.lchild)  # recurse into the left subtree first
        post_order(root.rchild)  # then the right subtree
        print(root.data, end=',')  # finally visit the root
def level_order(root):
    """Level-order (breadth-first) traversal."""
    queue = deque()
    queue.append(root)  # enqueue the root first
    while len(queue) > 0:  # as long as the queue is not empty
        node = queue.popleft()  # dequeue
        print(node.data, end=',')
        # if the node has left/right children, enqueue them
if node.lchild:
queue.append(node.lchild)
if node.rchild:
queue.append(node.rchild)
if __name__ == '__main__':
a = BiTreeNode('A')
b = BiTreeNode('B')
c = BiTreeNode('C')
d = BiTreeNode('D')
e = BiTreeNode('E')
f = BiTreeNode('F')
g = BiTreeNode('G')
e.lchild = a
e.rchild = g
a.rchild = c
c.lchild = b
c.rchild = d
g.rchild = f
root = e
    # the binary tree has now been built
    # print(root.lchild.rchild.data)
    # traverse the binary tree
    # pre_order(root=root)  # pre-order traversal
    # in_order(root=root)  # in-order traversal
    # post_order(root=root)  # post-order traversal
    level_order(root=root)  # level-order traversal
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/20.AVLTree.py
```python
class BiTreeNode(object):
def __init__(self, data):
self.data = data
self.lchild = None
self.rchild = None
self.parent = None
class BST(object):
def __init__(self, li=None):
self.root = None
if li:
for val in li:
self.insert_non_recurrent(val)
    def insert_recurrent(self, node, val):
        """
        Recursive insert.
        :param node: the node the recursion has reached
        :param val: the value to insert
        :return: node
        """
        if not node:  # node is empty
            node = BiTreeNode(val)  # insert here; BiTreeNode(val) wraps val in a node
        elif val < node.data:
            node.lchild = self.insert_recurrent(node.lchild, val)  # recurse into the left child
            node.lchild.parent = node
        elif val > node.data:
            node.rchild = self.insert_recurrent(node.rchild, val)  # recurse into the right child
            node.rchild.parent = node
return node
    def insert_non_recurrent(self, val):
        """
        Non-recursive insert.
        :param val: the value to insert
        :return: None
        """
        p = self.root
        if not p:  # empty tree
            self.root = BiTreeNode(val)
            return
        while True:
            if val < p.data:
                if p.lchild:
                    p = p.lchild
                else:  # no left child yet
p.lchild = BiTreeNode(val)
p.lchild.parent = p
return
elif val > p.data:
if p.rchild:
p = p.rchild
else:
p.rchild = BiTreeNode(val)
p.rchild.parent = p
return
else:
return
    def query_recurrent(self, node, val):
        """Recursive lookup."""
        if not node:
            return None
        if node.data < val:
            return self.query_recurrent(node.rchild, val)
        elif node.data > val:
            return self.query_recurrent(node.lchild, val)
        else:
            return node  # found the value
    def query_non_recurrent(self, val):
        """Non-recursive lookup."""
p = self.root
while p:
if p.data < val:
p = p.rchild
elif p.data > val:
p = p.lchild
else:
return p
return None
    def __remove_node_1(self, node):
        """Case 1: node is a leaf."""
        if not node.parent:  # node is the root
            self.root = None
            return
        if node == node.parent.lchild:  # node is its parent's left child
            node.parent.lchild = None
        else:  # node is its parent's right child
            node.parent.rchild = None
    def __remove_node_21(self, node):
        """Case 2.1: node has only a left child."""
        if not node.parent:  # node is the root
            self.root = node.lchild
            node.lchild.parent = None
        elif node == node.parent.lchild:  # node is its parent's left child
            node.parent.lchild = node.lchild
            node.lchild.parent = node.parent
        else:  # node is its parent's right child
node.parent.rchild = node.lchild
node.lchild.parent = node.parent
    def __remove_node_22(self, node):
        """Case 2.2: node has only a right child."""
        if not node.parent:
            self.root = node.rchild
            node.rchild.parent = None
        elif node == node.parent.lchild:
            node.parent.lchild = node.rchild  # re-point the parent's child link
            node.rchild.parent = node.parent  # re-point the child's parent link
else:
node.parent.rchild = node.rchild
node.rchild.parent = node.parent
    def delete(self, val):
        """Delete a value from the tree."""
        if self.root:  # not an empty tree
            node = self.query_non_recurrent(val)
            if not node:  # value not present
                return False
            if not node.lchild and not node.rchild:
                self.__remove_node_1(node)
            elif not node.rchild:  # no right child, so there must be a left child
                self.__remove_node_21(node)
            elif not node.lchild:  # only a right child
                self.__remove_node_22(node)
            else:
                """
                Case 3: the node has both children:
                1) find the smallest node of the right subtree by following left children until there are none
                2) copy that node's value into the current node
                3) delete min_node
                """
                min_node = node.rchild
                while min_node.lchild:  # min_node still has a left child
                    min_node = min_node.lchild  # now the smallest node in the right subtree
                node.data = min_node.data  # copy the found value into the current node
                # delete min_node
                if min_node.rchild:
                    # self.__remove_node_22(node)  # wrong: the node to remove is min_node
self.__remove_node_22(min_node)
else:
self.__remove_node_1(min_node)
    def pre_order(self, root):
        """Pre-order traversal."""
        if root:
            print(root.data, end=',')  # visit the root first
            self.pre_order(root.lchild)  # then recurse into the left subtree
            self.pre_order(root.rchild)  # finally recurse into the right subtree
    def in_order(self, root):
        """In-order traversal."""
        if root:
            self.in_order(root.lchild)  # recurse into the left subtree first
            print(root.data, end=',')  # then visit the root
            self.in_order(root.rchild)  # finally recurse into the right subtree
    def post_order(self, root):
        """Post-order traversal."""
        if root:
            self.post_order(root.lchild)  # recurse into the left subtree first
            self.post_order(root.rchild)  # then the right subtree
            print(root.data, end=',')  # finally visit the root
class AVLNode(BiTreeNode):
def __init__(self, data):
super(AVLNode, self).__init__(data)
self.bf = 0
class AVLTree(BST):
def __init__(self, li=None):
super(AVLTree, self).__init__(li)
def rotate_left(self, p, c):
s2 = c.lchild
p.rchild = s2
if s2:
s2.parent = p
c.lchild = p
p.parent = c
p.bf = 0
c.bf = 0
def rotate_right(self, p, c):
s2 = c.rchild
p.lchild = s2
if s2:
s2.parent = p
c.rchild = p
p.parent = c
p.bf = 0
c.bf = 0
def rotate_right_left(self, p, c):
g = c.lchild
s3 = g.rchild
c.lchild = s3
if s3:
s3.parent = c
g.rchild = c
c.parent = g
s2 = g.lchild
p.rchild = s2
if s2:
s2.parent = p
g.lchild = p
p.parent = g
        # update the balance factors
if g.bf > 0:
p.bf = -1
c.bf = 0
elif g.bf < 0:
p.bf = 0
c.bf = 1
else:
p.bf = 0
c.bf = 0
def rotate_left_right(self, p, c):
g = c.rchild
s2 = g.lchild
c.rchild = s2
if s2:
s2.parent = c
g.lchild = c
c.parent = g
s3 = g.rchild
p.lchild = s3
if s3:
s3.parent = p
g.rchild = p
p.parent = g
        # update the balance factors
if g.bf < 0:
p.bf = 1
c.bf = 0
elif g.bf > 0:
p.bf = 0
c.bf = -1
else:
p.bf = 0
c.bf = 0
```
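For completeness, the BST half of this module can be exercised on its own (the AVL rotations above are defined but not yet hooked into insertion in this version). A short usage sketch that could be appended to the bottom of the same file; it is not in the original:

```python
# Hedged usage sketch for the BST class defined above.
if __name__ == '__main__':
    tree = BST([5, 2, 8, 1, 3])
    tree.in_order(tree.root)   # prints 1,2,3,5,8,
    print()
    tree.delete(2)
    tree.in_order(tree.root)   # prints 1,3,5,8,
```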
#### File: JeremySun1224/Algorithms-and-Data_Structures/23.number_join.py
```python
from functools import cmp_to_key
def xy_cmp(x, y):
if x + y < y + x:
return 1
elif x + y > y + x:
return -1
else:
return 0
def number_join(li):
    li = list(map(str, li))  # convert the integers to strings
    li.sort(key=cmp_to_key(mycmp=xy_cmp))  # cmp_to_key compares elements pairwise; a positive cmp result swaps the pair
return ''.join(li)
if __name__ == '__main__':
l = [32, 94, 128, 1286, 6, 71]
print(number_join(li=l))
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/24.activity_select.py
```python
def activity_select(a):
    a.sort(key=lambda x: x[1])  # the activity set must be sorted by finish time
res = [a[0]]
for i in range(1, len(a)):
        # no conflict if this activity starts at or after the last selected one ends
if a[i][0] >= res[-1][1]:
res.append(a[i])
return res
if __name__ == '__main__':
activities = [(1, 4), (3, 5), (0, 6), (5, 7), (3, 9), (6, 10), (8, 11), (8, 12), (2, 14), (12, 16)]
print(activity_select(a=activities))
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/25.fibonacci.py
```python
def fibonacci(n):
    """Recursive version: it recomputes many overlapping subproblems, so it is inefficient."""
if n == 1 or n == 2:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
def fibonacci_non_recurrent(n):
    """
    Non-recursive version; this is essentially dynamic programming:
    1. optimal substructure: the problem is solved from its subproblems, as in the Fibonacci recurrence
    2. overlapping subproblems: keep intermediate results so they can be reused later
    """
    f = [0, 1, 1]  # start f at index 0 so that f[n] lines up with n
if n > 2:
for i in range(n - 2):
num = f[-1] + f[-2]
f.append(num)
return f[n]
if __name__ == '__main__':
# print(fibonacci(50))
print(fibonacci_non_recurrent(50))
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/27.cut_rod_DP.py
```python
from time import time
def cal_time(func):
def wrapper(*arg, **kwargs):
t1 = time()
result = func(*arg, **kwargs)
t2 = time()
print(f'{func.__name__} running time: {t2 - t1}')
return result
return wrapper
def cut_rod_recurrent_2(p, n):
    """Never cut the left piece, only the right remainder: O(2^n)."""
    if n == 0:
        return 0
    else:
        res = 0  # best revenue so far; the loop below builds on p[i]
        for i in range(1, n + 1):
            res = max(res, p[i] + cut_rod_recurrent_2(p, n - i))  # p[i] is the uncut left piece
return res
@cal_time
def c2(p, n):
return cut_rod_recurrent_2(p, n)
@cal_time
def cut_rod_dp(p, n):
    """Bottom-up dynamic programming: O(n^2)."""
    r = [0]  # stores the results of the subproblems
    for i in range(1, n + 1):
        res = 0
        for j in range(1, i + 1):
            res = max(res, p[j] + r[i - j])
        r.append(res)  # memoize the best revenue for length i
return r[n]
if __name__ == '__main__':
price = [0, 1, 5, 8, 9, 10, 17, 17, 20, 21, 23, 24, 26, 27, 27, 28, 30, 33, 36, 39, 40]
    print(f'max revenue: {c2(p=price, n=20)}')
    print(f'max revenue: {cut_rod_dp(p=price, n=20)}')
"""
c2 running time: 0.39551401138305664
max revenue: 56
cut_rod_dp running time: 0.0
max revenue: 56
"""
```
#### File: JeremySun1224/Algorithms-and-Data_Structures/29.lcs.py
```python
def lcs_length(x, y):
    """Return the length of the longest common subsequence."""
    m = len(x)
    n = len(y)
    c = [[0 for _ in range(n + 1)] for _ in range(m + 1)]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if x[i - 1] == y[j - 1]:  # characters at i, j match: value comes from the upper-left cell + 1
c[i][j] = c[i - 1][j - 1] + 1
else:
c[i][j] = max(c[i - 1][j], c[i][j - 1])
return c[m][n]
def lcs(x, y):
    """Return the LCS length and the direction matrix used for backtracking."""
    m = len(x)
    n = len(y)
    c = [[0 for _ in range(n + 1)] for _ in range(m + 1)]
    b = [[0 for _ in range(n + 1)] for _ in range(m + 1)]  # 1: upper-left, 2: up, 3: left
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            if x[i - 1] == y[j - 1]:  # characters at i, j match: value comes from the upper-left cell + 1
                c[i][j] = c[i - 1][j - 1] + 1
                b[i][j] = 1  # upper-left
            elif c[i - 1][j] > c[i][j - 1]:  # up
c[i][j] = c[i - 1][j]
b[i][j] = 2
else:
c[i][j] = c[i][j - 1]
b[i][j] = 3
return c[m][n], b
def lcs_track_back(x, y):
    """Backtrack through the direction matrix to recover the LCS."""
    _, b = lcs(x, y)
    i = len(x)
    j = len(y)
    res = []
    while i > 0 and j > 0:  # must be 'and', not 'or'
        if b[i][j] == 1:  # came from the upper-left => characters match
            res.append(x[i - 1])
            i -= 1
            j -= 1
        elif b[i][j] == 2:  # came from above => no match
            i -= 1
        else:  # came from the left => no match
j -= 1
return ''.join(reversed(res))
if __name__ == '__main__':
    print(f"longest common subsequence: {lcs_track_back('ABCBDAB', 'BDCABA')}")
    """longest common subsequence: BDAB"""
```
|
{
"source": "jeremytammik/the_building_code_blog",
"score": 2
}
|
#### File: a/zip/pycolorize.py
```python
import re, win32con
import win32clipboard as w
from optparse import OptionParser
color_map = { '#2b91af' : 'teal', '#a31515' : 'maroon' }
def getText():
w.OpenClipboard()
d = w.GetClipboardData( win32con.CF_TEXT )
w.CloseClipboard()
return d
def setText( aType, aString ):
w.OpenClipboard()
w.EmptyClipboard()
w.SetClipboardData( aType, aString )
w.CloseClipboard()
_regexColor = re.compile( '\.(cb[1-9]) \{ color\: ([#0-9a-z]+); \}' )
_regexStyle = re.compile( '(<style type="text/css">.*</style>\s*<div class="cf">\s*)', re.DOTALL )
_regexEnd = re.compile( '(</pre>\s*</div>)', re.DOTALL )
#_regexPreEnd = re.compile( '(</pre>$)' )
def replace_cb_by_color( s ):
"Search for '.cb1 { color: blue; }' and globally replace cb[1-9] by the appropriate colour."
m = _regexColor.search( s )
if m:
#print 'match found'
a = m.groups()
#print a
if 2 == len( a ):
color = a[1]
#print color
if color_map.has_key( color ): color = color_map[color]
#print color
return True, s.replace( a[0], color )
#else:
# print 'no match found'
return False, s
def main():
'''
progname = 'pycolorize'
usage = 'usage: %s [-bfo]' % progname
parser = OptionParser( usage, version = progname + ' 1.0' )
parser.add_option( '-b', '--backup', action='store_true', dest='backup', help = 'create backup file, only useful with -f' )
parser.add_option( '-f', '--filename', action='store', dest='filename', help = 'define input filename' )
parser.add_option( '-o', '--outfilename', action='store', dest='outfilename', help = 'define output filename' )
(options, args) = parser.parse_args()
#print options
#print args
'''
s = getText()
#s = '''<style type="text/css"> ... '''
#print s
go = True
while go: go, s = replace_cb_by_color( s )
#print s
m = _regexStyle.match( s )
#print m
if m:
s = s.replace( m.group( 1 ), '' )
#print s
s = s.replace( '<pre class="cl">', '' )
#print s
m = _regexEnd.search( s )
#print m
if m:
s = s.replace( m.group( 1 ), '' )
#print s
s = s.replace( '</pre>\r\n', '\r\n' )
#print s
setText( win32con.CF_TEXT, s )
#print s
if __name__ == '__main__':
main()
```
#### File: a/zip/rename_view_numbers.py
```python
from Autodesk.Revit.UI.Selection import ObjectType
from Autodesk.Revit.UI import TaskDialog
import clr
import traceback
import operator
import math
clr.AddReference('RevitAPI')
clr.AddReference('RevitAPIUI')
from Autodesk.Revit.DB import *
app = __revit__.Application
doc = __revit__.ActiveUIDocument.Document
uidoc = __revit__.ActiveUIDocument
SHEET = doc.ActiveView
LOGFILE = "T:\Malcolm-Chris\REVIT\PYTHON REVIT SHELL\log.txt"
RAW_WRITE = True
def is_array(var):
return isinstance(var, (list, tuple))
def log(textArr,is_raw=False):
global LOGFILE
filename = LOGFILE
if (not is_array(textArr)):
textArr = [textArr]
target = open(filename, 'a+')
target.write("\n")
for i in textArr:
if (not is_raw):
target.write(repr(i))
else:
target.write(i)
target.close()
def clearLog():
global LOGFILE
filename = LOGFILE
target = open(filename, 'w+')
target.write("")
target.close()
def transpose(lis):
return map(list, zip(*lis))
def dicViewer(dic):
list = []
for key,val in dic.items():
list.append(["key:"+str(key),val])
return list
def getPtList(dic):
list = []
for key,val in dic.items():
list.append(map(lambda x: float(x), key.split(",")))
return list
def seq(start, stop, step=1):
n = int(round((stop - start)/float(step)))
if n > 1:
return([start + step*i for i in range(n+1)])
else:
return([])
def listToDic(list):
dic = {}
for i in list:
i[0] = i[0].replace("key:","")
dic[i[0]] = i[1]
return dic
def xyToPoint(xyPt):
return [xyPt[0],xyPt[1],0]
#return Point.ByCoordinates(xyPt[0],xyPt[1],0)
def getClosest(x,n):
return math.floor(x / n) * n;
def closestNode(coord,points):
"""Return closest point to coord from points"""
dists = [(pow(point[0] - coord[0], 2) + pow(point[1] - coord[1], 2), point) for point in points]
# list of (dist, point) tuples
nearest = min(dists)
return nearest[1] # return point only
def getNewDetailViewNumber(pts,offsetX, offsetY, stepX, stepY,gridPtsDic):
newPts = []
notClosestPts = []
newPtsPts = []
detailViewNumber = []
for pt in pts:
approxPt = ([round(getClosest(pt[0], stepX)+offsetX,2),round(getClosest(pt[1], stepY)+offsetY,2)])
log(["approxPt",approxPt])
notClosestPts.append(xyToPoint(approxPt))
closestPt = closestNode(approxPt, getPtList(gridPtsDic))
newPtsPts.append(xyToPoint(closestPt))
newPts.append(closestPt)
detailViewNumber.append(gridPtsDic[",".join(map(lambda x: str(x), closestPt))])
#Assign your output to the OUT variable.
return {
"newPts":newPts,
"detailViewNumber":detailViewNumber,
"notClosestPts":notClosestPts,
"newPtsPts":newPtsPts
}
def getPtGrid(startPt, endPt):
coords = {}
# CHANGE THIS IF GRID IS DIFFERENT!
detailGrid = [
[30 ,25 ,20 ,15 ,10,5],
[29 ,24 ,19 ,14 ,9 ,4],
[28 ,23 ,18 ,13 ,8 ,3],
[27 ,22 ,17 ,12 ,7 ,2],
[26 ,21 ,16 ,11 ,6 ,1]]
xDiv = len(detailGrid[0])
yDiv = len(detailGrid)
detailGridFlat = reduce(operator.add, transpose(detailGrid))
count = 0
stepX = round((endPt.X-startPt.X)/(xDiv),2)
stepY = round((endPt.Y-startPt.Y)/(yDiv),2)
for i in seq(startPt.X, endPt.X-stepX, stepX):
for j in seq(startPt.Y, endPt.Y-stepY, stepY):
coords[str(round(i,2))+","+str(round(j,2))]= detailGridFlat[count]
count=count+1
#Assign your output to the OUT variable.
return { "coords": coords,
"offsetX": startPt.X,
"offsetY": endPt.Y,
"stepX": stepX,
"stepY": stepY
}
def elementFromId(id):
log(["id:", id])
global doc
return doc.GetElement(id)
def setParam(el, paramName, value):
param = getParam(el, paramName, True)
if not param.IsReadOnly:
param.Set(value)
else:
        log(["Could not edit ",paramName," on ",el, ". It is either readonly or not user editable. // Read Only: ",param.IsReadOnly])
def getParam(el, paramName, asParamObject=False):
params = getParameters(el, asParamObject)
return params[paramName]
def getParameters(el,asParamObject=False):
parameters = el.Parameters
params = {}
for param in parameters:
if (asParamObject==False):
params[param.Definition.Name] = param.AsString()
else:
params[param.Definition.Name] = param
return params
def elementFromReference(ref):
global doc
id = ref.ElementId
return doc.GetElement(id)
def getPointsFromViewports(viewport):
outline = viewport.GetLabelOutline()
return [outline.MaximumPoint.X,outline.MinimumPoint.Y,0]
def pickObject():
__window__.Hide()
TaskDialog.Show ("Select Objects", "Select the line representing the grid bounds after closing this dialog.")
picked = uidoc.Selection.PickObject(ObjectType.Element)
#__window__.Topmost = True
#__window__.Show()
return picked
# clear log file
clearLog()
t = Transaction(doc, 'Rename Detail Numbers')
t.Start()
#<------------- the stuff ------------>
#lets get the guide curve
try:
bbCrvRef = pickObject()
#log(bbCrvRef)
bbCrv = elementFromReference(bbCrvRef).GeometryCurve
#log(bbCrv)
pts = [bbCrv.GetEndPoint(0),bbCrv.GetEndPoint(1)]
log(pts)
ptGridData = getPtGrid(pts[0],pts[1])
log(ptGridData)
viewports = map(lambda x: elementFromId(x), SHEET.GetAllViewports())
titleBlockPts = map(lambda x: getPointsFromViewports(x) ,viewports)
log(titleBlockPts)
detailViewNumberData = getNewDetailViewNumber(titleBlockPts,ptGridData['offsetX'], ptGridData['offsetY'],ptGridData['stepX'], ptGridData['stepY'],ptGridData['coords'])
log(detailViewNumberData)
log(map(lambda x: getParameters(x) ,viewports))
log(map(lambda x: getParam(x,"Detail Number") ,viewports))
log("Hello")
# <---- Make unique numbers
for i, viewport in enumerate(viewports):
paramName = "Detail Number"
currentVal = getParam(viewport,"Detail Number")
setParam(viewport, paramName,currentVal+"x")
t.Commit()
'''
# <---- Do the thang
t2 = Transaction(doc, 'Rename Detail Numbers')
t2.Start()
for i, viewport in enumerate(viewports):
setParam(viewport, "Detail Number",detailViewNumberData[i])
t2.Commit()
'''
except SyntaxError, e:
log(["Error!\n---------\n", traceback.format_exc()],RAW_WRITE)
except Exception, e:
log(["Error!\n---------\n", traceback.format_exc()],RAW_WRITE)
#<------------- end of the stuff ------------>
__window__.Close()
```
|
{
"source": "JeremyTangCD/lm-sdk-python",
"score": 2
}
|
#### File: logicmonitor_sdk/api/lm_api.py
```python
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from logicmonitor_sdk.api_client import ApiClient
class LMApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
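    # Minimal client-setup sketch (illustrative only; Configuration and its
    # company/access_id/access_key fields are assumptions about the generated
    # SDK and are not verified here):
    #
    #   import logicmonitor_sdk
    #   conf = logicmonitor_sdk.Configuration()
    #   conf.company = 'yourportal'        # hypothetical LogicMonitor portal name
    #   conf.access_id = 'YOUR_ACCESS_ID'  # LMv1 API token id
    #   conf.access_key = 'YOUR_ACCESS_KEY'
    #   api = LMApi(ApiClient(conf))       # or LMApi() for a default ApiClient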
def ack_alert_by_id(self, body, id, **kwargs): # noqa: E501
"""ack alert by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ack_alert_by_id(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertAck body: (required)
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.ack_alert_by_id_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.ack_alert_by_id_with_http_info(body, id, **kwargs) # noqa: E501
return data
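    # Usage sketch (illustrative, mirroring the docstring above; the AlertAck
    # field name and the alert id value are assumptions):
    #
    #   body = logicmonitor_sdk.AlertAck(ack_comment='acked from the API')
    #   api.ack_alert_by_id(body, 'DS12345')                          # synchronous
    #   thread = api.ack_alert_by_id(body, 'DS12345', async_req=True)
    #   result = thread.get()                                         # asynchronous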
def ack_alert_by_id_with_http_info(self, body, id, **kwargs): # noqa: E501
"""ack alert by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ack_alert_by_id_with_http_info(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertAck body: (required)
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method ack_alert_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `ack_alert_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `ack_alert_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/alert/alerts/{id}/ack', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def ack_collector_down_alert_by_id(self, id, body, **kwargs): # noqa: E501
"""ack collector down alert # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ack_collector_down_alert_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AckCollectorDown body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.ack_collector_down_alert_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.ack_collector_down_alert_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def ack_collector_down_alert_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""ack collector down alert # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ack_collector_down_alert_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AckCollectorDown body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method ack_collector_down_alert_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `ack_collector_down_alert_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `ack_collector_down_alert_by_id`") # noqa: E501
if 'id' in params and not re.search('\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError("Invalid value for parameter `id` when calling `ack_collector_down_alert_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{id}/ackdown', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_admin(self, body, **kwargs): # noqa: E501
"""add user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_admin(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Admin body: (required)
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_admin_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_admin_with_http_info(body, **kwargs) # noqa: E501
return data
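    # Usage sketch (illustrative; the Admin fields shown are assumptions about
    # the generated model rather than a verified signature):
    #
    #   new_user = logicmonitor_sdk.Admin(username='jane.doe',
    #                                     email='jane.doe@example.com',
    #                                     password='********',
    #                                     roles=[...])  # list of role objects, omitted here
    #   created = api.add_admin(new_user)  # returns an Admin on success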
def add_admin_with_http_info(self, body, **kwargs): # noqa: E501
"""add user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_admin_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Admin body: (required)
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_admin" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_admin`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Admin', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_alert_note_by_id(self, body, id, **kwargs): # noqa: E501
"""add alert note # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_alert_note_by_id(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertAck body: (required)
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_alert_note_by_id_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.add_alert_note_by_id_with_http_info(body, id, **kwargs) # noqa: E501
return data
def add_alert_note_by_id_with_http_info(self, body, id, **kwargs): # noqa: E501
"""add alert note # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_alert_note_by_id_with_http_info(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertAck body: (required)
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_alert_note_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_alert_note_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `add_alert_note_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/alert/alerts/{id}/note', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_alert_rule(self, body, **kwargs): # noqa: E501
"""add alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_alert_rule(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_alert_rule_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_alert_rule_with_http_info(body, **kwargs) # noqa: E501
return data
def add_alert_rule_with_http_info(self, body, **kwargs): # noqa: E501
"""add alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_alert_rule_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_alert_rule" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_alert_rule`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_api_token_by_admin_id(self, admin_id, body, **kwargs): # noqa: E501
"""add api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_api_token_by_admin_id(admin_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_api_token_by_admin_id_with_http_info(admin_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_api_token_by_admin_id_with_http_info(admin_id, body, **kwargs) # noqa: E501
return data
def add_api_token_by_admin_id_with_http_info(self, admin_id, body, **kwargs): # noqa: E501
"""add api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_api_token_by_admin_id_with_http_info(admin_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['admin_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_api_token_by_admin_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'admin_id' is set
if ('admin_id' not in params or
params['admin_id'] is None):
raise ValueError("Missing the required parameter `admin_id` when calling `add_api_token_by_admin_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_api_token_by_admin_id`") # noqa: E501
if 'admin_id' in params and not re.search('\d+', params['admin_id'] if type(params['admin_id']) is str else str(params['admin_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `admin_id` when calling `add_api_token_by_admin_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'admin_id' in params:
path_params['adminId'] = params['admin_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{adminId}/apitokens', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APIToken', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_collector(self, body, **kwargs): # noqa: E501
"""add collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_collector(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Collector body: (required)
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_collector_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_collector_with_http_info(body, **kwargs) # noqa: E501
return data
def add_collector_with_http_info(self, body, **kwargs): # noqa: E501
"""add collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_collector_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Collector body: (required)
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_collector" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_collector`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Collector', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_collector_group(self, body, **kwargs): # noqa: E501
"""add collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_collector_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CollectorGroup body: (required)
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_collector_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_collector_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_collector_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_collector_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CollectorGroup body: (required)
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_collector_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_collector_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_dashboard(self, body, **kwargs): # noqa: E501
"""add dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_dashboard(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Dashboard body: (required)
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_dashboard_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_dashboard_with_http_info(body, **kwargs) # noqa: E501
return data
def add_dashboard_with_http_info(self, body, **kwargs): # noqa: E501
"""add dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_dashboard_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Dashboard body: (required)
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_dashboard" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_dashboard`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Dashboard', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_dashboard_group(self, body, **kwargs): # noqa: E501
"""add dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_dashboard_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_dashboard_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_dashboard_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_dashboard_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_dashboard_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_dashboard_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_dashboard_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device(self, body, **kwargs): # noqa: E501
"""add a new device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool add_from_wizard:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_device_with_http_info(body, **kwargs) # noqa: E501
return data
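    # Usage sketch (illustrative; the Device field names and values below are
    # assumptions about the generated model):
    #
    #   device = logicmonitor_sdk.Device(name='10.0.0.1',
    #                                    display_name='edge-router-01',
    #                                    preferred_collector_id=1)
    #   api.add_device(device)
    #   api.add_device(device, add_from_wizard=False)  # optional query parameter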
def add_device_with_http_info(self, body, **kwargs): # noqa: E501
"""add a new device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool add_from_wizard:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'start', 'end', 'netflow_filter', 'add_from_wizard'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'add_from_wizard' in params:
query_params.append(('addFromWizard', params['add_from_wizard'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Device', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_datasource_instance(self, device_id, hds_id, body, **kwargs): # noqa: E501
"""add device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_datasource_instance(device_id, hds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param DeviceDataSourceInstance body: (required)
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_datasource_instance_with_http_info(device_id, hds_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_device_datasource_instance_with_http_info(device_id, hds_id, body, **kwargs) # noqa: E501
return data
def add_device_datasource_instance_with_http_info(self, device_id, hds_id, body, **kwargs): # noqa: E501
"""add device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_datasource_instance_with_http_info(device_id, hds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param DeviceDataSourceInstance body: (required)
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_datasource_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `add_device_datasource_instance`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `add_device_datasource_instance`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_datasource_instance`") # noqa: E501
if 'device_id' in params and not re.search('\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `device_id` when calling `add_device_datasource_instance`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search('\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `hds_id` when calling `add_device_datasource_instance`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_datasource_instance_group(self, device_id, device_ds_id, body, **kwargs): # noqa: E501
"""add device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_datasource_instance_group(device_id, device_ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_datasource_instance_group_with_http_info(device_id, device_ds_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_device_datasource_instance_group_with_http_info(device_id, device_ds_id, body, **kwargs) # noqa: E501
return data
def add_device_datasource_instance_group_with_http_info(self, device_id, device_ds_id, body, **kwargs): # noqa: E501
"""add device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_datasource_instance_group_with_http_info(device_id, device_ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_datasource_instance_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `add_device_datasource_instance_group`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `add_device_datasource_instance_group`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_datasource_instance_group`") # noqa: E501
if 'device_id' in params and not re.search('\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `device_id` when calling `add_device_datasource_instance_group`, must conform to the pattern `/\d+/`") # noqa: E501
if 'device_ds_id' in params and not re.search('\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `device_ds_id` when calling `add_device_datasource_instance_group`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_group(self, body, **kwargs): # noqa: E501
"""add device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_device_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_device_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_group_cluster_alert_conf(self, device_group_id, body, **kwargs): # noqa: E501
"""Add cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group_cluster_alert_conf(device_group_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_group_cluster_alert_conf_with_http_info(device_group_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_device_group_cluster_alert_conf_with_http_info(device_group_id, body, **kwargs) # noqa: E501
return data
def add_device_group_cluster_alert_conf_with_http_info(self, device_group_id, body, **kwargs): # noqa: E501
"""Add cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group_cluster_alert_conf_with_http_info(device_group_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_group_cluster_alert_conf" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `add_device_group_cluster_alert_conf`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_group_cluster_alert_conf`") # noqa: E501
if 'device_group_id' in params and not re.search('\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `device_group_id` when calling `add_device_group_cluster_alert_conf`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceClusterAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_group_property(self, gid, body, **kwargs): # noqa: E501
"""add device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group_property(gid, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_group_property_with_http_info(gid, body, **kwargs) # noqa: E501
else:
(data) = self.add_device_group_property_with_http_info(gid, body, **kwargs) # noqa: E501
return data
def add_device_group_property_with_http_info(self, gid, body, **kwargs): # noqa: E501
"""add device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_group_property_with_http_info(gid, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_group_property" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `add_device_group_property`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_group_property`") # noqa: E501
if 'gid' in params and not re.search('\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])): # noqa: E501
raise ValueError("Invalid value for parameter `gid` when calling `add_device_group_property`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_device_property(self, device_id, body, **kwargs): # noqa: E501
"""add device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_property(device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_device_property_with_http_info(device_id, body, **kwargs) # noqa: E501
else:
(data) = self.add_device_property_with_http_info(device_id, body, **kwargs) # noqa: E501
return data
def add_device_property_with_http_info(self, device_id, body, **kwargs): # noqa: E501
"""add device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_device_property_with_http_info(device_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_device_property" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `add_device_property`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_device_property`") # noqa: E501
if 'device_id' in params and not re.search('\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `device_id` when calling `add_device_property`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
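    # --- Illustrative usage sketch (not part of the generated client) ---
    # A minimal example of calling add_device_property synchronously and
    # asynchronously. Everything below is an assumption for illustration only:
    # the package/class names (logicmonitor_sdk, Configuration, ApiClient,
    # LMApi, EntityProperty), the credential fields, and the device id 42.
    #
    #     import logicmonitor_sdk  # assumed package name
    #     conf = logicmonitor_sdk.Configuration()
    #     conf.company, conf.access_id, conf.access_key = 'ACCOUNT', 'ID', 'KEY'
    #     api = logicmonitor_sdk.LMApi(logicmonitor_sdk.ApiClient(conf))
    #     prop = logicmonitor_sdk.EntityProperty(name='custom.owner', value='ops')
    #     created = api.add_device_property(42, prop)                # synchronous, returns the body
    #     thread = api.add_device_property(42, prop, async_req=True)  # asynchronous
    #     created = thread.get()                                      # blocks until the worker finishes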
def add_escalation_chain(self, body, **kwargs): # noqa: E501
"""add escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_escalation_chain(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_escalation_chain_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_escalation_chain_with_http_info(body, **kwargs) # noqa: E501
return data
def add_escalation_chain_with_http_info(self, body, **kwargs): # noqa: E501
"""add escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_escalation_chain_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_escalation_chain" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_escalation_chain`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EscalatingChain', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_netscan(self, **kwargs): # noqa: E501
"""add a new netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_netscan(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Netscan body:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_netscan_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.add_netscan_with_http_info(**kwargs) # noqa: E501
return data
def add_netscan_with_http_info(self, **kwargs): # noqa: E501
"""add a new netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_netscan_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Netscan body:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_netscan" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Netscan', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_ops_note(self, body, **kwargs): # noqa: E501
"""add opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_ops_note(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_ops_note_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_ops_note_with_http_info(body, **kwargs) # noqa: E501
return data
def add_ops_note_with_http_info(self, body, **kwargs): # noqa: E501
"""add opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_ops_note_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_ops_note" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_ops_note`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpsNote', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_recipient_group(self, body, **kwargs): # noqa: E501
"""add recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_recipient_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_recipient_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_recipient_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_recipient_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_recipient_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_recipient_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_recipient_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RecipientGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_report(self, body, **kwargs): # noqa: E501
"""add report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_report(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_report_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_report_with_http_info(body, **kwargs) # noqa: E501
return data
def add_report_with_http_info(self, body, **kwargs): # noqa: E501
"""add report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_report_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_report`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportBase', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_report_group(self, body, **kwargs): # noqa: E501
"""add report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_report_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_report_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_report_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_report_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_report_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_report_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_report_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_role(self, body, **kwargs): # noqa: E501
"""add role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_role(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_role_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_role_with_http_info(body, **kwargs) # noqa: E501
return data
def add_role_with_http_info(self, body, **kwargs): # noqa: E501
"""add role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_role_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_role" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_role`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_sdt(self, body, **kwargs): # noqa: E501
"""add SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_sdt(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_sdt_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_sdt_with_http_info(body, **kwargs) # noqa: E501
return data
def add_sdt_with_http_info(self, body, **kwargs): # noqa: E501
"""add SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_sdt_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_sdt" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_sdt`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDT', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
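    # --- Illustrative usage sketch (assumptions noted inline) ---
    # The *_with_http_info variants take the same per-call kwargs as the plain
    # methods. When called directly without _return_http_data_only, swagger-
    # generated clients typically return a (data, status, headers) tuple, which
    # is handy when the HTTP status code matters. The `api` object and the SDT
    # body are placeholders built elsewhere.
    #
    #     sdt_body = ...  # an SDT model instance, fields omitted here
    #     data, status, headers = api.add_sdt_with_http_info(
    #         sdt_body, _request_timeout=30)  # 30s client-side timeout (assumed value)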
def add_website(self, body, **kwargs): # noqa: E501
"""add website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_website(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_website_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_website_with_http_info(body, **kwargs) # noqa: E501
return data
def add_website_with_http_info(self, body, **kwargs): # noqa: E501
"""add website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_website_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_website" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_website`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Website', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_website_group(self, body, **kwargs): # noqa: E501
"""add website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_website_group(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_website_group_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_website_group_with_http_info(body, **kwargs) # noqa: E501
return data
def add_website_group_with_http_info(self, body, **kwargs): # noqa: E501
"""add website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_website_group_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_website_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_website_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_widget(self, body, **kwargs): # noqa: E501
"""add widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_widget(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_widget_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.add_widget_with_http_info(body, **kwargs) # noqa: E501
return data
def add_widget_with_http_info(self, body, **kwargs): # noqa: E501
"""add widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_widget_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_widget" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_widget`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Widget', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def collect_device_config_source_config(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""collect a config for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collect_device_config_source_config(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.collect_device_config_source_config_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
else:
(data) = self.collect_device_config_source_config_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
return data
def collect_device_config_source_config_with_http_info(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""collect a config for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collect_device_config_source_config_with_http_info(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method collect_device_config_source_config" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `collect_device_config_source_config`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `collect_device_config_source_config`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `collect_device_config_source_config`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `collect_device_config_source_config`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `hds_id` when calling `collect_device_config_source_config`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `instance_id` when calling `collect_device_config_source_config`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/config/collectNow', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
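    # --- Illustrative usage sketch ---
    # Triggering an on-demand config collection. The three path ids (device,
    # device-datasource, instance) are placeholders that would normally come
    # from earlier list/get calls on the same assumed `api` object.
    #
    #     result = api.collect_device_config_source_config(42, 7, 13)  # placeholder ids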
def delete_admin_by_id(self, id, **kwargs): # noqa: E501
"""delete user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_admin_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_admin_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_admin_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_admin_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_admin_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_admin_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_admin_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_admin_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_alert_rule_by_id(self, id, **kwargs): # noqa: E501
"""delete alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_alert_rule_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_alert_rule_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_alert_rule_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_alert_rule_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_alert_rule_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_alert_rule_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_alert_rule_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_alert_rule_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_api_token_by_id(self, admin_id, apitoken_id, **kwargs): # noqa: E501
"""delete apiToken # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_api_token_by_id(admin_id, apitoken_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_api_token_by_id_with_http_info(admin_id, apitoken_id, **kwargs) # noqa: E501
else:
(data) = self.delete_api_token_by_id_with_http_info(admin_id, apitoken_id, **kwargs) # noqa: E501
return data
def delete_api_token_by_id_with_http_info(self, admin_id, apitoken_id, **kwargs): # noqa: E501
"""delete apiToken # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_api_token_by_id_with_http_info(admin_id, apitoken_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['admin_id', 'apitoken_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_api_token_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'admin_id' is set
if ('admin_id' not in params or
params['admin_id'] is None):
raise ValueError("Missing the required parameter `admin_id` when calling `delete_api_token_by_id`") # noqa: E501
# verify the required parameter 'apitoken_id' is set
if ('apitoken_id' not in params or
params['apitoken_id'] is None):
raise ValueError("Missing the required parameter `apitoken_id` when calling `delete_api_token_by_id`") # noqa: E501
        if 'admin_id' in params and not re.search(r'\d+', params['admin_id'] if type(params['admin_id']) is str else str(params['admin_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `admin_id` when calling `delete_api_token_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'apitoken_id' in params and not re.search(r'\d+', params['apitoken_id'] if type(params['apitoken_id']) is str else str(params['apitoken_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `apitoken_id` when calling `delete_api_token_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'admin_id' in params:
path_params['adminId'] = params['admin_id'] # noqa: E501
if 'apitoken_id' in params:
path_params['apitokenId'] = params['apitoken_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{adminId}/apitokens/{apitokenId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collector_by_id(self, id, **kwargs): # noqa: E501
"""delete collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collector_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collector_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_collector_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_collector_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collector_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collector_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_collector_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_collector_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_collector_group_by_id(self, id, **kwargs): # noqa: E501
"""delete collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collector_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_collector_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_collector_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_collector_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collector_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_collector_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_collector_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_collector_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_dashboard_by_id(self, id, **kwargs): # noqa: E501
"""delete dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dashboard_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_dashboard_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_dashboard_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_dashboard_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dashboard_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_dashboard_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_dashboard_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_dashboard_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_dashboard_group_by_id(self, id, **kwargs): # noqa: E501
"""delete dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dashboard_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool allow_non_empty_group:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_dashboard_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_dashboard_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_dashboard_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dashboard_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool allow_non_empty_group:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'allow_non_empty_group'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_dashboard_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_dashboard_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_dashboard_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'allow_non_empty_group' in params:
query_params.append(('allowNonEmptyGroup', params['allow_non_empty_group'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
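    # Illustrative usage sketch (not part of the generated source): the keyword
    # argument below maps to the allowNonEmptyGroup query parameter. `api` and
    # the group id 42 are placeholders for a configured instance of this class
    # and a real dashboard group id.
    #
    #     api.delete_dashboard_group_by_id(42, allow_non_empty_group=True)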
def delete_datasource_by_id(self, id, **kwargs): # noqa: E501
"""delete datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_datasource_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_datasource_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_datasource_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_datasource_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_datasource_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_datasource_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_datasource_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_datasource_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
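    # Illustrative sketch of the asynchronous calling convention shared by these
    # methods (not part of the generated source): passing async_req=True returns
    # a thread-like object whose get() yields the parsed response. The
    # datasource id 1234 is a placeholder.
    #
    #     thread = api.delete_datasource_by_id(1234, async_req=True)
    #     result = thread.get()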
def delete_device_by_id(self, id, **kwargs): # noqa: E501
"""delete a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool delete_hard:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_device_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_device_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool delete_hard:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'delete_hard'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_device_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_device_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'delete_hard' in params:
query_params.append(('deleteHard', params['delete_hard'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
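    # Illustrative sketch (not part of the generated source): delete_hard maps
    # to the deleteHard query parameter. The device id 987 is a placeholder.
    #
    #     api.delete_device_by_id(987, delete_hard=True)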
def delete_device_datasource_instance_by_id(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""delete a device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_datasource_instance_by_id(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
return data
def delete_device_datasource_instance_by_id_with_http_info(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""delete a device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_datasource_instance_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `delete_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `delete_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_device_datasource_instance_by_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `delete_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `hds_id` when calling `delete_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
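    # Illustrative sketch (not part of the generated source): all three path
    # parameters are positional here. The ids below are placeholders for a real
    # device id, device-datasource (hds) id, and instance id.
    #
    #     api.delete_device_datasource_instance_by_id(12, 34, 56)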
def delete_device_group_by_id(self, id, **kwargs): # noqa: E501
"""delete device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool delete_children:
:param bool delete_hard:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_device_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_device_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool delete_children:
:param bool delete_hard:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'delete_children', 'delete_hard'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_device_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_device_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'delete_children' in params:
query_params.append(('deleteChildren', params['delete_children'])) # noqa: E501
if 'delete_hard' in params:
query_params.append(('deleteHard', params['delete_hard'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
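    # Illustrative sketch (not part of the generated source): both keyword
    # arguments map to the deleteChildren and deleteHard query parameters; the
    # group id 7 is a placeholder.
    #
    #     api.delete_device_group_by_id(7, delete_children=True, delete_hard=False)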
def delete_device_group_cluster_alert_conf_by_id(self, device_group_id, id, **kwargs): # noqa: E501
"""Delete cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_cluster_alert_conf_by_id(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
else:
(data) = self.delete_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
return data
def delete_device_group_cluster_alert_conf_by_id_with_http_info(self, device_group_id, id, **kwargs): # noqa: E501
"""Delete cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_group_cluster_alert_conf_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `delete_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_device_group_cluster_alert_conf_by_id`") # noqa: E501
        if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_group_id` when calling `delete_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
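    # Illustrative sketch (not part of the generated source): cluster alert
    # configurations are addressed by device group id plus configuration id, so
    # both arguments are required. The values below are placeholders.
    #
    #     api.delete_device_group_cluster_alert_conf_by_id(7, 2)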
def delete_device_group_property_by_name(self, gid, name, **kwargs): # noqa: E501
"""delete device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_property_by_name(gid, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_group_property_by_name_with_http_info(gid, name, **kwargs) # noqa: E501
else:
(data) = self.delete_device_group_property_by_name_with_http_info(gid, name, **kwargs) # noqa: E501
return data
def delete_device_group_property_by_name_with_http_info(self, gid, name, **kwargs): # noqa: E501
"""delete device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_group_property_by_name_with_http_info(gid, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_group_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `delete_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_device_group_property_by_name`") # noqa: E501
        if 'gid' in params and not re.search(r'\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `gid` when calling `delete_device_group_property_by_name`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `name` when calling `delete_device_group_property_by_name`, must conform to the pattern `/[^\/]+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
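    # Illustrative sketch (not part of the generated source): group properties
    # are addressed by group id plus property name, so both arguments are
    # required. The group id and property name below are placeholders.
    #
    #     api.delete_device_group_property_by_name(7, 'snmp.community')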
def delete_device_property_by_name(self, device_id, name, **kwargs): # noqa: E501
"""delete device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_property_by_name(device_id, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_device_property_by_name_with_http_info(device_id, name, **kwargs) # noqa: E501
else:
(data) = self.delete_device_property_by_name_with_http_info(device_id, name, **kwargs) # noqa: E501
return data
def delete_device_property_by_name_with_http_info(self, device_id, name, **kwargs): # noqa: E501
"""delete device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_device_property_by_name_with_http_info(device_id, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_device_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `delete_device_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `delete_device_property_by_name`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `delete_device_property_by_name`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `name` when calling `delete_device_property_by_name`, must conform to the pattern `/[^\/]+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties/{name}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_escalation_chain_by_id(self, id, **kwargs): # noqa: E501
"""delete escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_escalation_chain_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_escalation_chain_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_escalation_chain_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_escalation_chain_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_escalation_chain_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_escalation_chain_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_escalation_chain_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_escalation_chain_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_netscan_by_id(self, id, **kwargs): # noqa: E501
"""delete a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_netscan_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_netscan_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_netscan_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_netscan_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_netscan_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_netscan_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_netscan_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_netscan_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_ops_note_by_id(self, id, **kwargs): # noqa: E501
"""delete opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ops_note_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_ops_note_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_ops_note_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_ops_note_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ops_note_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_ops_note_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_ops_note_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'[^\/]+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_ops_note_by_id`, must conform to the pattern `/[^\/]+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
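    # Illustrative sketch (not part of the generated source): unlike most delete
    # methods here, the ops note id is a string rather than an int. The id value
    # below is a placeholder.
    #
    #     api.delete_ops_note_by_id('aBcD12')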
def delete_recipient_group_by_id(self, id, **kwargs): # noqa: E501
"""delete recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_recipient_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_recipient_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_recipient_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_recipient_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_recipient_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_recipient_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_recipient_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_recipient_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_report_by_id(self, id, **kwargs): # noqa: E501
"""delete report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_report_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_report_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_report_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_report_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_report_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_report_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_report_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_report_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_report_group_by_id(self, id, **kwargs): # noqa: E501
"""delete report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_report_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_report_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_report_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_report_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_report_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_report_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_report_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_report_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_role_by_id(self, id, **kwargs): # noqa: E501
"""delete role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_role_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_role_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_role_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_role_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_role_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_role_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_role_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_role_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_sdt_by_id(self, id, **kwargs): # noqa: E501
"""delete SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_sdt_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_sdt_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_sdt_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_sdt_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_sdt_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_sdt_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_sdt_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
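    # Illustrative sketch (not part of the generated source): SDT ids are
    # strings, and no numeric-pattern check is applied to them. The id below is
    # a placeholder.
    #
    #     api.delete_sdt_by_id('H_12')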
def delete_website_by_id(self, id, **kwargs): # noqa: E501
"""delete website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_website_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_website_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_website_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_website_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_website_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_website_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_website_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_website_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_website_group_by_id(self, id, **kwargs): # noqa: E501
"""delete website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_website_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int delete_children:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_website_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_website_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_website_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_website_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int delete_children:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'delete_children'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_website_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_website_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_website_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'delete_children' in params:
query_params.append(('deleteChildren', params['delete_children'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
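    # Illustrative sketch (not part of the generated source): delete_children is
    # declared as an int in this spec and maps to the deleteChildren query
    # parameter. The group id 3 is a placeholder.
    #
    #     api.delete_website_group_by_id(3, delete_children=1)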
def delete_widget_by_id(self, id, **kwargs): # noqa: E501
"""delete widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_widget_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_widget_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_widget_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_widget_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""delete widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_widget_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_widget_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_widget_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `delete_widget_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
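    # Illustrative usage sketch (not part of the generated client): assuming
    # `api` is an authenticated instance of this API class, a dashboard widget
    # can be removed synchronously or asynchronously:
    #
    #     api.delete_widget_by_id(42)                            # blocking call
    #     thread = api.delete_widget_by_id(42, async_req=True)   # non-blocking
    #     result = thread.get()                                  # wait for completion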
def execute_debug_command(self, **kwargs): # noqa: E501
"""Execute a Collector debug command # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.execute_debug_command(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Debug body:
:param int collector_id:
:return: Debug
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.execute_debug_command_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.execute_debug_command_with_http_info(**kwargs) # noqa: E501
return data
def execute_debug_command_with_http_info(self, **kwargs): # noqa: E501
"""Execute a Collector debug command # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.execute_debug_command_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Debug body:
:param int collector_id:
:return: Debug
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'collector_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method execute_debug_command" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'collector_id' in params:
query_params.append(('collectorId', params['collector_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/debug', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Debug', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
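    # Illustrative usage sketch: only the method signature above comes from the
    # spec; the module that exposes the Debug model and its `cmdline` field are
    # assumptions here.
    #
    #     debug = Debug(cmdline='!tlist')                        # hypothetical field
    #     result = api.execute_debug_command(body=debug, collector_id=12)
    #     print(result)                                          # Debug response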
def generate_report_by_id(self, id, **kwargs): # noqa: E501
"""run a report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_report_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param GenerateReportRequest body:
:return: GenerateReportResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.generate_report_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.generate_report_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def generate_report_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""run a report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.generate_report_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param GenerateReportRequest body:
:return: GenerateReportResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method generate_report_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `generate_report_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `generate_report_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports/{id}/executions', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GenerateReportResult', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
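    # Illustrative usage sketch: `GenerateReportRequest` is named in the
    # docstring above, but its constructor arguments are not, so the body is
    # left empty here.
    #
    #     result = api.generate_report_by_id(101, body=GenerateReportRequest())
    #     print(result)                                          # GenerateReportResult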
def get_admin_by_id(self, id, **kwargs): # noqa: E501
"""get user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_admin_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_admin_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_admin_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_admin_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_admin_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_admin_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_admin_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_admin_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Admin', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_admin_list(self, **kwargs): # noqa: E501
"""get user list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_admin_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AdminPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_admin_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_admin_list_with_http_info(**kwargs) # noqa: E501
return data
def get_admin_list_with_http_info(self, **kwargs): # noqa: E501
"""get user list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_admin_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AdminPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_admin_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AdminPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
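    # Illustrative usage sketch: assuming the pagination response exposes
    # `items` and `total` attributes (an assumption, not stated in the
    # docstring), users can be fetched a page at a time:
    #
    #     page = api.get_admin_list(size=50, offset=0, fields='id,username')
    #     for admin in page.items:
    #         print(admin.id, admin.username)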
def get_alert_by_id(self, id, **kwargs): # noqa: E501
"""get alert # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:return: Alert
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_alert_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_alert_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get alert # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:return: Alert
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'need_message', 'custom_columns', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_alert_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'need_message' in params:
query_params.append(('needMessage', params['need_message'])) # noqa: E501
if 'custom_columns' in params:
query_params.append(('customColumns', params['custom_columns'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/alert/alerts/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Alert', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
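    # Illustrative usage sketch: note that this endpoint takes a *string* id,
    # unlike the numeric ids used by most other methods in this class.
    #
    #     alert = api.get_alert_by_id('DS123456', need_message=True)   # example id only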
def get_alert_list(self, **kwargs): # noqa: E501
"""get alert list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_alert_list_with_http_info(**kwargs) # noqa: E501
return data
def get_alert_list_with_http_info(self, **kwargs): # noqa: E501
"""get alert list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['custom_columns', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'custom_columns' in params:
query_params.append(('customColumns', params['custom_columns'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/alert/alerts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
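    # Illustrative usage sketch: the filter expression below is only an example
    # of the field:value syntax; actual field names and values depend on the
    # portal's alert schema.
    #
    #     alerts = api.get_alert_list(filter='cleared:false', size=100, offset=0)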
def get_alert_list_by_device_group_id(self, id, **kwargs): # noqa: E501
"""get device group alerts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list_by_device_group_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_list_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_alert_list_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_alert_list_by_device_group_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get device group alerts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list_by_device_group_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'need_message', 'custom_columns', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_list_by_device_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_alert_list_by_device_group_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_alert_list_by_device_group_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'need_message' in params:
query_params.append(('needMessage', params['need_message'])) # noqa: E501
if 'custom_columns' in params:
query_params.append(('customColumns', params['custom_columns'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}/alerts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_alert_list_by_device_id(self, id, **kwargs): # noqa: E501
"""get alerts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list_by_device_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool need_message:
:param str custom_columns:
:param str bound:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_list_by_device_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_alert_list_by_device_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_alert_list_by_device_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get alerts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_list_by_device_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param bool need_message:
:param str custom_columns:
:param str bound:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'need_message', 'custom_columns', 'bound', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_list_by_device_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_alert_list_by_device_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_alert_list_by_device_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'need_message' in params:
query_params.append(('needMessage', params['need_message'])) # noqa: E501
if 'custom_columns' in params:
query_params.append(('customColumns', params['custom_columns'])) # noqa: E501
if 'bound' in params:
query_params.append(('bound', params['bound'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/alerts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
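    # Illustrative usage sketch: `start` and `end` are treated as epoch
    # timestamps here (an assumption based on common usage of these fields).
    #
    #     import time
    #     now = int(time.time())
    #     alerts = api.get_alert_list_by_device_id(345, start=now - 86400, end=now)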
def get_alert_rule_by_id(self, id, **kwargs): # noqa: E501
"""get alert rule by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_rule_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_rule_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_alert_rule_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_alert_rule_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get alert rule by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_rule_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_rule_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_alert_rule_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_alert_rule_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_alert_rule_list(self, **kwargs): # noqa: E501
"""get alert rule list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_rule_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertRulePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_rule_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_alert_rule_list_with_http_info(**kwargs) # noqa: E501
return data
def get_alert_rule_list_with_http_info(self, **kwargs): # noqa: E501
"""get alert rule list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert_rule_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertRulePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_alert_rule_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertRulePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_sdt_list_by_device_id(self, id, **kwargs): # noqa: E501
"""get SDTs for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_sdt_list_by_device_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_sdt_list_by_device_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_all_sdt_list_by_device_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_all_sdt_list_by_device_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get SDTs for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_sdt_list_by_device_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_sdt_list_by_device_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_all_sdt_list_by_device_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_all_sdt_list_by_device_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/sdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDTPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
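    # Illustrative usage sketch: fetch the SDTs (scheduled downtimes) currently
    # defined for a device, a page at a time.
    #
    #     sdts = api.get_all_sdt_list_by_device_id(345, size=25, offset=0)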
def get_all_sdt_list_by_website_group_id(self, id, **kwargs): # noqa: E501
"""get a list of SDTs for a website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_sdt_list_by_website_group_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_sdt_list_by_website_group_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_all_sdt_list_by_website_group_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_all_sdt_list_by_website_group_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get a list of SDTs for a website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_sdt_list_by_website_group_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_sdt_list_by_website_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_all_sdt_list_by_website_group_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_all_sdt_list_by_website_group_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}/sdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDTPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_token_list(self, **kwargs): # noqa: E501
"""get a list of api tokens across users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_token_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ApiTokenPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_token_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_api_token_list_with_http_info(**kwargs) # noqa: E501
return data
def get_api_token_list_with_http_info(self, **kwargs): # noqa: E501
"""get a list of api tokens across users # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_token_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ApiTokenPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_token_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/apitokens', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiTokenPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_token_list_by_admin_id(self, admin_id, **kwargs): # noqa: E501
"""get api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_token_list_by_admin_id(admin_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ApiTokenPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_api_token_list_by_admin_id_with_http_info(admin_id, **kwargs) # noqa: E501
else:
(data) = self.get_api_token_list_by_admin_id_with_http_info(admin_id, **kwargs) # noqa: E501
return data
def get_api_token_list_by_admin_id_with_http_info(self, admin_id, **kwargs): # noqa: E501
"""get api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_api_token_list_by_admin_id_with_http_info(admin_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ApiTokenPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['admin_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_token_list_by_admin_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'admin_id' is set
if ('admin_id' not in params or
params['admin_id'] is None):
raise ValueError("Missing the required parameter `admin_id` when calling `get_api_token_list_by_admin_id`") # noqa: E501
        if 'admin_id' in params and not re.search(r'\d+', params['admin_id'] if type(params['admin_id']) is str else str(params['admin_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `admin_id` when calling `get_api_token_list_by_admin_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'admin_id' in params:
path_params['adminId'] = params['admin_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{adminId}/apitokens', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApiTokenPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
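    # Illustrative usage sketch: the required argument is the *admin* (user) id,
    # not a token id; the `fields` value shown is an assumption.
    #
    #     tokens = api.get_api_token_list_by_admin_id(7, fields='id,accessId')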
def get_associated_device_list_by_data_source_id(self, id, **kwargs): # noqa: E501
"""get devices associated with a datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_associated_device_list_by_data_source_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDataSourceAssociatedPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_associated_device_list_by_data_source_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_associated_device_list_by_data_source_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_associated_device_list_by_data_source_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get devices associated with a datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_associated_device_list_by_data_source_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDataSourceAssociatedPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_associated_device_list_by_data_source_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_associated_device_list_by_data_source_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_associated_device_list_by_data_source_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{id}/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceAssociatedPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_audit_log_by_id(self, id, **kwargs): # noqa: E501
"""Get audit log by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_audit_log_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: AuditLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_log_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_audit_log_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_audit_log_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Get audit log by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_audit_log_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: AuditLog
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_audit_log_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_audit_log_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/accesslogs/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AuditLog', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_audit_log_list(self, **kwargs): # noqa: E501
"""Get audit logs # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_audit_log_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str format:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AccessLogPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_audit_log_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_audit_log_list_with_http_info(**kwargs) # noqa: E501
return data
def get_audit_log_list_with_http_info(self, **kwargs): # noqa: E501
"""Get audit logs # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_audit_log_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str format:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AccessLogPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['format', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_audit_log_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/accesslogs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AccessLogPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
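    # Illustrative sketch (not part of the generated client): how a caller might page
    # through audit logs with this method. Assumes `api` is an instance of this API
    # class built from a configured ApiClient; the filter expression, page size, and
    # the `items`/`happened_on`/`description` attribute names are example assumptions.
    #
    # >>> logs = api.get_audit_log_list(filter='username:"admin"', size=50, offset=0)
    # >>> for entry in logs.items:
    # ...     print(entry.happened_on, entry.description)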
def get_aws_external_id(self, **kwargs): # noqa: E501
"""Get AWS external id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_external_id(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: AwsExternalId
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_aws_external_id_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_aws_external_id_with_http_info(**kwargs) # noqa: E501
return data
def get_aws_external_id_with_http_info(self, **kwargs): # noqa: E501
"""Get AWS external id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_aws_external_id_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: AwsExternalId
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_aws_external_id" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/aws/externalId', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AwsExternalId', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
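    # Illustrative sketch (assumption): this endpoint takes no parameters, so a caller
    # simply requests the current AWS external id. `api` is assumed to be an instance
    # of this class; the `external_id` attribute name on AwsExternalId is an assumption.
    #
    # >>> ext = api.get_aws_external_id()
    # >>> print(ext.external_id)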
def get_collector_by_id(self, id, **kwargs): # noqa: E501
"""get collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_collector_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_collector_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_collector_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_collector_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_collector_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_collector_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Collector', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
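    # Illustrative sketch (assumption): fetching a single collector and restricting the
    # returned fields. `api` is assumed to be an instance of this API class; the id,
    # the field list, and the `id`/`hostname` attribute names are example values only.
    #
    # >>> collector = api.get_collector_by_id(42, fields='id,hostname,platform')
    # >>> print(collector.id, collector.hostname)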
def get_collector_group_by_id(self, id, **kwargs): # noqa: E501
"""get collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_collector_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_collector_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_collector_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_collector_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_collector_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_collector_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_collector_group_list(self, **kwargs): # noqa: E501
"""get collector group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: CollectorGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_collector_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_collector_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_collector_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get collector group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: CollectorGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_collector_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_collector_installer(self, collector_id, os_and_arch, **kwargs): # noqa: E501
"""get collector installer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_installer(collector_id, os_and_arch, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int collector_id: (required)
:param str os_and_arch: (required)
:param int collector_version: The version of the installer you'd like to download. This defaults to the latest GD Collector, unless useEA is true
:param str token:
:param bool monitor_others:
:param str collector_size: The size of the Collector you'd like to install. Options are nano, small (requires 2GB memory), medium (requires 4GB memory), large (requires 8GB memory). Requires collector version 22.180 or higher. Defaults to small
:param bool use_ea: If true, the latest EA Collector version will be used. Defaults to false
:return: file
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_collector_installer_with_http_info(collector_id, os_and_arch, **kwargs) # noqa: E501
else:
(data) = self.get_collector_installer_with_http_info(collector_id, os_and_arch, **kwargs) # noqa: E501
return data
def get_collector_installer_with_http_info(self, collector_id, os_and_arch, **kwargs): # noqa: E501
"""get collector installer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_installer_with_http_info(collector_id, os_and_arch, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int collector_id: (required)
:param str os_and_arch: (required)
:param int collector_version: The version of the installer you'd like to download. This defaults to the latest GD Collector, unless useEA is true
:param str token:
:param bool monitor_others:
:param str collector_size: The size of the Collector you'd like to install. Options are nano, small (requires 2GB memory), medium (requires 4GB memory), large (requires 8GB memory). Requires collector version 22.180 or higher. Defaults to small
:param bool use_ea: If true, the latest EA Collector version will be used. Defaults to false
:return: file
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['collector_id', 'os_and_arch', 'collector_version', 'token', 'monitor_others', 'collector_size', 'use_ea'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_collector_installer" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'collector_id' is set
if ('collector_id' not in params or
params['collector_id'] is None):
raise ValueError("Missing the required parameter `collector_id` when calling `get_collector_installer`") # noqa: E501
# verify the required parameter 'os_and_arch' is set
if ('os_and_arch' not in params or
params['os_and_arch'] is None):
raise ValueError("Missing the required parameter `os_and_arch` when calling `get_collector_installer`") # noqa: E501
        if 'collector_id' in params and not re.search(r'\d+', params['collector_id'] if type(params['collector_id']) is str else str(params['collector_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `collector_id` when calling `get_collector_installer`, must conform to the pattern `/\d+/`") # noqa: E501
if 'os_and_arch' in params and not re.search('.+', params['os_and_arch'] if type(params['os_and_arch']) is str else str(params['os_and_arch'])): # noqa: E501
raise ValueError("Invalid value for parameter `os_and_arch` when calling `get_collector_installer`, must conform to the pattern `/.+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'collector_id' in params:
path_params['collectorId'] = params['collector_id'] # noqa: E501
if 'os_and_arch' in params:
path_params['osAndArch'] = params['os_and_arch'] # noqa: E501
query_params = []
if 'collector_version' in params:
query_params.append(('collectorVersion', params['collector_version'])) # noqa: E501
if 'token' in params:
query_params.append(('token', params['token'])) # noqa: E501
if 'monitor_others' in params:
query_params.append(('monitorOthers', params['monitor_others'])) # noqa: E501
if 'collector_size' in params:
query_params.append(('collectorSize', params['collector_size'])) # noqa: E501
if 'use_ea' in params:
query_params.append(('useEA', params['use_ea'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{collectorId}/installers/{osAndArch}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='file', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
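    # Illustrative sketch (assumption): downloading a collector installer to disk.
    # Because the response is a binary installer, a caller would typically pass
    # _preload_content=False and write out the raw body. The ids, sizes, output path,
    # and the `resp.data` attribute of the underlying HTTP response are assumptions,
    # not guarantees about how this client returns file content.
    #
    # >>> resp = api.get_collector_installer(42, 'Linux64', collector_size='medium',
    # ...                                    _preload_content=False)
    # >>> with open('LogicMonitorSetup.bin', 'wb') as fh:
    # ...     fh.write(resp.data)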
def get_collector_list(self, **kwargs): # noqa: E501
"""get collector list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: CollectorPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_collector_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_collector_list_with_http_info(**kwargs) # noqa: E501
return data
def get_collector_list_with_http_info(self, **kwargs): # noqa: E501
"""get collector list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_collector_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: CollectorPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_collector_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
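    # Illustrative sketch (assumption): issuing the list call asynchronously. With
    # async_req=True the wrapper returns a thread-like object whose get() yields the
    # parsed response, as shown in the docstring above. The filter value and the
    # `total` attribute on the pagination response are example assumptions.
    #
    # >>> thread = api.get_collector_list(async_req=True, filter='hostname~"prod"')
    # >>> collectors = thread.get()
    # >>> print(collectors.total)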
def get_dashboard_by_id(self, id, **kwargs): # noqa: E501
"""get dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool template:
:param str format:
:param str fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dashboard_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_dashboard_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_dashboard_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool template:
:param str format:
:param str fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'template', 'format', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dashboard_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_dashboard_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_dashboard_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'template' in params:
query_params.append(('template', params['template'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Dashboard', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
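    # Illustrative sketch (assumption): exporting a dashboard in template form using
    # the `template` flag documented above. The dashboard id is an example value and
    # the `name` attribute on the returned Dashboard is an assumption.
    #
    # >>> dash = api.get_dashboard_by_id(321, template=True, format='json')
    # >>> print(dash.name)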
def get_dashboard_group_by_id(self, id, **kwargs): # noqa: E501
"""get dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool template:
:param str fields:
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dashboard_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_dashboard_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_dashboard_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool template:
:param str fields:
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'template', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dashboard_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_dashboard_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_dashboard_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'template' in params:
query_params.append(('template', params['template'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dashboard_group_list(self, **kwargs): # noqa: E501
"""get dashboard group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DashboardGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dashboard_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dashboard_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_dashboard_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get dashboard group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DashboardGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dashboard_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dashboard_list(self, **kwargs): # noqa: E501
"""get dashboard list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DashboardPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dashboard_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dashboard_list_with_http_info(**kwargs) # noqa: E501
return data
def get_dashboard_list_with_http_info(self, **kwargs): # noqa: E501
"""get dashboard list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dashboard_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DashboardPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dashboard_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
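    # Illustrative sketch (assumption): walking the full dashboard list with
    # size/offset paging. `api` is assumed to be an instance of this class; the
    # `items` and `total` attributes of the pagination response are assumptions.
    #
    # >>> offset, size, dashboards = 0, 100, []
    # >>> while True:
    # ...     page = api.get_dashboard_list(size=size, offset=offset)
    # ...     dashboards.extend(page.items)
    # ...     if len(dashboards) >= page.total:
    # ...         break
    # ...     offset += size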
def get_data_source_overview_graph_by_id(self, ds_id, id, **kwargs): # noqa: E501
"""get datasource overview graph by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_data_source_overview_graph_by_id(ds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ds_id: (required)
:param int id: (required)
:return: DataSourceOverviewGraph
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_data_source_overview_graph_by_id_with_http_info(ds_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_data_source_overview_graph_by_id_with_http_info(ds_id, id, **kwargs) # noqa: E501
return data
def get_data_source_overview_graph_by_id_with_http_info(self, ds_id, id, **kwargs): # noqa: E501
"""get datasource overview graph by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_data_source_overview_graph_by_id_with_http_info(ds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ds_id: (required)
:param int id: (required)
:return: DataSourceOverviewGraph
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ds_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_data_source_overview_graph_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'ds_id' is set
if ('ds_id' not in params or
params['ds_id'] is None):
raise ValueError("Missing the required parameter `ds_id` when calling `get_data_source_overview_graph_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_data_source_overview_graph_by_id`") # noqa: E501
        if 'ds_id' in params and not re.search(r'\d+', params['ds_id'] if type(params['ds_id']) is str else str(params['ds_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `ds_id` when calling `get_data_source_overview_graph_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_data_source_overview_graph_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ds_id' in params:
path_params['dsId'] = params['ds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{dsId}/ographs/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceOverviewGraph', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_data_source_overview_graph_list(self, ds_id, **kwargs): # noqa: E501
"""get datasource overview graph list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_data_source_overview_graph_list(ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ds_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DatasourceOverviewGraphPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_data_source_overview_graph_list_with_http_info(ds_id, **kwargs) # noqa: E501
else:
(data) = self.get_data_source_overview_graph_list_with_http_info(ds_id, **kwargs) # noqa: E501
return data
def get_data_source_overview_graph_list_with_http_info(self, ds_id, **kwargs): # noqa: E501
"""get datasource overview graph list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_data_source_overview_graph_list_with_http_info(ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int ds_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DatasourceOverviewGraphPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['ds_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_data_source_overview_graph_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'ds_id' is set
if ('ds_id' not in params or
params['ds_id'] is None):
raise ValueError("Missing the required parameter `ds_id` when calling `get_data_source_overview_graph_list`") # noqa: E501
        if 'ds_id' in params and not re.search(r'\d+', params['ds_id'] if type(params['ds_id']) is str else str(params['ds_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `ds_id` when calling `get_data_source_overview_graph_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'ds_id' in params:
path_params['dsId'] = params['ds_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{dsId}/ographs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DatasourceOverviewGraphPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_datasource_by_id(self, id, **kwargs): # noqa: E501
"""get datasource by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_datasource_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str format:
:param str fields:
:return: DataSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_datasource_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_datasource_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_datasource_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get datasource by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_datasource_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str format:
:param str fields:
:return: DataSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'format', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_datasource_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_datasource_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_datasource_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_datasource_list(self, **kwargs): # noqa: E501
"""get datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_datasource_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str format:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_datasource_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_datasource_list_with_http_info(**kwargs) # noqa: E501
return data
def get_datasource_list_with_http_info(self, **kwargs): # noqa: E501
"""get datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_datasource_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str format:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['format', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_datasource_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DatasourcePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_debug_command_result(self, id, **kwargs): # noqa: E501
"""Get the result of a Collector debug command # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_debug_command_result(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param int collector_id:
:return: Debug
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_debug_command_result_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_debug_command_result_with_http_info(id, **kwargs) # noqa: E501
return data
def get_debug_command_result_with_http_info(self, id, **kwargs): # noqa: E501
"""Get the result of a Collector debug command # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_debug_command_result_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param int collector_id:
:return: Debug
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'collector_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_debug_command_result" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_debug_command_result`") # noqa: E501
if 'id' in params and not re.search('.*', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError("Invalid value for parameter `id` when calling `get_debug_command_result`, must conform to the pattern `/.*/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'collector_id' in params:
query_params.append(('collectorId', params['collector_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/debug/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Debug', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
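    # Illustrative sketch (assumption): polling for the output of a previously
    # submitted debug command. The session id and collector id are example values;
    # the `output` attribute on the Debug model is an assumption.
    #
    # >>> result = api.get_debug_command_result('12345', collector_id=42)
    # >>> print(result.output)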
def get_device_by_id(self, id, **kwargs): # noqa: E501
"""get device by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_device_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_device_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get device by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_device_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Device', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
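    # Illustrative sketch (assumption): fetching a device and limiting the fields
    # returned. The device id, the field list, and the `display_name`/`host_status`
    # attribute names on the Device model are example assumptions.
    #
    # >>> device = api.get_device_by_id(1234, fields='id,displayName,hostStatus')
    # >>> print(device.display_name, device.host_status)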
def get_device_config_source_config_by_id(self, device_id, hds_id, instance_id, id, **kwargs): # noqa: E501
"""get a config for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_config_source_config_by_id(device_id, hds_id, instance_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:param str id: (required)
:param str format:
:param int start_epoch:
:param str fields:
:return: DeviceDataSourceInstanceConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_config_source_config_by_id_with_http_info(device_id, hds_id, instance_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_config_source_config_by_id_with_http_info(device_id, hds_id, instance_id, id, **kwargs) # noqa: E501
return data
def get_device_config_source_config_by_id_with_http_info(self, device_id, hds_id, instance_id, id, **kwargs): # noqa: E501
"""get a config for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_config_source_config_by_id_with_http_info(device_id, hds_id, instance_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:param str id: (required)
:param str format:
:param int start_epoch:
:param str fields:
:return: DeviceDataSourceInstanceConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id', 'id', 'format', 'start_epoch', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_config_source_config_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_config_source_config_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_config_source_config_by_id`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `get_device_config_source_config_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_config_source_config_by_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_config_source_config_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
        if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_config_source_config_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
        if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
            raise ValueError(r"Invalid value for parameter `instance_id` when calling `get_device_config_source_config_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search('[-_a-zA-Z0-9]+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError("Invalid value for parameter `id` when calling `get_device_config_source_config_by_id`, must conform to the pattern `/[-_a-zA-Z0-9]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'start_epoch' in params:
query_params.append(('startEpoch', params['start_epoch'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/config/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_config_source_config_list(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""get config instances for a configsource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_config_source_config_list(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourceInstanceConfigPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_config_source_config_list_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_config_source_config_list_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
return data
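# Illustrative pagination sketch (comment only): combining the size/offset query
# parameters of get_device_config_source_config_list to walk all config instances.
# Reading `page.items` assumes the usual pagination-response shape of the generated
# models; `api` and the ids are placeholders.
#
#   offset, size, configs = 0, 50, []
#   while True:
#       page = api.get_device_config_source_config_list(device_id, hds_id, instance_id,
#                                                        size=size, offset=offset)
#       configs.extend(page.items)
#       if len(page.items) < size:
#           break
#       offset += size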
def get_device_config_source_config_list_with_http_info(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""get config instances for a configsource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_config_source_config_list_with_http_info(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: (required)
:param int instance_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourceInstanceConfigPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_config_source_config_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_config_source_config_list`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_config_source_config_list`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `get_device_config_source_config_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_config_source_config_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_config_source_config_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `instance_id` when calling `get_device_config_source_config_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/config', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDatasourceInstanceConfigPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_by_id(self, device_id, id, **kwargs): # noqa: E501
"""get device datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_by_id(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_by_id_with_http_info(device_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_by_id_with_http_info(device_id, id, **kwargs) # noqa: E501
return data
def get_device_datasource_by_id_with_http_info(self, device_id, id, **kwargs): # noqa: E501
"""get device datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_by_id_with_http_info(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_data_by_id(self, device_id, id, **kwargs): # noqa: E501
"""get device datasource data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_data_by_id(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: DeviceDataSourceData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_data_by_id_with_http_info(device_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_data_by_id_with_http_info(device_id, id, **kwargs) # noqa: E501
return data
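# Illustrative sketch (comment only) for pulling raw datasource data over a recent
# window. Treating `start`/`end` as epoch-second timestamps and `datapoints` as a
# comma-separated list of datapoint names are assumptions based on the parameter
# types above; `api`, `device_id`, `id` and the datapoint name are placeholders.
#
#   import time
#   now = int(time.time())
#   data = api.get_device_datasource_data_by_id(device_id, id,
#                                                start=now - 3600, end=now,
#                                                datapoints='CPUBusyPercent')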
def get_device_datasource_data_by_id_with_http_info(self, device_id, id, **kwargs): # noqa: E501
"""get device datasource data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_data_by_id_with_http_info(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: DeviceDataSourceData
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'id', 'period', 'start', 'end', 'datapoints', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_data_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_data_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_data_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_data_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_data_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'period' in params:
query_params.append(('period', params['period'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'datapoints' in params:
query_params.append(('datapoints', params['datapoints'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{id}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceData', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_alert_setting_by_id(self, device_id, hds_id, instance_id, id, **kwargs): # noqa: E501
"""get device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_alert_setting_by_id(device_id, hds_id, instance_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_alert_setting_by_id_with_http_info(self, device_id, hds_id, instance_id, id, **kwargs): # noqa: E501
"""get device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id', 'id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_alert_setting_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `get_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_alert_setting_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `instance_id` when calling `get_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/alertsettings/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceAlertSetting', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_alert_setting_list(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""get a list of alert settings for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_alert_setting_list(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:return: DeviceDataSourceInstanceAlertSettingPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_alert_setting_list_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_alert_setting_list_with_http_info(device_id, hds_id, instance_id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_alert_setting_list_with_http_info(self, device_id, hds_id, instance_id, **kwargs): # noqa: E501
"""get a list of alert settings for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_alert_setting_list_with_http_info(device_id, hds_id, instance_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:return: DeviceDataSourceInstanceAlertSettingPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_alert_setting_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_alert_setting_list`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_alert_setting_list`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `get_device_datasource_instance_alert_setting_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_alert_setting_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_alert_setting_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `instance_id` when calling `get_device_datasource_instance_alert_setting_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/alertsettings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceAlertSettingPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_by_id(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_by_id(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
return data
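# Illustrative sketch (comment only) of the `fields` projection: limiting the
# returned DeviceDataSourceInstance to selected properties. The comma-separated
# field syntax and the property names are assumptions; `api` and the ids are
# placeholders.
#
#   instance = api.get_device_datasource_instance_by_id(device_id, hds_id, id,
#                                                        fields='id,name,displayName')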
def get_device_datasource_instance_by_id_with_http_info(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_data(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_data(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: DeviceDataSourceInstanceData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_data_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_data_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_data_with_http_info(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_data_with_http_info(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: DeviceDataSourceInstanceData
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'period', 'start', 'end', 'datapoints', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_data" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_data`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_data`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_data`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_data`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'period' in params:
query_params.append(('period', params['period'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'datapoints' in params:
query_params.append(('datapoints', params['datapoints'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceData', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_graph_data(self, device_id, hds_id, id, graph_id, **kwargs): # noqa: E501
"""get device instance graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_graph_data(device_id, hds_id, id, graph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param int graph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_graph_data_with_http_info(device_id, hds_id, id, graph_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_graph_data_with_http_info(device_id, hds_id, id, graph_id, **kwargs) # noqa: E501
return data
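# Illustrative sketch (comment only) for fetching a GraphPlot for one instance
# graph over the last 24 hours. Note that the graph_id path pattern used below
# (`-?\d+`) also admits negative ids. Epoch-second `start`/`end` values are an
# assumption; `api` and the ids are placeholders.
#
#   import time
#   now = int(time.time())
#   plot = api.get_device_datasource_instance_graph_data(device_id, hds_id, id, graph_id,
#                                                         start=now - 86400, end=now)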
def get_device_datasource_instance_graph_data_with_http_info(self, device_id, hds_id, id, graph_id, **kwargs): # noqa: E501
"""get device instance graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_graph_data_with_http_info(device_id, hds_id, id, graph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param int graph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'graph_id', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_graph_data" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_graph_data`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_graph_data`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_graph_data`") # noqa: E501
# verify the required parameter 'graph_id' is set
if ('graph_id' not in params or
params['graph_id'] is None):
raise ValueError("Missing the required parameter `graph_id` when calling `get_device_datasource_instance_graph_data`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'graph_id' in params and not re.search(r'-?\d+', params['graph_id'] if type(params['graph_id']) is str else str(params['graph_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `graph_id` when calling `get_device_datasource_instance_graph_data`, must conform to the pattern `/-?\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'graph_id' in params:
path_params['graphId'] = params['graph_id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}/graphs/{graphId}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_group_by_id(self, device_id, device_ds_id, id, **kwargs): # noqa: E501
"""get device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_by_id(device_id, device_ds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_group_by_id_with_http_info(self, device_id, device_ds_id, id, **kwargs): # noqa: E501
"""get device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param int id: (required)
:param str fields:
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `get_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_group_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'device_ds_id' in params and not re.search(r'\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_ds_id` when calling `get_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_group_list(self, device_id, device_ds_id, **kwargs): # noqa: E501
"""get device datasource instance group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_list(device_id, device_ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourceInstanceGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_group_list_with_http_info(device_id, device_ds_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_group_list_with_http_info(device_id, device_ds_id, **kwargs) # noqa: E501
return data
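# Illustrative sketch (comment only) of server-side filtering on the instance
# group list. The `name:value` filter grammar is an assumption about the API and
# is not defined in this module; `api` and the ids are placeholders.
#
#   groups = api.get_device_datasource_instance_group_list(device_id, device_ds_id,
#                                                           filter='name:prod*',
#                                                           size=25)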
def get_device_datasource_instance_group_list_with_http_info(self, device_id, device_ds_id, **kwargs): # noqa: E501
"""get device datasource instance group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_list_with_http_info(device_id, device_ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourceInstanceGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_group_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_group_list`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `get_device_datasource_instance_group_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_group_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'device_ds_id' in params and not re.search(r'\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_ds_id` when calling `get_device_datasource_instance_group_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDatasourceInstanceGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_instance_group_overview_graph_data(self, device_id, device_ds_id, dsig_id, ograph_id, **kwargs): # noqa: E501
"""get device instance group overview graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_overview_graph_data(device_id, device_ds_id, dsig_id, ograph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param int dsig_id: (required)
:param int ograph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_group_overview_graph_data_with_http_info(device_id, device_ds_id, dsig_id, ograph_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_group_overview_graph_data_with_http_info(device_id, device_ds_id, dsig_id, ograph_id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_group_overview_graph_data_with_http_info(self, device_id, device_ds_id, dsig_id, ograph_id, **kwargs): # noqa: E501
"""get device instance group overview graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_group_overview_graph_data_with_http_info(device_id, device_ds_id, dsig_id, ograph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID you'd like to add an instance group for (required)
:param int dsig_id: (required)
:param int ograph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'dsig_id', 'ograph_id', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_group_overview_graph_data" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_group_overview_graph_data`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `get_device_datasource_instance_group_overview_graph_data`") # noqa: E501
# verify the required parameter 'dsig_id' is set
if ('dsig_id' not in params or
params['dsig_id'] is None):
raise ValueError("Missing the required parameter `dsig_id` when calling `get_device_datasource_instance_group_overview_graph_data`") # noqa: E501
# verify the required parameter 'ograph_id' is set
if ('ograph_id' not in params or
params['ograph_id'] is None):
raise ValueError("Missing the required parameter `ograph_id` when calling `get_device_datasource_instance_group_overview_graph_data`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_group_overview_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'device_ds_id' in params and not re.search(r'\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_ds_id` when calling `get_device_datasource_instance_group_overview_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'dsig_id' in params and not re.search(r'\d+', params['dsig_id'] if type(params['dsig_id']) is str else str(params['dsig_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `dsig_id` when calling `get_device_datasource_instance_group_overview_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'ograph_id' in params and not re.search(r'\d+', params['ograph_id'] if type(params['ograph_id']) is str else str(params['ograph_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `ograph_id` when calling `get_device_datasource_instance_group_overview_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
if 'dsig_id' in params:
path_params['dsigId'] = params['dsig_id'] # noqa: E501
if 'ograph_id' in params:
path_params['ographId'] = params['ograph_id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups/{dsigId}/graphs/{ographId}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
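# Usage sketch (illustrative, not part of the generated client): fetching
# overview graph data asynchronously. Per the docstrings above, passing
# async_req=True returns the request thread and thread.get() yields the
# GraphPlot. `api` is assumed to be an instance of this API class built with a
# configured ApiClient; the numeric IDs and epoch timestamps below are
# placeholder example values.
#
#     thread = api.get_device_datasource_instance_group_overview_graph_data(
#         533, 7584, 12, 3, start=1609459200, end=1609545600, async_req=True)
#     graph_plot = thread.get()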
def get_device_datasource_instance_list(self, device_id, hds_id, **kwargs): # noqa: E501
"""get device instance list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_list(device_id, hds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:return: DeviceDatasourceInstancePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_list_with_http_info(device_id, hds_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_list_with_http_info(device_id, hds_id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_list_with_http_info(self, device_id, hds_id, **kwargs): # noqa: E501
"""get device instance list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_list_with_http_info(device_id, hds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:return: DeviceDatasourceInstancePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_list`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_list`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDatasourceInstancePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
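# Usage sketch (illustrative): listing the instances under a device-datasource.
# `api` is assumed to be an instance of this API class; the IDs are example
# values, and the `items` attribute is assumed from the
# DeviceDatasourceInstancePaginationResponse model rather than defined here.
#
#     response = api.get_device_datasource_instance_list(533, 7584)
#     for instance in response.items:
#         print(instance)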
def get_device_datasource_instance_sdt_history(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance SDT history # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_sdt_history(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_instance_sdt_history_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_instance_sdt_history_with_http_info(device_id, hds_id, id, **kwargs) # noqa: E501
return data
def get_device_datasource_instance_sdt_history_with_http_info(self, device_id, hds_id, id, **kwargs): # noqa: E501
"""get device instance SDT history # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_instance_sdt_history_with_http_info(device_id, hds_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_instance_sdt_history" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_instance_sdt_history`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `get_device_datasource_instance_sdt_history`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_datasource_instance_sdt_history`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_instance_sdt_history`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `get_device_datasource_instance_sdt_history`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_datasource_instance_sdt_history`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}/historysdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupSDTHistoryPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_datasource_list(self, device_id, **kwargs): # noqa: E501
"""get device datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_list(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_datasource_list_with_http_info(device_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_datasource_list_with_http_info(device_id, **kwargs) # noqa: E501
return data
def get_device_datasource_list_with_http_info(self, device_id, **kwargs): # noqa: E501
"""get device datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_datasource_list_with_http_info(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_datasource_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_datasource_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_datasource_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDatasourcePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_by_id(self, id, **kwargs): # noqa: E501
"""get device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_device_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_cluster_alert_conf_by_id(self, device_group_id, id, **kwargs): # noqa: E501
"""Get cluster alert configuration by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_cluster_alert_conf_by_id(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
return data
def get_device_group_cluster_alert_conf_by_id_with_http_info(self, device_group_id, id, **kwargs): # noqa: E501
"""Get cluster alert configuration by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_cluster_alert_conf_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `get_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_group_cluster_alert_conf_by_id`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `get_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceClusterAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_cluster_alert_conf_list(self, device_group_id, **kwargs): # noqa: E501
"""get a list of cluster alert configurations for a device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_cluster_alert_conf_list(device_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceClusterAlertConfigPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_cluster_alert_conf_list_with_http_info(device_group_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_cluster_alert_conf_list_with_http_info(device_group_id, **kwargs) # noqa: E501
return data
def get_device_group_cluster_alert_conf_list_with_http_info(self, device_group_id, **kwargs): # noqa: E501
"""get a list of cluster alert configurations for a device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_cluster_alert_conf_list_with_http_info(device_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceClusterAlertConfigPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_cluster_alert_conf_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `get_device_group_cluster_alert_conf_list`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `get_device_group_cluster_alert_conf_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceClusterAlertConfigPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_datasource_alert_setting(self, device_group_id, ds_id, **kwargs): # noqa: E501
"""get device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_alert_setting(device_group_id, ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param str fields:
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, **kwargs) # noqa: E501
return data
def get_device_group_datasource_alert_setting_with_http_info(self, device_group_id, ds_id, **kwargs): # noqa: E501
"""get device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param str fields:
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'ds_id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_datasource_alert_setting" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `get_device_group_datasource_alert_setting`") # noqa: E501
# verify the required parameter 'ds_id' is set
if ('ds_id' not in params or
params['ds_id'] is None):
raise ValueError("Missing the required parameter `ds_id` when calling `get_device_group_datasource_alert_setting`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `get_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`") # noqa: E501
if 'ds_id' in params and not re.search(r'\d+', params['ds_id'] if type(params['ds_id']) is str else str(params['ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `ds_id` when calling `get_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'ds_id' in params:
path_params['dsId'] = params['ds_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/datasources/{dsId}/alertsettings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupDataSourceAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_datasource_by_id(self, device_group_id, id, **kwargs): # noqa: E501
"""get device group datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_by_id(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceGroupDataSource
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_datasource_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_datasource_by_id_with_http_info(device_group_id, id, **kwargs) # noqa: E501
return data
def get_device_group_datasource_by_id_with_http_info(self, device_group_id, id, **kwargs): # noqa: E501
"""get device group datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_by_id_with_http_info(device_group_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param str fields:
:return: DeviceGroupDataSource
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_datasource_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `get_device_group_datasource_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_group_datasource_by_id`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `get_device_group_datasource_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_group_datasource_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/datasources/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupDataSource', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_datasource_list(self, device_group_id, **kwargs): # noqa: E501
"""get device group datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_list(device_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param bool include_disabled_data_source_without_instance:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupDatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_datasource_list_with_http_info(device_group_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_datasource_list_with_http_info(device_group_id, **kwargs) # noqa: E501
return data
def get_device_group_datasource_list_with_http_info(self, device_group_id, **kwargs): # noqa: E501
"""get device group datasource list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_datasource_list_with_http_info(device_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param bool include_disabled_data_source_without_instance:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupDatasourcePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'include_disabled_data_source_without_instance', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_datasource_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `get_device_group_datasource_list`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `get_device_group_datasource_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
query_params = []
if 'include_disabled_data_source_without_instance' in params:
query_params.append(('includeDisabledDataSourceWithoutInstance', params['include_disabled_data_source_without_instance'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/datasources', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupDatasourcePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
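# Usage sketch (illustrative): listing a device group's datasources, including
# disabled datasources that have no instances. `api` is assumed to be an
# instance of this API class; the group ID and page size are example values.
#
#     datasources = api.get_device_group_datasource_list(
#         42, include_disabled_data_source_without_instance=True, size=100)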
def get_device_group_list(self, **kwargs): # noqa: E501
"""get device group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_device_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_device_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get device group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
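# Usage sketch (illustrative): paging through device groups with size/offset
# and a filter expression. The 'name~"prod"' filter syntax and the
# `items`/`total` attributes are assumptions about the API's filter format and
# the DeviceGroupPaginationResponse model, not something defined in this file.
#
#     offset, size, groups = 0, 50, []
#     while True:
#         page = api.get_device_group_list(size=size, offset=offset, filter='name~"prod"')
#         groups.extend(page.items)
#         if len(groups) >= page.total:
#             break
#         offset += size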
def get_device_group_property_by_name(self, gid, name, **kwargs): # noqa: E501
"""get device group property by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_property_by_name(gid, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param str fields:
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_property_by_name_with_http_info(gid, name, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_property_by_name_with_http_info(gid, name, **kwargs) # noqa: E501
return data
def get_device_group_property_by_name_with_http_info(self, gid, name, **kwargs): # noqa: E501
"""get device group property by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_property_by_name_with_http_info(gid, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param str fields:
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'name', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `get_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_device_group_property_by_name`") # noqa: E501
if 'gid' in params and not re.search(r'\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `gid` when calling `get_device_group_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `get_device_group_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
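# Usage sketch (illustrative): reading one device group property by name. The
# group ID and property name are example values; the `name`/`value` attributes
# are assumed from the EntityProperty model rather than defined here.
#
#     prop = api.get_device_group_property_by_name(42, 'location', fields='name,value')
#     print(prop.name, prop.value)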
def get_device_group_property_list(self, gid, **kwargs): # noqa: E501
"""get device group properties # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_property_list(gid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_property_list_with_http_info(gid, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_property_list_with_http_info(gid, **kwargs) # noqa: E501
return data
def get_device_group_property_list_with_http_info(self, gid, **kwargs): # noqa: E501
"""get device group properties # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_property_list_with_http_info(gid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_property_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `get_device_group_property_list`") # noqa: E501
if 'gid' in params and not re.search(r'\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `gid` when calling `get_device_group_property_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PropertyPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_group_sdt_list(self, id, **kwargs): # noqa: E501
"""get device group SDTs # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_sdt_list(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_group_sdt_list_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_device_group_sdt_list_with_http_info(id, **kwargs) # noqa: E501
return data
def get_device_group_sdt_list_with_http_info(self, id, **kwargs): # noqa: E501
"""get device group SDTs # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_group_sdt_list_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_group_sdt_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_device_group_sdt_list`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_device_group_sdt_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}/sdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDTPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_device_instance_graph_data_only_by_instance_id(self, instance_id, graph_id, **kwargs): # noqa: E501
"""get device instance data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_instance_graph_data_only_by_instance_id(instance_id, graph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int instance_id: (required)
:param int graph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_instance_graph_data_only_by_instance_id_with_http_info(instance_id, graph_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_instance_graph_data_only_by_instance_id_with_http_info(instance_id, graph_id, **kwargs) # noqa: E501
return data
def get_device_instance_graph_data_only_by_instance_id_with_http_info(self, instance_id, graph_id, **kwargs): # noqa: E501
"""get device instance data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_instance_graph_data_only_by_instance_id_with_http_info(instance_id, graph_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int instance_id: (required)
:param int graph_id: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['instance_id', 'graph_id', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_instance_graph_data_only_by_instance_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `get_device_instance_graph_data_only_by_instance_id`") # noqa: E501
# verify the required parameter 'graph_id' is set
if ('graph_id' not in params or
params['graph_id'] is None):
raise ValueError("Missing the required parameter `graph_id` when calling `get_device_instance_graph_data_only_by_instance_id`") # noqa: E501
if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `instance_id` when calling `get_device_instance_graph_data_only_by_instance_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'graph_id' in params and not re.search(r'-?\d+', params['graph_id'] if type(params['graph_id']) is str else str(params['graph_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `graph_id` when calling `get_device_instance_graph_data_only_by_instance_id`, must conform to the pattern `/-?\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
if 'graph_id' in params:
path_params['graphId'] = params['graph_id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devicedatasourceinstances/{instanceId}/graphs/{graphId}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
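# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Fetching graph data for one datasource instance. Assumes `api` is an instance
# of this API class built around an authenticated ApiClient, and assumes
# `start`/`end` are epoch-second timestamps; the ids below are hypothetical.
# >>> import time
# >>> now = int(time.time())
# >>> plot = api.get_device_instance_graph_data_only_by_instance_id(
# ...     instance_id=123, graph_id=456, start=now - 3600, end=now)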
def get_device_list(self, **kwargs): # noqa: E501
"""get device list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_device_list_with_http_info(**kwargs) # noqa: E501
return data
def get_device_list_with_http_info(self, **kwargs): # noqa: E501
"""get device list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['start', 'end', 'netflow_filter', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DevicePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
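# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Paging through devices with `size`/`offset`. Assumes `api` is an instance of
# this API class and that DevicePaginationResponse exposes an `items` list,
# which is an assumption about the generated models rather than something
# defined in this file.
# >>> offset = 0
# >>> while True:
# ...     page = api.get_device_list(size=50, offset=offset)
# ...     if not page.items:
# ...         break
# ...     offset += len(page.items)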
def get_device_property_by_name(self, device_id, name, **kwargs): # noqa: E501
"""get device property by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_property_by_name(device_id, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param str fields:
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_property_by_name_with_http_info(device_id, name, **kwargs) # noqa: E501
else:
(data) = self.get_device_property_by_name_with_http_info(device_id, name, **kwargs) # noqa: E501
return data
def get_device_property_by_name_with_http_info(self, device_id, name, **kwargs): # noqa: E501
"""get device property by name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_property_by_name_with_http_info(device_id, name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param str fields:
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'name', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `get_device_property_by_name`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `get_device_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
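# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Reading one device property by name. The device id and the property name
# "system.hostname" are example values only, and the `name`/`value` attributes
# are assumed to exist on the EntityProperty model.
# >>> prop = api.get_device_property_by_name(device_id=42, name="system.hostname")
# >>> print(prop.name, prop.value)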
def get_device_property_list(self, device_id, **kwargs): # noqa: E501
"""get device properties # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_property_list(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_device_property_list_with_http_info(device_id, **kwargs) # noqa: E501
else:
(data) = self.get_device_property_list_with_http_info(device_id, **kwargs) # noqa: E501
return data
def get_device_property_list_with_http_info(self, device_id, **kwargs): # noqa: E501
"""get device properties # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_device_property_list_with_http_info(device_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_device_property_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_device_property_list`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `get_device_property_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PropertyPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_escalation_chain_by_id(self, id, **kwargs): # noqa: E501
"""get escalation chain by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation_chain_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_escalation_chain_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_escalation_chain_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_escalation_chain_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get escalation chain by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation_chain_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_escalation_chain_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_escalation_chain_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_escalation_chain_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EscalatingChain', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_escalation_chain_list(self, **kwargs): # noqa: E501
"""get escalation chain list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation_chain_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: EscalationChainPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_escalation_chain_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_escalation_chain_list_with_http_info(**kwargs) # noqa: E501
return data
def get_escalation_chain_list_with_http_info(self, **kwargs): # noqa: E501
"""get escalation chain list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation_chain_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: EscalationChainPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_escalation_chain_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EscalationChainPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
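# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Issuing two list calls concurrently with async_req=True and joining on the
# returned threads; both methods are defined in this class.
# >>> t1 = api.get_escalation_chain_list(async_req=True)
# >>> t2 = api.get_recipient_group_list(async_req=True)
# >>> chains, recipients = t1.get(), t2.get()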
def get_immediate_device_list_by_device_group_id(self, id, **kwargs): # noqa: E501
"""get immediate devices under group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_immediate_device_list_by_device_group_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_immediate_device_list_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_immediate_device_list_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_immediate_device_list_by_device_group_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get immediate devices under group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_immediate_device_list_by_device_group_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_immediate_device_list_by_device_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_immediate_device_list_by_device_group_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_immediate_device_list_by_device_group_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}/devices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DevicePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_immediate_website_list_by_website_group_id(self, id, **kwargs): # noqa: E501
"""get a list of websites for a group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_immediate_website_list_by_website_group_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsitePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_immediate_website_list_by_website_group_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_immediate_website_list_by_website_group_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_immediate_website_list_by_website_group_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get a list of websites for a group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_immediate_website_list_by_website_group_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsitePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_immediate_website_list_by_website_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_immediate_website_list_by_website_group_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_immediate_website_list_by_website_group_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}/websites', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsitePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_netflow_endpoint_list(self, id, **kwargs): # noqa: E501
"""get netflow endpoint list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_endpoint_list(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str port:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: EndpointPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_netflow_endpoint_list_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_netflow_endpoint_list_with_http_info(id, **kwargs) # noqa: E501
return data
def get_netflow_endpoint_list_with_http_info(self, id, **kwargs): # noqa: E501
"""get netflow endpoint list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_endpoint_list_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str port:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: EndpointPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'port', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_netflow_endpoint_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_netflow_endpoint_list`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_netflow_endpoint_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'port' in params:
query_params.append(('port', params['port'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/endpoints', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EndpointPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
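# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Listing netflow endpoints for a device over the last hour. Epoch-second
# `start`/`end` values and the device id are assumptions for illustration.
# >>> import time
# >>> now = int(time.time())
# >>> endpoints = api.get_netflow_endpoint_list(id=42, start=now - 3600, end=now, size=25)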
def get_netflow_flow_list(self, id, **kwargs): # noqa: E501
"""get netflow flow list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_flow_list(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: FlowRecordPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_netflow_flow_list_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_netflow_flow_list_with_http_info(id, **kwargs) # noqa: E501
return data
def get_netflow_flow_list_with_http_info(self, id, **kwargs): # noqa: E501
"""get netflow flow list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_flow_list_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: FlowRecordPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_netflow_flow_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_netflow_flow_list`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_netflow_flow_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/flows', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FlowRecordPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_netflow_port_list(self, id, **kwargs): # noqa: E501
"""get netflow port list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_port_list(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str ip:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PortPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_netflow_port_list_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_netflow_port_list_with_http_info(id, **kwargs) # noqa: E501
return data
def get_netflow_port_list_with_http_info(self, id, **kwargs): # noqa: E501
"""get netflow port list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netflow_port_list_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str ip:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PortPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'ip', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_netflow_port_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_netflow_port_list`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_netflow_port_list`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'ip' in params:
query_params.append(('ip', params['ip'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/ports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PortPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_netscan_by_id(self, id, **kwargs): # noqa: E501
"""get netscan by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netscan_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_netscan_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_netscan_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_netscan_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get netscan by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netscan_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_netscan_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_netscan_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_netscan_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Netscan', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_netscan_list(self, **kwargs): # noqa: E501
"""get netscan list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netscan_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: NetscanPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_netscan_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_netscan_list_with_http_info(**kwargs) # noqa: E501
return data
def get_netscan_list_with_http_info(self, **kwargs): # noqa: E501
"""get netscan list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_netscan_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: NetscanPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_netscan_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NetscanPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
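# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Restricting the returned fields of each netscan. The comma-separated form of
# `fields` is an assumption about the REST API, not something defined here.
# >>> scans = api.get_netscan_list(fields="id,name", size=100)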
def get_ops_note_by_id(self, id, **kwargs): # noqa: E501
"""get opsnote by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ops_note_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str fields:
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_ops_note_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_ops_note_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_ops_note_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get opsnote by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ops_note_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str fields:
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ops_note_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_ops_note_by_id`") # noqa: E501
if 'id' in params and not re.search(r'[^\/]+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_ops_note_by_id`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpsNote', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
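# --- Illustrative usage (editor's sketch, not part of the generated client) ---
# Fetching a single ops note. Note that, per the docstring above, `id` is a
# string for this endpoint; the value below is hypothetical.
# >>> note = api.get_ops_note_by_id(id="abc123")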
def get_ops_note_list(self, **kwargs): # noqa: E501
"""get opsnote list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ops_note_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: OpsNotePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_ops_note_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_ops_note_list_with_http_info(**kwargs) # noqa: E501
return data
def get_ops_note_list_with_http_info(self, **kwargs): # noqa: E501
"""get opsnote list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ops_note_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: OpsNotePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_ops_note_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpsNotePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_recipient_group_by_id(self, id, **kwargs): # noqa: E501
"""get recipient group by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipient_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_recipient_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_recipient_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_recipient_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get recipient group by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipient_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recipient_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_recipient_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_recipient_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RecipientGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_recipient_group_list(self, **kwargs): # noqa: E501
"""get recipient group List # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipient_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: RecipientGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_recipient_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_recipient_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_recipient_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get recipient group List # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipient_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: RecipientGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recipient_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RecipientGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_report_by_id(self, id, **kwargs): # noqa: E501
"""get report by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_report_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_report_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_report_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get report by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_report_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_report_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_report_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportBase', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
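    # Illustrative usage sketch (editorial addition): `id` is a required path
    # parameter and must satisfy the `\d+` check above; `fields` restricts the
    # returned properties. The id and field names below are placeholders:
    #
    #     report = api.get_report_by_id(42, fields="id,name,type")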
def get_report_group_by_id(self, id, **kwargs): # noqa: E501
"""get report group by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_report_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_report_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_report_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get report group by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_report_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_report_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_report_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_report_group_list(self, **kwargs): # noqa: E501
"""get report group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ReportGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_report_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_report_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_report_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get report group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ReportGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_report_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_report_list(self, **kwargs): # noqa: E501
"""get report list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ReportPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_report_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_report_list_with_http_info(**kwargs) # noqa: E501
return data
def get_report_list_with_http_info(self, **kwargs): # noqa: E501
"""get report list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_report_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: ReportPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_report_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_role_by_id(self, id, **kwargs): # noqa: E501
"""get role by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_role_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_role_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_role_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get role by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_role_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_role_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_role_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_role_list(self, **kwargs): # noqa: E501
"""get role list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: RolePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_role_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_role_list_with_http_info(**kwargs) # noqa: E501
return data
def get_role_list_with_http_info(self, **kwargs): # noqa: E501
"""get role list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_role_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: RolePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_role_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RolePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sdt_by_id(self, id, **kwargs): # noqa: E501
"""get SDT by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str fields:
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sdt_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_sdt_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_sdt_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get SDT by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str fields:
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sdt_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_sdt_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDT', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
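    # Illustrative usage sketch (editorial addition): unlike most getters in
    # this class, the SDT `id` is declared as a string in the docstring, so it
    # is passed through as-is rather than validated against `\d+`. The literal
    # id below is a placeholder, not a real SDT identifier:
    #
    #     sdt = api.get_sdt_by_id("SDT_1", fields="type,comment")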
def get_sdt_history_by_device_data_source_id(self, device_id, id, **kwargs): # noqa: E501
"""get SDT history for the device dataSource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_data_source_id(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDataSourceSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sdt_history_by_device_data_source_id_with_http_info(device_id, id, **kwargs) # noqa: E501
else:
(data) = self.get_sdt_history_by_device_data_source_id_with_http_info(device_id, id, **kwargs) # noqa: E501
return data
def get_sdt_history_by_device_data_source_id_with_http_info(self, device_id, id, **kwargs): # noqa: E501
"""get SDT history for the device dataSource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_data_source_id_with_http_info(device_id, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceDataSourceSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sdt_history_by_device_data_source_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `get_sdt_history_by_device_data_source_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_sdt_history_by_device_data_source_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `get_sdt_history_by_device_data_source_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_sdt_history_by_device_data_source_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{id}/historysdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceSDTHistoryPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
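    # Illustrative usage sketch (editorial addition): this endpoint takes two
    # path parameters, the device id and the device-datasource id, both of
    # which must satisfy the `\d+` checks above. The numeric ids below are
    # placeholders:
    #
    #     history = api.get_sdt_history_by_device_data_source_id(12, 345, size=25)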
def get_sdt_history_by_device_group_id(self, id, **kwargs): # noqa: E501
"""get SDT history for the group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_group_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sdt_history_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_sdt_history_by_device_group_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_sdt_history_by_device_group_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get SDT history for the group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_group_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceGroupSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sdt_history_by_device_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_sdt_history_by_device_group_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_sdt_history_by_device_group_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}/historysdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupSDTHistoryPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_sdt_history_by_device_id(self, id, **kwargs): # noqa: E501
"""get SDT history for the device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sdt_history_by_device_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_sdt_history_by_device_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_sdt_history_by_device_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get SDT history for the device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_history_by_device_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DeviceSDTHistoryPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sdt_history_by_device_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_sdt_history_by_device_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_sdt_history_by_device_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/historysdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceSDTHistoryPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
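    # Illustrative usage sketch (editorial addition): besides the usual
    # pagination parameters, this getter accepts integer `start` and `end`
    # bounds (the expected time unit is defined by the LogicMonitor REST API;
    # consult its documentation). All values below are placeholders:
    #
    #     history = api.get_sdt_history_by_device_id(12, start=1600000000, end=1600086400)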
def get_sdt_list(self, **kwargs): # noqa: E501
"""get SDT list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_sdt_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_sdt_list_with_http_info(**kwargs) # noqa: E501
return data
def get_sdt_list_with_http_info(self, **kwargs): # noqa: E501
"""get SDT list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sdt_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_sdt_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDTPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
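    # Illustrative usage sketch (editorial addition): `filter` is passed
    # through verbatim as a query parameter; its grammar is defined by the
    # LogicMonitor REST API, so the expression below is only an example shape,
    # not a verified filter:
    #
    #     sdts = api.get_sdt_list(filter='type:"DeviceSDT"', size=100)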
def get_site_monitor_check_point_list(self, **kwargs): # noqa: E501
"""get website checkpoint list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_site_monitor_check_point_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SiteMonitorCheckPointPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_site_monitor_check_point_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_site_monitor_check_point_list_with_http_info(**kwargs) # noqa: E501
return data
def get_site_monitor_check_point_list_with_http_info(self, **kwargs): # noqa: E501
"""get website checkpoint list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_site_monitor_check_point_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SiteMonitorCheckPointPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_site_monitor_check_point_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/smcheckpoints', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SiteMonitorCheckPointPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_top_talkers_graph(self, id, **kwargs): # noqa: E501
"""get top talkers graph # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_top_talkers_graph(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str format:
:param str keyword:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_top_talkers_graph_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_top_talkers_graph_with_http_info(id, **kwargs) # noqa: E501
return data
def get_top_talkers_graph_with_http_info(self, id, **kwargs): # noqa: E501
"""get top talkers graph # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_top_talkers_graph_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str format:
:param str keyword:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter', 'format', 'keyword'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_top_talkers_graph" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_top_talkers_graph`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_top_talkers_graph`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
if 'keyword' in params:
query_params.append(('keyword', params['keyword'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/topTalkersGraph', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
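    # Illustrative usage sketch (editorial addition): the top-talkers graph is
    # fetched per device id and returns a GraphPlot; `format`, `keyword`, and
    # `netflow_filter` are optional refinements. All values below are
    # placeholders:
    #
    #     plot = api.get_top_talkers_graph(12, start=1600000000, end=1600003600)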
def get_unmonitored_device_list(self, **kwargs): # noqa: E501
"""get unmonitored device list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unmonitored_device_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: UnmonitoredDevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_unmonitored_device_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_unmonitored_device_list_with_http_info(**kwargs) # noqa: E501
return data
def get_unmonitored_device_list_with_http_info(self, **kwargs): # noqa: E501
"""get unmonitored device list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_unmonitored_device_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: UnmonitoredDevicePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_unmonitored_device_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/unmonitoreddevices', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UnmonitoredDevicePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_update_reason_list_by_data_source_id(self, id, **kwargs): # noqa: E501
"""get update history for a datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_update_reason_list_by_data_source_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DataSourceUpdateReasonsPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_update_reason_list_by_data_source_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_update_reason_list_by_data_source_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_update_reason_list_by_data_source_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get update history for a datasource # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_update_reason_list_by_data_source_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: DataSourceUpdateReasonsPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_update_reason_list_by_data_source_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_update_reason_list_by_data_source_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_update_reason_list_by_data_source_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/{id}/updatereasons', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DataSourceUpdateReasonsPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_alert_list_by_website_id(self, id, **kwargs): # noqa: E501
"""get alerts for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_alert_list_by_website_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_alert_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_website_alert_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_website_alert_list_by_website_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get alerts for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_alert_list_by_website_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param bool need_message:
:param str custom_columns:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: AlertPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'need_message', 'custom_columns', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_alert_list_by_website_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_alert_list_by_website_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_website_alert_list_by_website_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'need_message' in params:
query_params.append(('needMessage', params['need_message'])) # noqa: E501
if 'custom_columns' in params:
query_params.append(('customColumns', params['custom_columns'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}/alerts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
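    # Illustrative usage sketch (editorial addition): the boolean
    # `need_message` maps to the `needMessage` query parameter shown above;
    # its exact effect on the response is defined by the LogicMonitor API.
    # The website id and sizes below are placeholders:
    #
    #     alerts = api.get_website_alert_list_by_website_id(7, need_message=True, size=50)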
def get_website_by_id(self, id, **kwargs): # noqa: E501
"""get website by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str format:
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_website_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_website_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get website by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str format:
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `get_website_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Website', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_checkpoint_data_by_id(self, srv_id, check_id, **kwargs): # noqa: E501
"""get data for a website checkpoint # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_checkpoint_data_by_id(srv_id, check_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int srv_id: (required)
:param int check_id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: WebsiteCheckpointRawData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_checkpoint_data_by_id_with_http_info(srv_id, check_id, **kwargs) # noqa: E501
else:
(data) = self.get_website_checkpoint_data_by_id_with_http_info(srv_id, check_id, **kwargs) # noqa: E501
return data
def get_website_checkpoint_data_by_id_with_http_info(self, srv_id, check_id, **kwargs): # noqa: E501
"""get data for a website checkpoint # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_checkpoint_data_by_id_with_http_info(srv_id, check_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int srv_id: (required)
:param int check_id: (required)
:param float period:
:param int start:
:param int end:
:param str datapoints:
:param str format:
:return: WebsiteCheckpointRawData
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['srv_id', 'check_id', 'period', 'start', 'end', 'datapoints', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_checkpoint_data_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'srv_id' is set
if ('srv_id' not in params or
params['srv_id'] is None):
raise ValueError("Missing the required parameter `srv_id` when calling `get_website_checkpoint_data_by_id`") # noqa: E501
# verify the required parameter 'check_id' is set
if ('check_id' not in params or
params['check_id'] is None):
raise ValueError("Missing the required parameter `check_id` when calling `get_website_checkpoint_data_by_id`") # noqa: E501
if 'srv_id' in params and not re.search(r'\d+', params['srv_id'] if type(params['srv_id']) is str else str(params['srv_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `srv_id` when calling `get_website_checkpoint_data_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'check_id' in params and not re.search(r'\d+', params['check_id'] if type(params['check_id']) is str else str(params['check_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `check_id` when calling `get_website_checkpoint_data_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'srv_id' in params:
path_params['srvId'] = params['srv_id'] # noqa: E501
if 'check_id' in params:
path_params['checkId'] = params['check_id'] # noqa: E501
query_params = []
if 'period' in params:
query_params.append(('period', params['period'])) # noqa: E501
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'datapoints' in params:
query_params.append(('datapoints', params['datapoints'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{srvId}/checkpoints/{checkId}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteCheckpointRawData', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
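# --- Usage sketch (illustrative) ---
# get_website_checkpoint_data_by_id takes the website id (srv_id) and the
# checkpoint id, plus optional start/end epoch seconds and a datapoints
# filter. A hedged sketch, reusing the assumed `api` object from the
# example above:
#
#   import time
#   end = int(time.time())
#   start = end - 3600  # last hour
#   raw = api.get_website_checkpoint_data_by_id(42, 7, start=start, end=end)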
def get_website_data_by_graph_name(self, id, graph_name, **kwargs): # noqa: E501
"""get website data by graph name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_data_by_graph_name(id, graph_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str graph_name: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_data_by_graph_name_with_http_info(id, graph_name, **kwargs) # noqa: E501
else:
(data) = self.get_website_data_by_graph_name_with_http_info(id, graph_name, **kwargs) # noqa: E501
return data
def get_website_data_by_graph_name_with_http_info(self, id, graph_name, **kwargs): # noqa: E501
"""get website data by graph name # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_data_by_graph_name_with_http_info(id, graph_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str graph_name: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'graph_name', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_data_by_graph_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_data_by_graph_name`") # noqa: E501
# verify the required parameter 'graph_name' is set
if ('graph_name' not in params or
params['graph_name'] is None):
raise ValueError("Missing the required parameter `graph_name` when calling `get_website_data_by_graph_name`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_website_data_by_graph_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'graph_name' in params and not re.search('.+', params['graph_name'] if type(params['graph_name']) is str else str(params['graph_name'])): # noqa: E501
raise ValueError("Invalid value for parameter `graph_name` when calling `get_website_data_by_graph_name`, must conform to the pattern `/.+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'graph_name' in params:
path_params['graphName'] = params['graph_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}/graphs/{graphName}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_graph_data(self, website_id, checkpoint_id, graph_name, **kwargs): # noqa: E501
"""get website graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_graph_data(website_id, checkpoint_id, graph_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int website_id: (required)
:param int checkpoint_id: (required)
:param str graph_name: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_graph_data_with_http_info(website_id, checkpoint_id, graph_name, **kwargs) # noqa: E501
else:
(data) = self.get_website_graph_data_with_http_info(website_id, checkpoint_id, graph_name, **kwargs) # noqa: E501
return data
def get_website_graph_data_with_http_info(self, website_id, checkpoint_id, graph_name, **kwargs): # noqa: E501
"""get website graph data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_graph_data_with_http_info(website_id, checkpoint_id, graph_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int website_id: (required)
:param int checkpoint_id: (required)
:param str graph_name: (required)
:param int start:
:param int end:
:param str format:
:return: GraphPlot
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['website_id', 'checkpoint_id', 'graph_name', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_graph_data" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'website_id' is set
if ('website_id' not in params or
params['website_id'] is None):
raise ValueError("Missing the required parameter `website_id` when calling `get_website_graph_data`") # noqa: E501
# verify the required parameter 'checkpoint_id' is set
if ('checkpoint_id' not in params or
params['checkpoint_id'] is None):
raise ValueError("Missing the required parameter `checkpoint_id` when calling `get_website_graph_data`") # noqa: E501
# verify the required parameter 'graph_name' is set
if ('graph_name' not in params or
params['graph_name'] is None):
raise ValueError("Missing the required parameter `graph_name` when calling `get_website_graph_data`") # noqa: E501
if 'website_id' in params and not re.search(r'\d+', params['website_id'] if type(params['website_id']) is str else str(params['website_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `website_id` when calling `get_website_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'checkpoint_id' in params and not re.search(r'\d+', params['checkpoint_id'] if type(params['checkpoint_id']) is str else str(params['checkpoint_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `checkpoint_id` when calling `get_website_graph_data`, must conform to the pattern `/\d+/`") # noqa: E501
if 'graph_name' in params and not re.search('.+', params['graph_name'] if type(params['graph_name']) is str else str(params['graph_name'])): # noqa: E501
raise ValueError("Invalid value for parameter `graph_name` when calling `get_website_graph_data`, must conform to the pattern `/.+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'website_id' in params:
path_params['websiteId'] = params['website_id'] # noqa: E501
if 'checkpoint_id' in params:
path_params['checkpointId'] = params['checkpoint_id'] # noqa: E501
if 'graph_name' in params:
path_params['graphName'] = params['graph_name'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{websiteId}/checkpoints/{checkpointId}/graphs/{graphName}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GraphPlot', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_group_by_id(self, id, **kwargs): # noqa: E501
"""get website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_group_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_group_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_website_group_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_website_group_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_group_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_website_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_group_list(self, **kwargs): # noqa: E501
"""get website group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_group_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsiteGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_group_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_website_group_list_with_http_info(**kwargs) # noqa: E501
return data
def get_website_group_list_with_http_info(self, **kwargs): # noqa: E501
"""get website group list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_group_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsiteGroupPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_group_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteGroupPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
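# --- Usage sketch (illustrative) ---
# Group listings accept the same paging arguments as the other list calls;
# `fields` restricts which properties are returned. A hedged sketch (the
# comma-separated `fields` syntax is an assumption based on the REST docs):
#
#   groups = api.get_website_group_list(size=25, fields='id,name')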
def get_website_list(self, **kwargs): # noqa: E501
"""get website list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collector_ids:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsitePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_website_list_with_http_info(**kwargs) # noqa: E501
return data
def get_website_list_with_http_info(self, **kwargs): # noqa: E501
"""get website list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str collector_ids:
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WebsitePaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['collector_ids', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'collector_ids' in params:
query_params.append(('collectorIds', params['collector_ids'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsitePaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
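# --- Usage sketch (illustrative) ---
# The list endpoints page their results with `size`/`offset` and accept a
# `filter` expression. A hedged pagination sketch over get_website_list,
# assuming the pagination response exposes `items` and `total` attributes
# (attribute names are assumptions; check the generated model):
#
#   offset, size, websites = 0, 50, []
#   while True:
#       page = api.get_website_list(size=size, offset=offset)
#       websites.extend(page.items or [])
#       offset += size
#       if offset >= (page.total or 0):
#           break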
def get_website_property_list_by_website_id(self, id, **kwargs): # noqa: E501
"""get a list of properties for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_property_list_by_website_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_property_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_website_property_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_website_property_list_by_website_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get a list of properties for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_property_list_by_website_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: PropertyPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_property_list_by_website_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_property_list_by_website_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_website_property_list_by_website_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}/properties', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PropertyPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_website_sdt_list_by_website_id(self, id, **kwargs): # noqa: E501
"""get a list of SDTs for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_sdt_list_by_website_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_website_sdt_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_website_sdt_list_by_website_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_website_sdt_list_by_website_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get a list of SDTs for a website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_website_sdt_list_by_website_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: SDTPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_website_sdt_list_by_website_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_website_sdt_list_by_website_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_website_sdt_list_by_website_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}/sdts', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDTPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_widget_by_id(self, id, **kwargs): # noqa: E501
"""get widget by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widget_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_widget_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_widget_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get widget by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_widget_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_widget_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_widget_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Widget', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_widget_data_by_id(self, id, **kwargs): # noqa: E501
"""get widget data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_data_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str format:
:return: WidgetData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widget_data_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_widget_data_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_widget_data_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get widget data # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_data_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str format:
:return: WidgetData
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'format'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_widget_data_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_widget_data_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_widget_data_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'format' in params:
query_params.append(('format', params['format'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets/{id}/data', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WidgetData', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
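# --- Usage sketch (illustrative) ---
# Widget data can be requested for a time range; start/end are epoch
# seconds. A minimal sketch, reusing the assumed `api` object from the
# earlier example:
#
#   import time
#   end = int(time.time())
#   data = api.get_widget_data_by_id(123, start=end - 86400, end=end)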
def get_widget_list(self, **kwargs): # noqa: E501
"""get widget list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_list(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WidgetPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widget_list_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_widget_list_with_http_info(**kwargs) # noqa: E501
return data
def get_widget_list_with_http_info(self, **kwargs): # noqa: E501
"""get widget list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WidgetPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_widget_list" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WidgetPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_widget_list_by_dashboard_id(self, id, **kwargs): # noqa: E501
"""get widget list by DashboardId # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_list_by_dashboard_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WidgetPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_widget_list_by_dashboard_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_widget_list_by_dashboard_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_widget_list_by_dashboard_id_with_http_info(self, id, **kwargs): # noqa: E501
"""get widget list by DashboardId # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_widget_list_by_dashboard_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param str fields:
:param int size:
:param int offset:
:param str filter:
:return: WidgetPaginationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'size', 'offset', 'filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_widget_list_by_dashboard_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_widget_list_by_dashboard_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `get_widget_list_by_dashboard_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards/{id}/widgets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WidgetPaginationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_batch_job(self, file, **kwargs): # noqa: E501
"""import batch job via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_batch_job(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_batch_job_with_http_info(file, **kwargs) # noqa: E501
else:
(data) = self.import_batch_job_with_http_info(file, **kwargs) # noqa: E501
return data
def import_batch_job_with_http_info(self, file, **kwargs): # noqa: E501
"""import batch job via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_batch_job_with_http_info(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_batch_job" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `import_batch_job`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/batchjobs/importxml', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
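# --- Usage sketch (illustrative) ---
# The import_* methods upload an XML definition as multipart/form-data.
# In this style of generated client the `file` argument is typically a
# path-like value that the ApiClient turns into a file upload; treat that
# as an assumption and verify against your ApiClient implementation.
#
#   result = api.import_batch_job('/path/to/batchjob.xml')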
def import_config_source(self, file, **kwargs): # noqa: E501
"""import config source via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_config_source(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_config_source_with_http_info(file, **kwargs) # noqa: E501
else:
(data) = self.import_config_source_with_http_info(file, **kwargs) # noqa: E501
return data
def import_config_source_with_http_info(self, file, **kwargs): # noqa: E501
"""import config source via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_config_source_with_http_info(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_config_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `import_config_source`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/configsources/importxml', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_data_source(self, file, **kwargs): # noqa: E501
"""import datasource via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_data_source(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_data_source_with_http_info(file, **kwargs) # noqa: E501
else:
(data) = self.import_data_source_with_http_info(file, **kwargs) # noqa: E501
return data
def import_data_source_with_http_info(self, file, **kwargs): # noqa: E501
"""import datasource via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_data_source_with_http_info(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_data_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `import_data_source`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/datasources/importxml', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def import_event_source(self, file, **kwargs): # noqa: E501
"""import eventsource via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_event_source(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.import_event_source_with_http_info(file, **kwargs) # noqa: E501
else:
(data) = self.import_event_source_with_http_info(file, **kwargs) # noqa: E501
return data
def import_event_source_with_http_info(self, file, **kwargs): # noqa: E501
"""import eventsource via xml # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.import_event_source_with_http_info(file, async_req=True)
>>> result = thread.get()
:param async_req bool
:param file file: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method import_event_source" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `import_event_source`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/eventsources/importxml', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_admin_by_id(self, id, body, **kwargs): # noqa: E501
"""update user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_admin_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Admin body: (required)
:param bool change_password:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_admin_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_admin_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_admin_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_admin_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Admin body: (required)
:param bool change_password:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'change_password'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_admin_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_admin_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_admin_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_admin_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'change_password' in params:
query_params.append(('changePassword', params['change_password'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Admin', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
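
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # A minimal example of updating a user via PATCH /setting/admins/{id}.
    # `api` is assumed to be an already-configured instance of this class and
    # `Admin` the generated model referenced in the docstring above; neither
    # name is imported or verified here.
    #
    #     body = Admin(email='jane.doe@example.com')   # only the fields to change;
    #                                                  # the model may require more
    #     updated = api.patch_admin_by_id(42, body, change_password=False)
    #
    # With async_req=True the same call returns a thread-like handle instead;
    # calling .get() on it blocks until the Admin response is available.
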
def patch_alert_rule_by_id(self, id, body, **kwargs): # noqa: E501
"""update alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_alert_rule_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_alert_rule_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_alert_rule_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_alert_rule_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_alert_rule_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_alert_rule_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_alert_rule_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_alert_rule_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_alert_rule_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_api_token_by_admin_id(self, admin_id, apitoken_id, body, **kwargs): # noqa: E501
"""update api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_token_by_admin_id(admin_id, apitoken_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, **kwargs) # noqa: E501
return data
def patch_api_token_by_admin_id_with_http_info(self, admin_id, apitoken_id, body, **kwargs): # noqa: E501
"""update api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['admin_id', 'apitoken_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_api_token_by_admin_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'admin_id' is set
if ('admin_id' not in params or
params['admin_id'] is None):
raise ValueError("Missing the required parameter `admin_id` when calling `patch_api_token_by_admin_id`") # noqa: E501
# verify the required parameter 'apitoken_id' is set
if ('apitoken_id' not in params or
params['apitoken_id'] is None):
raise ValueError("Missing the required parameter `apitoken_id` when calling `patch_api_token_by_admin_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_api_token_by_admin_id`") # noqa: E501
        if 'admin_id' in params and not re.search(r'\d+', params['admin_id'] if type(params['admin_id']) is str else str(params['admin_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `admin_id` when calling `patch_api_token_by_admin_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'apitoken_id' in params and not re.search(r'\d+', params['apitoken_id'] if type(params['apitoken_id']) is str else str(params['apitoken_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `apitoken_id` when calling `patch_api_token_by_admin_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'admin_id' in params:
path_params['adminId'] = params['admin_id'] # noqa: E501
if 'apitoken_id' in params:
path_params['apitokenId'] = params['apitoken_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{adminId}/apitokens/{apitokenId}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APIToken', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
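
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # PATCH /setting/admins/{adminId}/apitokens/{apitokenId} takes two numeric
    # path parameters plus an APIToken body; both IDs are pattern-checked above
    # before the request is built. `api` and `APIToken` are assumed names.
    #
    #     token_update = APIToken(note='rotated')      # illustrative field only
    #     result = api.patch_api_token_by_admin_id(42, 7, token_update)
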
def patch_collector_by_id(self, id, body, **kwargs): # noqa: E501
"""update collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_collector_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Collector body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_collector_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_collector_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_collector_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_collector_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Collector body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'collector_load_balanced', 'force_update_failed_over_devices'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_collector_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_collector_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_collector_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_collector_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'collector_load_balanced' in params:
query_params.append(('collectorLoadBalanced', params['collector_load_balanced'])) # noqa: E501
if 'force_update_failed_over_devices' in params:
query_params.append(('forceUpdateFailedOverDevices', params['force_update_failed_over_devices'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Collector', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
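
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # The optional keyword arguments become query parameters and are renamed to
    # camelCase on the wire (collector_load_balanced -> collectorLoadBalanced,
    # force_update_failed_over_devices -> forceUpdateFailedOverDevices).
    # `api` and `Collector` are assumed names, as above.
    #
    #     patched = api.patch_collector_by_id(
    #         11,
    #         Collector(description='rebalanced'),     # illustrative field only
    #         collector_load_balanced=True,
    #         force_update_failed_over_devices=False,
    #     )
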
def patch_collector_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_collector_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param CollectorGroup body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_collector_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_collector_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_collector_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_collector_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param CollectorGroup body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'collector_load_balanced', 'force_update_failed_over_devices'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_collector_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_collector_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_collector_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_collector_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'collector_load_balanced' in params:
query_params.append(('collectorLoadBalanced', params['collector_load_balanced'])) # noqa: E501
if 'force_update_failed_over_devices' in params:
query_params.append(('forceUpdateFailedOverDevices', params['force_update_failed_over_devices'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_dashboard_by_id(self, id, body, **kwargs): # noqa: E501
"""update dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_dashboard_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Dashboard body: (required)
:param bool overwrite_group_fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_dashboard_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_dashboard_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_dashboard_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_dashboard_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Dashboard body: (required)
:param bool overwrite_group_fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'overwrite_group_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_dashboard_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_dashboard_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_dashboard_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_dashboard_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'overwrite_group_fields' in params:
query_params.append(('overwriteGroupFields', params['overwrite_group_fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Dashboard', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_dashboard_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_dashboard_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_dashboard_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_dashboard_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_dashboard_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_dashboard_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_dashboard_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_dashboard_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_dashboard_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_dashboard_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_device(self, id, body, **kwargs): # noqa: E501
"""update a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str op_type:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_device_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str op_type:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'start', 'end', 'netflow_filter', 'op_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'op_type' in params:
query_params.append(('opType', params['op_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Device', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
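
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # PATCH /device/devices/{id} accepts optional start/end/netflowFilter/opType
    # query parameters alongside the Device body. The semantics of op_type (for
    # example, how existing custom properties are merged) are defined by the
    # LogicMonitor API rather than by this client, so the value below is only a
    # placeholder.
    #
    #     device_patch = Device(display_name='core-sw-01')   # fields to change
    #     result = api.patch_device(1234, device_patch, op_type='replace')
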
def patch_device_datasource_instance_alert_setting_by_id(self, device_id, hds_id, instance_id, id, body, **kwargs): # noqa: E501
"""update device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_alert_setting_by_id(device_id, hds_id, instance_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param DeviceDataSourceInstanceAlertSetting body: (required)
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, **kwargs) # noqa: E501
return data
def patch_device_datasource_instance_alert_setting_by_id_with_http_info(self, device_id, hds_id, instance_id, id, body, **kwargs): # noqa: E501
"""update device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param DeviceDataSourceInstanceAlertSetting body: (required)
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_datasource_instance_alert_setting_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `patch_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `patch_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `patch_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_datasource_instance_alert_setting_by_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `patch_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `hds_id` when calling `patch_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `instance_id` when calling `patch_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/alertsettings/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceAlertSetting', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
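
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # This endpoint is addressed by four nested numeric path parameters
    # (deviceId, hdsId, instanceId, id), all pattern-checked above before the
    # request is built. Client, model, and field names are assumptions.
    #
    #     setting = DeviceDataSourceInstanceAlertSetting(
    #         alert_expr='> 90 95 98',                 # illustrative field only
    #     )
    #     api.patch_device_datasource_instance_alert_setting_by_id(
    #         device_id=1234, hds_id=56, instance_id=789, id=4, body=setting)
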
def patch_device_datasource_instance_by_id(self, device_id, hds_id, id, body, **kwargs): # noqa: E501
"""update device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_by_id(device_id, hds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param DeviceDataSourceInstance body: (required)
:param str op_type:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, **kwargs) # noqa: E501
return data
def patch_device_datasource_instance_by_id_with_http_info(self, device_id, hds_id, id, body, **kwargs): # noqa: E501
"""update device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param DeviceDataSourceInstance body: (required)
:param str op_type:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'body', 'op_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_datasource_instance_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `patch_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `patch_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_datasource_instance_by_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `patch_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `hds_id` when calling `patch_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'op_type' in params:
query_params.append(('opType', params['op_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_device_datasource_instance_group_by_id(self, device_id, device_ds_id, id, body, **kwargs): # noqa: E501
"""update device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_group_by_id(device_id, device_ds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
        :param int device_ds_id: The device-datasource ID that the instance group belongs to (required)
:param int id: (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, **kwargs) # noqa: E501
return data
def patch_device_datasource_instance_group_by_id_with_http_info(self, device_id, device_ds_id, id, body, **kwargs): # noqa: E501
"""update device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
        :param int device_ds_id: The device-datasource ID that the instance group belongs to (required)
:param int id: (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_datasource_instance_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `patch_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `patch_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_datasource_instance_group_by_id`") # noqa: E501
        if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_id` when calling `patch_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'device_ds_id' in params and not re.search(r'\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_ds_id` when calling `patch_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
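
    # --- Note (editorial addition) --------------------------------------------
    # Unlike the instance endpoints above, which expose the device-datasource ID
    # as hds_id (hdsId in the URL), this endpoint appears to name the same
    # identifier device_ds_id (deviceDsId in the URL), so a call looks like:
    #
    #     api.patch_device_datasource_instance_group_by_id(
    #         device_id=1234, device_ds_id=56, id=3,
    #         body=DeviceDataSourceInstanceGroup(description='updated'))  # illustrative field
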
def patch_device_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_device_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_group_by_id`") # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device_group_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
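
    # --- Usage sketch (editorial addition, not part of the generated client) --
    # Each operation also has a *_with_http_info twin. When that twin is called
    # directly (without the _return_http_data_only flag the wrapper sets), it is
    # expected to return the deserialized body together with the HTTP status
    # code and response headers:
    #
    #     group = DeviceGroup(description='edited')    # illustrative field only
    #     data, status, headers = api.patch_device_group_by_id_with_http_info(7, group)
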
def patch_device_group_cluster_alert_conf_by_id(self, device_group_id, id, body, **kwargs): # noqa: E501
"""Update cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_cluster_alert_conf_by_id(device_group_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, **kwargs) # noqa: E501
return data
def patch_device_group_cluster_alert_conf_by_id_with_http_info(self, device_group_id, id, body, **kwargs): # noqa: E501
"""Update cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_group_cluster_alert_conf_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `patch_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_group_cluster_alert_conf_by_id`") # noqa: E501
        if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_group_id` when calling `patch_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `id` when calling `patch_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceClusterAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_device_group_datasource_alert_setting(self, device_group_id, ds_id, body, **kwargs): # noqa: E501
"""update device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_datasource_alert_setting(device_group_id, ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param DeviceGroupDataSourceAlertConfig body: (required)
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, **kwargs) # noqa: E501
return data
def patch_device_group_datasource_alert_setting_with_http_info(self, device_group_id, ds_id, body, **kwargs): # noqa: E501
"""update device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param DeviceGroupDataSourceAlertConfig body: (required)
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'ds_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_group_datasource_alert_setting" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `patch_device_group_datasource_alert_setting`") # noqa: E501
# verify the required parameter 'ds_id' is set
if ('ds_id' not in params or
params['ds_id'] is None):
raise ValueError("Missing the required parameter `ds_id` when calling `patch_device_group_datasource_alert_setting`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_group_datasource_alert_setting`") # noqa: E501
        if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `device_group_id` when calling `patch_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`")  # noqa: E501
        if 'ds_id' in params and not re.search(r'\d+', params['ds_id'] if type(params['ds_id']) is str else str(params['ds_id'])):  # noqa: E501
            raise ValueError(r"Invalid value for parameter `ds_id` when calling `patch_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`")  # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'ds_id' in params:
path_params['dsId'] = params['ds_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/datasources/{dsId}/alertsettings', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupDataSourceAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
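# Illustrative usage sketch (not part of the generated client): patching the
# alert settings of a datasource on a device group. It assumes `api` is an
# already-configured instance of this API class, that the
# DeviceGroupDataSourceAlertConfig model is importable from the same package,
# and that `disable_alerting` is a valid field; the IDs are placeholders.
#
#   >>> body = DeviceGroupDataSourceAlertConfig(disable_alerting=True)
#   >>> # synchronous call returns the updated DeviceGroupDataSourceAlertConfig
#   >>> updated = api.patch_device_group_datasource_alert_setting(123, 456, body)
#   >>> # asynchronous call returns a thread-like object; .get() blocks for the result
#   >>> thread = api.patch_device_group_datasource_alert_setting(123, 456, body, async_req=True)
#   >>> updated = thread.get()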
def patch_device_group_property_by_name(self, gid, name, body, **kwargs): # noqa: E501
"""update device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_property_by_name(gid, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_group_property_by_name_with_http_info(gid, name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_group_property_by_name_with_http_info(gid, name, body, **kwargs) # noqa: E501
return data
def patch_device_group_property_by_name_with_http_info(self, gid, name, body, **kwargs): # noqa: E501
"""update device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_group_property_by_name_with_http_info(gid, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'name', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_group_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `patch_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_group_property_by_name`") # noqa: E501
if 'gid' in params and not re.search(r'\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `gid` when calling `patch_device_group_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `patch_device_group_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_device_property_by_name(self, device_id, name, body, **kwargs): # noqa: E501
"""update device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_property_by_name(device_id, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_device_property_by_name_with_http_info(device_id, name, body, **kwargs) # noqa: E501
else:
(data) = self.patch_device_property_by_name_with_http_info(device_id, name, body, **kwargs) # noqa: E501
return data
def patch_device_property_by_name_with_http_info(self, device_id, name, body, **kwargs): # noqa: E501
"""update device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_device_property_by_name_with_http_info(device_id, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'name', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_device_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `patch_device_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `patch_device_property_by_name`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_device_property_by_name`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `patch_device_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `patch_device_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties/{name}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
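# Illustrative sketch of the property-by-name pattern used by the methods
# above: the property is addressed through the `{name}` path segment and the
# EntityProperty body carries the replacement value. `api`, the device id and
# the property name/value below are placeholders, not values from this file.
#
#   >>> prop = EntityProperty(name='snmp.community', value='public')
#   >>> updated = api.patch_device_property_by_name(123, 'snmp.community', prop)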
def patch_escalation_chain_by_id(self, id, body, **kwargs): # noqa: E501
"""update escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_escalation_chain_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_escalation_chain_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_escalation_chain_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_escalation_chain_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_escalation_chain_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_escalation_chain_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_escalation_chain_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_escalation_chain_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_escalation_chain_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EscalatingChain', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_netscan(self, id, **kwargs): # noqa: E501
"""update a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_netscan(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Netscan body:
:param str reason:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_netscan_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.patch_netscan_with_http_info(id, **kwargs) # noqa: E501
return data
def patch_netscan_with_http_info(self, id, **kwargs): # noqa: E501
"""update a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_netscan_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Netscan body:
:param str reason:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'reason'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_netscan" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_netscan`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_netscan`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'reason' in params:
query_params.append(('reason', params['reason'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Netscan', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
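# Illustrative sketch of the optional-parameter pattern above: for
# patch_netscan both `body` and `reason` are optional keyword arguments, and
# `reason` is forwarded as the `reason` query string parameter. The Netscan
# field used here and the id are assumptions for illustration only.
#
#   >>> scan = api.patch_netscan(42, body=Netscan(name='lab subnet scan'), reason='rename')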
def patch_ops_note_by_id(self, id, body, **kwargs): # noqa: E501
"""update opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_ops_note_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_ops_note_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_ops_note_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_ops_note_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_ops_note_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_ops_note_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_ops_note_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_ops_note_by_id`") # noqa: E501
if 'id' in params and not re.search(r'[^\/]+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_ops_note_by_id`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpsNote', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_recipient_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_recipient_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_recipient_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_recipient_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_recipient_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_recipient_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_recipient_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_recipient_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_recipient_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_recipient_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RecipientGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_report_by_id(self, id, body, **kwargs): # noqa: E501
"""update report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_report_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_report_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_report_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_report_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_report_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_report_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_report_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_report_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_report_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportBase', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_report_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_report_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_report_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_report_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_report_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_report_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_report_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_report_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_report_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_report_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_role_by_id(self, id, body, **kwargs): # noqa: E501
"""update role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_role_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_role_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_role_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_role_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_role_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_role_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_role_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_role_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_role_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_sdt_by_id(self, id, body, **kwargs): # noqa: E501
"""update SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_sdt_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_sdt_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_sdt_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_sdt_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_sdt_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_sdt_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_sdt_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_sdt_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDT', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
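# Note the contrast with the numeric-id endpoints above: SDT ids are strings,
# so no `\d+` pattern check is applied here. A minimal sketch, assuming `api`
# is a configured client, that the SDT model accepts a `comment` field, and
# that 'H_123' is a hypothetical existing SDT id.
#
#   >>> body = SDT(comment='extended maintenance window')
#   >>> updated = api.patch_sdt_by_id('H_123', body)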
def patch_website_by_id(self, id, body, **kwargs): # noqa: E501
"""update website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_website_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_website_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_website_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_website_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_website_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_website_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_website_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_website_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_website_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Website', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_website_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_website_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_website_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_website_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_website_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_website_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_website_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_website_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_website_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_website_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_widget_by_id(self, id, body, **kwargs): # noqa: E501
"""update widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_widget_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_widget_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.patch_widget_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def patch_widget_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_widget_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_widget_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `patch_widget_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `patch_widget_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `patch_widget_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Widget', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def schedule_auto_discovery_by_device_id(self, id, **kwargs): # noqa: E501
"""schedule active discovery for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schedule_auto_discovery_by_device_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.schedule_auto_discovery_by_device_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.schedule_auto_discovery_by_device_id_with_http_info(id, **kwargs) # noqa: E501
return data
def schedule_auto_discovery_by_device_id_with_http_info(self, id, **kwargs): # noqa: E501
"""schedule active discovery for a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.schedule_auto_discovery_by_device_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param int start:
:param int end:
:param str netflow_filter:
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'start', 'end', 'netflow_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method schedule_auto_discovery_by_device_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `schedule_auto_discovery_by_device_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `schedule_auto_discovery_by_device_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}/scheduleAutoDiscovery', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
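# Illustrative sketch: scheduling active discovery is a POST with no request
# body; only the device id is required, and the optional keyword arguments are
# forwarded as the `start`, `end` and `netflowFilter` query parameters. `api`
# and the id are placeholders.
#
#   >>> api.schedule_auto_discovery_by_device_id(123)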
def update_admin_by_id(self, id, body, **kwargs): # noqa: E501
"""update user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_admin_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Admin body: (required)
:param bool change_password:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_admin_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_admin_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_admin_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_admin_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Admin body: (required)
:param bool change_password:
:return: Admin
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'change_password'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_admin_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_admin_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_admin_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_admin_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'change_password' in params:
query_params.append(('changePassword', params['change_password'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Admin', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
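# Illustrative sketch of the full-replace (PUT) pattern that the update_*
# methods from here on use: the body is expected to carry the complete
# resource, and `change_password` is forwarded as the `changePassword` query
# parameter. The get_admin_by_id accessor, the id and the field values are
# assumptions for illustration only.
#
#   >>> admin = api.get_admin_by_id(7)
#   >>> admin.email = 'ops@example.com'
#   >>> updated = api.update_admin_by_id(7, admin, change_password=False)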
def update_alert_rule_by_id(self, id, body, **kwargs): # noqa: E501
"""update alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_alert_rule_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_alert_rule_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_alert_rule_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_alert_rule_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update alert rule # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_alert_rule_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param AlertRule body: (required)
:return: AlertRule
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_alert_rule_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_alert_rule_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_alert_rule_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_alert_rule_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/rules/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AlertRule', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
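# Usage sketch (illustrative, not part of the generated client): assuming an
# authenticated LMApi instance `api`, an existing alert rule with id 42, and
# `rule_body`, an AlertRule model instance holding the full desired config:
#
#     updated = api.update_alert_rule_by_id(42, rule_body)
#
# Passing async_req=True instead returns a thread; call .get() on it to
# obtain the updated AlertRule, as in the docstring example above.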
def update_api_token_by_admin_id(self, admin_id, apitoken_id, body, **kwargs): # noqa: E501
"""update api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_api_token_by_admin_id(admin_id, apitoken_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, **kwargs) # noqa: E501
return data
def update_api_token_by_admin_id_with_http_info(self, admin_id, apitoken_id, body, **kwargs): # noqa: E501
"""update api tokens for a user # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_api_token_by_admin_id_with_http_info(admin_id, apitoken_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int admin_id: (required)
:param int apitoken_id: (required)
:param APIToken body: (required)
:return: APIToken
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['admin_id', 'apitoken_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_api_token_by_admin_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'admin_id' is set
if ('admin_id' not in params or
params['admin_id'] is None):
raise ValueError("Missing the required parameter `admin_id` when calling `update_api_token_by_admin_id`") # noqa: E501
# verify the required parameter 'apitoken_id' is set
if ('apitoken_id' not in params or
params['apitoken_id'] is None):
raise ValueError("Missing the required parameter `apitoken_id` when calling `update_api_token_by_admin_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_api_token_by_admin_id`") # noqa: E501
if 'admin_id' in params and not re.search(r'\d+', params['admin_id'] if type(params['admin_id']) is str else str(params['admin_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `admin_id` when calling `update_api_token_by_admin_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'apitoken_id' in params and not re.search(r'\d+', params['apitoken_id'] if type(params['apitoken_id']) is str else str(params['apitoken_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `apitoken_id` when calling `update_api_token_by_admin_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'admin_id' in params:
path_params['adminId'] = params['admin_id'] # noqa: E501
if 'apitoken_id' in params:
path_params['apitokenId'] = params['apitoken_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/admins/{adminId}/apitokens/{apitokenId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='APIToken', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
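# Usage sketch (illustrative; the ids and `token_body` are assumptions):
# given admin id 7, API token id 3, and `token_body`, an APIToken model
# instance, the synchronous form returns the updated APIToken:
#
#     updated = api.update_api_token_by_admin_id(7, 3, token_body)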
def update_collector_by_id(self, id, body, **kwargs): # noqa: E501
"""update collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_collector_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Collector body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_collector_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_collector_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_collector_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update collector # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_collector_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Collector body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: Collector
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'collector_load_balanced', 'force_update_failed_over_devices'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_collector_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_collector_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_collector_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_collector_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'collector_load_balanced' in params:
query_params.append(('collectorLoadBalanced', params['collector_load_balanced'])) # noqa: E501
if 'force_update_failed_over_devices' in params:
query_params.append(('forceUpdateFailedOverDevices', params['force_update_failed_over_devices'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/collectors/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Collector', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
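# Usage sketch (illustrative; collector id 12 and `collector_body`, a
# Collector model instance, are assumptions): the optional query parameters
# documented above are passed as keyword arguments:
#
#     updated = api.update_collector_by_id(
#         12, collector_body,
#         collector_load_balanced=True,
#         force_update_failed_over_devices=False)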
def update_collector_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_collector_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param CollectorGroup body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_collector_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_collector_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_collector_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update collector group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_collector_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param CollectorGroup body: (required)
:param bool collector_load_balanced:
:param bool force_update_failed_over_devices:
:return: CollectorGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'collector_load_balanced', 'force_update_failed_over_devices'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_collector_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_collector_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_collector_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_collector_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'collector_load_balanced' in params:
query_params.append(('collectorLoadBalanced', params['collector_load_balanced'])) # noqa: E501
if 'force_update_failed_over_devices' in params:
query_params.append(('forceUpdateFailedOverDevices', params['force_update_failed_over_devices'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/collector/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CollectorGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
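# Usage sketch (illustrative; group id 5 and `group_body`, a CollectorGroup
# model instance, are assumptions): the load-balancing query parameters are
# optional keyword arguments here as well:
#
#     updated = api.update_collector_group_by_id(
#         5, group_body, collector_load_balanced=True)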
def update_dashboard_by_id(self, id, body, **kwargs): # noqa: E501
"""update dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Dashboard body: (required)
:param bool overwrite_group_fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_dashboard_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_dashboard_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_dashboard_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Dashboard body: (required)
:param bool overwrite_group_fields:
:return: Dashboard
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'overwrite_group_fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_dashboard_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_dashboard_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_dashboard_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_dashboard_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'overwrite_group_fields' in params:
query_params.append(('overwriteGroupFields', params['overwrite_group_fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/dashboards/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Dashboard', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
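# Usage sketch (illustrative; dashboard id 9 and `dash_body`, a Dashboard
# model instance, are assumptions): overwrite_group_fields is the only
# optional query parameter:
#
#     updated = api.update_dashboard_by_id(9, dash_body,
#                                           overwrite_group_fields=True)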
def update_dashboard_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_dashboard_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_dashboard_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_dashboard_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update dashboard group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DashboardGroup body: (required)
:return: DashboardGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_dashboard_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_dashboard_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_dashboard_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_dashboard_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DashboardGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
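# Usage sketch (illustrative; id 4 and `group_body`, a DashboardGroup model
# instance, are assumptions):
#
#     updated = api.update_dashboard_group_by_id(4, group_body)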
def update_device(self, id, body, **kwargs): # noqa: E501
"""update a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str op_type:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_device_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update a device # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Device body: (required)
:param int start:
:param int end:
:param str netflow_filter:
:param str op_type:
:return: Device
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'start', 'end', 'netflow_filter', 'op_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'start' in params:
query_params.append(('start', params['start'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'netflow_filter' in params:
query_params.append(('netflowFilter', params['netflow_filter'])) # noqa: E501
if 'op_type' in params:
query_params.append(('opType', params['op_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Device', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
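# Usage sketch (illustrative; device id 101, `device_body` (a Device model
# instance) and the op_type value are assumptions): op_type is an optional
# query parameter controlling how the update is applied, so check the portal
# documentation for the values it accepts:
#
#     updated = api.update_device(101, device_body, op_type='replace')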
def update_device_datasource_instance_alert_setting_by_id(self, device_id, hds_id, instance_id, id, body, **kwargs): # noqa: E501
"""update device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_alert_setting_by_id(device_id, hds_id, instance_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param DeviceDataSourceInstanceAlertSetting body: (required)
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, **kwargs) # noqa: E501
return data
def update_device_datasource_instance_alert_setting_by_id_with_http_info(self, device_id, hds_id, instance_id, id, body, **kwargs): # noqa: E501
"""update device instance alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_alert_setting_by_id_with_http_info(device_id, hds_id, instance_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: Device-DataSource ID (required)
:param int instance_id: (required)
:param int id: (required)
:param DeviceDataSourceInstanceAlertSetting body: (required)
:return: DeviceDataSourceInstanceAlertSetting
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'instance_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_datasource_instance_alert_setting_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `update_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `update_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'instance_id' is set
if ('instance_id' not in params or
params['instance_id'] is None):
raise ValueError("Missing the required parameter `instance_id` when calling `update_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device_datasource_instance_alert_setting_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_datasource_instance_alert_setting_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `update_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `update_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'instance_id' in params and not re.search(r'\d+', params['instance_id'] if type(params['instance_id']) is str else str(params['instance_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `instance_id` when calling `update_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device_datasource_instance_alert_setting_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'instance_id' in params:
path_params['instanceId'] = params['instance_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{instanceId}/alertsettings/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceAlertSetting', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
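# Usage sketch (illustrative; all four ids and `setting_body` are
# assumptions): the path parameters are device id, device-datasource id,
# instance id and alert-setting id, in that order, followed by the
# DeviceDataSourceInstanceAlertSetting body:
#
#     updated = api.update_device_datasource_instance_alert_setting_by_id(
#         101, 55, 7001, 3, setting_body)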
def update_device_datasource_instance_by_id(self, device_id, hds_id, id, body, **kwargs): # noqa: E501
"""update device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_by_id(device_id, hds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param DeviceDataSourceInstance body: (required)
:param str op_type:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, **kwargs) # noqa: E501
return data
def update_device_datasource_instance_by_id_with_http_info(self, device_id, hds_id, id, body, **kwargs): # noqa: E501
"""update device instance # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_by_id_with_http_info(device_id, hds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int hds_id: The device-datasource ID (required)
:param int id: (required)
:param DeviceDataSourceInstance body: (required)
:param str op_type:
:return: DeviceDataSourceInstance
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'hds_id', 'id', 'body', 'op_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_datasource_instance_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `update_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'hds_id' is set
if ('hds_id' not in params or
params['hds_id'] is None):
raise ValueError("Missing the required parameter `hds_id` when calling `update_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device_datasource_instance_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_datasource_instance_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `update_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'hds_id' in params and not re.search(r'\d+', params['hds_id'] if type(params['hds_id']) is str else str(params['hds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `hds_id` when calling `update_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device_datasource_instance_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'hds_id' in params:
path_params['hdsId'] = params['hds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'op_type' in params:
query_params.append(('opType', params['op_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{hdsId}/instances/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstance', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
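# Usage sketch (illustrative; the ids, `instance_body` and the op_type value
# are assumptions):
#
#     updated = api.update_device_datasource_instance_by_id(
#         101, 55, 7001, instance_body, op_type='replace')
#
# op_type is optional; omit it to use the API's default update behaviour.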
def update_device_datasource_instance_group_by_id(self, device_id, device_ds_id, id, body, **kwargs): # noqa: E501
"""update device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_group_by_id(device_id, device_ds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID that the instance group belongs to (required)
:param int id: (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, **kwargs) # noqa: E501
return data
def update_device_datasource_instance_group_by_id_with_http_info(self, device_id, device_ds_id, id, body, **kwargs): # noqa: E501
"""update device datasource instance group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_datasource_instance_group_by_id_with_http_info(device_id, device_ds_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param int device_ds_id: The device-datasource ID that the instance group belongs to (required)
:param int id: (required)
:param DeviceDataSourceInstanceGroup body: (required)
:return: DeviceDataSourceInstanceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'device_ds_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_datasource_instance_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `update_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'device_ds_id' is set
if ('device_ds_id' not in params or
params['device_ds_id'] is None):
raise ValueError("Missing the required parameter `device_ds_id` when calling `update_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device_datasource_instance_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_datasource_instance_group_by_id`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `update_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'device_ds_id' in params and not re.search(r'\d+', params['device_ds_id'] if type(params['device_ds_id']) is str else str(params['device_ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_ds_id` when calling `update_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device_datasource_instance_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'device_ds_id' in params:
path_params['deviceDsId'] = params['device_ds_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/devicedatasources/{deviceDsId}/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceDataSourceInstanceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
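# Usage sketch (illustrative; the ids and `ig_body`, a
# DeviceDataSourceInstanceGroup model instance, are assumptions):
#
#     updated = api.update_device_datasource_instance_group_by_id(
#         101, 55, 9, ig_body)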
def update_device_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_device_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update device group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param DeviceGroup body: (required)
:return: DeviceGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
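# Usage sketch (illustrative; group id 3 and `dg_body`, a DeviceGroup model
# instance, are assumptions):
#
#     updated = api.update_device_group_by_id(3, dg_body)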
def update_device_group_cluster_alert_conf_by_id(self, device_group_id, id, body, **kwargs): # noqa: E501
"""Update cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_cluster_alert_conf_by_id(device_group_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, **kwargs) # noqa: E501
return data
def update_device_group_cluster_alert_conf_by_id_with_http_info(self, device_group_id, id, body, **kwargs): # noqa: E501
"""Update cluster alert configuration # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_cluster_alert_conf_by_id_with_http_info(device_group_id, id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int id: (required)
:param DeviceClusterAlertConfig body: (required)
:return: DeviceClusterAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_group_cluster_alert_conf_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `update_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_device_group_cluster_alert_conf_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_group_cluster_alert_conf_by_id`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `update_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_device_group_cluster_alert_conf_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/clusterAlertConf/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceClusterAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
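# Usage sketch (illustrative; the ids and `conf_body`, a
# DeviceClusterAlertConfig model instance, are assumptions):
#
#     updated = api.update_device_group_cluster_alert_conf_by_id(3, 17, conf_body)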
def update_device_group_datasource_alert_setting(self, device_group_id, ds_id, body, **kwargs): # noqa: E501
"""update device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_datasource_alert_setting(device_group_id, ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param DeviceGroupDataSourceAlertConfig body: (required)
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, **kwargs) # noqa: E501
return data
def update_device_group_datasource_alert_setting_with_http_info(self, device_group_id, ds_id, body, **kwargs): # noqa: E501
"""update device group datasource alert setting # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_datasource_alert_setting_with_http_info(device_group_id, ds_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_group_id: (required)
:param int ds_id: (required)
:param DeviceGroupDataSourceAlertConfig body: (required)
:return: DeviceGroupDataSourceAlertConfig
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_group_id', 'ds_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_group_datasource_alert_setting" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_group_id' is set
if ('device_group_id' not in params or
params['device_group_id'] is None):
raise ValueError("Missing the required parameter `device_group_id` when calling `update_device_group_datasource_alert_setting`") # noqa: E501
# verify the required parameter 'ds_id' is set
if ('ds_id' not in params or
params['ds_id'] is None):
raise ValueError("Missing the required parameter `ds_id` when calling `update_device_group_datasource_alert_setting`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_group_datasource_alert_setting`") # noqa: E501
if 'device_group_id' in params and not re.search(r'\d+', params['device_group_id'] if type(params['device_group_id']) is str else str(params['device_group_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_group_id` when calling `update_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`") # noqa: E501
if 'ds_id' in params and not re.search(r'\d+', params['ds_id'] if type(params['ds_id']) is str else str(params['ds_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `ds_id` when calling `update_device_group_datasource_alert_setting`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_group_id' in params:
path_params['deviceGroupId'] = params['device_group_id'] # noqa: E501
if 'ds_id' in params:
path_params['dsId'] = params['ds_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{deviceGroupId}/datasources/{dsId}/alertsettings', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DeviceGroupDataSourceAlertConfig', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
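# Usage sketch (illustrative; the ids and `alert_conf_body`, a
# DeviceGroupDataSourceAlertConfig model instance, are assumptions):
#
#     updated = api.update_device_group_datasource_alert_setting(
#         3, 220, alert_conf_body)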
def update_device_group_property_by_name(self, gid, name, body, **kwargs): # noqa: E501
"""update device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_property_by_name(gid, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_group_property_by_name_with_http_info(gid, name, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_group_property_by_name_with_http_info(gid, name, body, **kwargs) # noqa: E501
return data
def update_device_group_property_by_name_with_http_info(self, gid, name, body, **kwargs): # noqa: E501
"""update device group property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_group_property_by_name_with_http_info(gid, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int gid: group ID (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['gid', 'name', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_group_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'gid' is set
if ('gid' not in params or
params['gid'] is None):
raise ValueError("Missing the required parameter `gid` when calling `update_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `update_device_group_property_by_name`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_group_property_by_name`") # noqa: E501
if 'gid' in params and not re.search(r'\d+', params['gid'] if type(params['gid']) is str else str(params['gid'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `gid` when calling `update_device_group_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `update_device_group_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'gid' in params:
path_params['gid'] = params['gid'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/groups/{gid}/properties/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
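# Example (hedged usage sketch, not generated code; the client instance and the
# EntityProperty constructor arguments below are assumptions):
#
#     prop = EntityProperty(name='location', value='US-East-1')
#     updated = api_instance.update_device_group_property_by_name(
#         gid=24, name='location', body=prop)
#     print(updated.value)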
def update_device_property_by_name(self, device_id, name, body, **kwargs): # noqa: E501
"""update device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_property_by_name(device_id, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_device_property_by_name_with_http_info(device_id, name, body, **kwargs) # noqa: E501
else:
(data) = self.update_device_property_by_name_with_http_info(device_id, name, body, **kwargs) # noqa: E501
return data
def update_device_property_by_name_with_http_info(self, device_id, name, body, **kwargs): # noqa: E501
"""update device property # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_device_property_by_name_with_http_info(device_id, name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int device_id: (required)
:param str name: (required)
:param EntityProperty body: (required)
:return: EntityProperty
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['device_id', 'name', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_device_property_by_name" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'device_id' is set
if ('device_id' not in params or
params['device_id'] is None):
raise ValueError("Missing the required parameter `device_id` when calling `update_device_property_by_name`") # noqa: E501
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `update_device_property_by_name`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_device_property_by_name`") # noqa: E501
if 'device_id' in params and not re.search(r'\d+', params['device_id'] if type(params['device_id']) is str else str(params['device_id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `device_id` when calling `update_device_property_by_name`, must conform to the pattern `/\d+/`") # noqa: E501
if 'name' in params and not re.search(r'[^\/]+', params['name'] if type(params['name']) is str else str(params['name'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `name` when calling `update_device_property_by_name`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'device_id' in params:
path_params['deviceId'] = params['device_id'] # noqa: E501
if 'name' in params:
path_params['name'] = params['name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/device/devices/{deviceId}/properties/{name}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityProperty', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_escalation_chain_by_id(self, id, body, **kwargs): # noqa: E501
"""update escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_escalation_chain_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_escalation_chain_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_escalation_chain_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_escalation_chain_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update escalation chain # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_escalation_chain_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param EscalatingChain body: (required)
:return: EscalatingChain
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_escalation_chain_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_escalation_chain_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_escalation_chain_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_escalation_chain_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/alert/chains/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EscalatingChain', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_netscan(self, id, **kwargs): # noqa: E501
"""update a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_netscan(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Netscan body:
:param str reason:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_netscan_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_netscan_with_http_info(id, **kwargs) # noqa: E501
return data
def update_netscan_with_http_info(self, id, **kwargs): # noqa: E501
"""update a netscan # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_netscan_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Netscan body:
:param str reason:
:return: Netscan
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'reason'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_netscan" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_netscan`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_netscan`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'reason' in params:
query_params.append(('reason', params['reason'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/netscans/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Netscan', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
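# Example (hedged sketch; the getter used to fetch the existing Netscan and the
# field set on it are assumptions, and the id/reason values are illustrative):
#
#     scan = api_instance.get_netscan_by_id(131)
#     scan.name = 'nightly-subnet-scan'
#     api_instance.update_netscan(131, body=scan, reason='rename after review')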
def update_ops_note_by_id(self, id, body, **kwargs): # noqa: E501
"""update opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ops_note_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_ops_note_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_ops_note_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_ops_note_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update opsnote # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ops_note_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param OpsNote body: (required)
:return: OpsNote
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_ops_note_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_ops_note_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_ops_note_by_id`") # noqa: E501
if 'id' in params and not re.search(r'[^\/]+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_ops_note_by_id`, must conform to the pattern `/[^\/]+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/opsnotes/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OpsNote', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_recipient_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_recipient_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_recipient_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_recipient_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_recipient_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update recipient group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_recipient_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param RecipientGroup body: (required)
:return: RecipientGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_recipient_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_recipient_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_recipient_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_recipient_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/recipientgroups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RecipientGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_report_by_id(self, id, body, **kwargs): # noqa: E501
"""update report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_report_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_report_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_report_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_report_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update report # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_report_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportBase body: (required)
:return: ReportBase
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_report_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_report_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_report_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_report_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/reports/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportBase', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_report_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_report_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_report_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_report_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_report_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update report group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_report_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param ReportGroup body: (required)
:return: ReportGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_report_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_report_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_report_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_report_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/report/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReportGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_role_by_id(self, id, body, **kwargs): # noqa: E501
"""update role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_role_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_role_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_role_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_role_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update role # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_role_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Role body: (required)
:return: Role
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_role_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_role_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_role_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_role_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/setting/roles/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Role', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_sdt_by_id(self, id, body, **kwargs): # noqa: E501
"""update SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_sdt_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_sdt_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_sdt_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_sdt_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update SDT # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_sdt_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param SDT body: (required)
:return: SDT
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_sdt_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_sdt_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_sdt_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/sdt/sdts/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SDT', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_website_by_id(self, id, body, **kwargs): # noqa: E501
"""update website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_website_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_website_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_website_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_website_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update website # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_website_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Website body: (required)
:return: Website
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_website_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_website_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_website_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_website_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/websites/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Website', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_website_group_by_id(self, id, body, **kwargs): # noqa: E501
"""update website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_website_group_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_website_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_website_group_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_website_group_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update website group # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_website_group_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param WebsiteGroup body: (required)
:return: WebsiteGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_website_group_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_website_group_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_website_group_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_website_group_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/website/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WebsiteGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_widget_by_id(self, id, body, **kwargs): # noqa: E501
"""update widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_widget_by_id(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_widget_by_id_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_widget_by_id_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_widget_by_id_with_http_info(self, id, body, **kwargs): # noqa: E501
"""update widget # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_widget_by_id_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: (required)
:param Widget body: (required)
:return: Widget
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_widget_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_widget_by_id`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_widget_by_id`") # noqa: E501
if 'id' in params and not re.search(r'\d+', params['id'] if type(params['id']) is str else str(params['id'])): # noqa: E501
raise ValueError(r"Invalid value for parameter `id` when calling `update_widget_by_id`, must conform to the pattern `/\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['LMv1'] # noqa: E501
return self.api_client.call_api(
'/dashboard/widgets/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Widget', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
```
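All of the `update_*` endpoints above follow the same calling convention: authenticate with an `LMv1` API token, build the request model, then call the method either synchronously or with `async_req=True` to get a thread back. The sketch below is a hedged illustration of that flow; the module, class, and configuration attribute names (`Configuration`, `ApiClient`, `LMApi`, `company`, `access_id`, `access_key`) and the `get_role_by_id` getter are assumptions based on the generated package layout, and all IDs and values are placeholders.
```python
# Hedged usage sketch for the PUT endpoints above; not part of the generated file.
import logicmonitor_sdk

conf = logicmonitor_sdk.Configuration()        # assumed generated config class
conf.company = 'ACCOUNT_NAME'                  # <account>.logicmonitor.com
conf.access_id = 'API_ACCESS_ID'               # LMv1 token id (assumption)
conf.access_key = 'API_ACCESS_KEY'             # LMv1 token key (assumption)

api = logicmonitor_sdk.LMApi(logicmonitor_sdk.ApiClient(conf))

# Synchronous call: the updated model is returned directly.
role = api.get_role_by_id(5)                   # assumed getter for the Role model
role.description = 'read-only NOC role'
api.update_role_by_id(5, role)

# Asynchronous call: a thread is returned and .get() yields the same result.
thread = api.update_role_by_id(5, role, async_req=True)
updated_role = thread.get()
```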
#### File: logicmonitor_sdk/models/auto_discovery_method.py
```python
import pprint
import re # noqa: F401
import six
class AutoDiscoveryMethod(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str'
}
attribute_map = {
'name': 'name'
}
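# Note: two entries below use an empty-string key (their discriminator values are
# missing from the spec); because dict keys must be unique, the second '' entry
# overwrites the first, so 'GcpAppEngineDiscoveryMethod' can never be resolved.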
discriminator_value_class_map = {
'ad_pdh': 'PDHAutoDiscoveryMethod',
'ad_awsserviceregion': 'AwsServiceRegionDiscoveryMethod',
'': 'GcpAppEngineDiscoveryMethod',
'ad_awsec2reservedinstance': 'AwsEC2ReservedInstanceDiscoveryMethod',
'ad_port': 'PortAutoDiscoveryMethod',
'': 'GcpBillingDiscoveryMethod',
'ad_mongo': 'MongoAutoDiscoveryMethod',
'ad_awslbtargetgroups': 'AwsLBTargetGroupDiscoveryMethod',
'ad_http': 'HttpAutoDiscoveryMethod',
'ad_dummy': 'DummyAutoDiscoveryMethod',
'ad_azuresubscription': 'AzureSubscriptionDiscoveryMethod',
'ad_azurerediscache': 'AzureRedisCacheDiscoveryMethod',
'ad_awsbillingreport': 'AwsBillingReportDiscoveryMethod',
'ad_azurebilling': 'AzureBillingDiscoveryMethod',
'ad_azureserviceregion': 'AzureServiceRegionDiscoveryMethod',
'ad_cim': 'CIMAutoDiscoveryMethod',
'ad_collector': 'CollectorAutoDiscoveryMethod',
'ad_jmx': 'JMXAutoDiscoveryMethod',
'ad_esx': 'ESXAutoDiscoveryMethod',
'ad_xen': 'XENAutoDiscoveryMethod',
'ad_script': 'ScriptAutoDiscoveryMethod',
'ad_awsredshift': 'AwsRedShiftDiscoveryMethod',
'ad_netapp': 'NetAppAutoDiscoveryMethod',
'ad_snmp': 'SNMPAutoDiscoveryMethod',
'ad_awsec2scheduledevents': 'EC2ScheduledEventAutoDiscoveryMethod',
'ad_cloudwatch': 'CloudWatchAutoDiscoveryMethod',
'ad_wmi': 'WMIAutoDiscoveryMethod',
'ad_awselasticache': 'AwsElastiCacheDiscoveryMethod',
'ad_awsec2reservedinstancecoverage': 'AwsEC2ReservedInstanceCoverageDiscoveryMethod',
'ad_ec2': 'EC2AutoDiscoveryMethod',
'ad_sdkscript': 'SDKScriptDiscoveryMethod',
'ad_jdbc': 'JDBCAutoDiscoveryMethod',
'ad_awsecsservice': 'AwsEcsServiceDiscoveryMethod',
'ad_ipmi': 'IPMIAutoDiscoveryMethod'
}
def __init__(self, name=None): # noqa: E501
"""AutoDiscoveryMethod - a model defined in Swagger""" # noqa: E501
self._name = None
self.discriminator = 'name'
self.name = name
@property
def name(self):
"""Gets the name of this AutoDiscoveryMethod. # noqa: E501
:return: The name of this AutoDiscoveryMethod. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this AutoDiscoveryMethod.
:param name: The name of this AutoDiscoveryMethod. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_value = data[self.discriminator].lower()
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AutoDiscoveryMethod, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AutoDiscoveryMethod):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
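The `discriminator_value_class_map` above is how a generic auto-discovery payload gets deserialized into the right concrete model: `get_real_child_model` lower-cases the payload's `name` field and looks it up in the map. A minimal sketch of that lookup, grounded in the code in this file (the import path matches the file header; the payload value is illustrative):
```python
# Minimal sketch of discriminator resolution; the payload value is illustrative.
from logicmonitor_sdk.models.auto_discovery_method import AutoDiscoveryMethod

method = AutoDiscoveryMethod(name='ad_script')   # name is required by the setter
payload = {'name': 'AD_SCRIPT'}                  # raw JSON fragment from the API
print(method.get_real_child_model(payload))      # -> 'ScriptAutoDiscoveryMethod'
```
Values not present in the map simply return `None`; the duplicate empty-string keys flagged above mean only `GcpBillingDiscoveryMethod` survives for an empty discriminator.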
#### File: logicmonitor_sdk/models/aws_dynamodb_collector_attribute.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.collector_attribute import CollectorAttribute # noqa: F401,E501
class AwsDynamodbCollectorAttribute(CollectorAttribute):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'aws_range_value': 'str',
'aws_query_range_value': 'str',
'aws_dynamodb_attr_type': 'str',
'aws_query_range_op': 'str',
'aws_key_value': 'str',
'aws_attribute_name': 'str',
'aws_query_index_type': 'str',
'aws_query_index_name': 'str',
'aws_query_key_value': 'str'
}
attribute_map = {
'name': 'name',
'aws_range_value': 'awsRangeValue',
'aws_query_range_value': 'awsQueryRangeValue',
'aws_dynamodb_attr_type': 'awsDynamodbAttrType',
'aws_query_range_op': 'awsQueryRangeOp',
'aws_key_value': 'awsKeyValue',
'aws_attribute_name': 'awsAttributeName',
'aws_query_index_type': 'awsQueryIndexType',
'aws_query_index_name': 'awsQueryIndexName',
'aws_query_key_value': 'awsQueryKeyValue'
}
def __init__(self, name=None, aws_range_value=None, aws_query_range_value=None, aws_dynamodb_attr_type=None, aws_query_range_op=None, aws_key_value=None, aws_attribute_name=None, aws_query_index_type=None, aws_query_index_name=None, aws_query_key_value=None): # noqa: E501
"""AwsDynamodbCollectorAttribute - a model defined in Swagger""" # noqa: E501
self._name = None
self._aws_range_value = None
self._aws_query_range_value = None
self._aws_dynamodb_attr_type = None
self._aws_query_range_op = None
self._aws_key_value = None
self._aws_attribute_name = None
self._aws_query_index_type = None
self._aws_query_index_name = None
self._aws_query_key_value = None
self.discriminator = None
self.name = name
if aws_range_value is not None:
self.aws_range_value = aws_range_value
if aws_query_range_value is not None:
self.aws_query_range_value = aws_query_range_value
if aws_dynamodb_attr_type is not None:
self.aws_dynamodb_attr_type = aws_dynamodb_attr_type
if aws_query_range_op is not None:
self.aws_query_range_op = aws_query_range_op
if aws_key_value is not None:
self.aws_key_value = aws_key_value
if aws_attribute_name is not None:
self.aws_attribute_name = aws_attribute_name
if aws_query_index_type is not None:
self.aws_query_index_type = aws_query_index_type
if aws_query_index_name is not None:
self.aws_query_index_name = aws_query_index_name
if aws_query_key_value is not None:
self.aws_query_key_value = aws_query_key_value
@property
def name(self):
"""Gets the name of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The name of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this AwsDynamodbCollectorAttribute.
:param name: The name of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def aws_range_value(self):
"""Gets the aws_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_range_value
@aws_range_value.setter
def aws_range_value(self, aws_range_value):
"""Sets the aws_range_value of this AwsDynamodbCollectorAttribute.
:param aws_range_value: The aws_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_range_value = aws_range_value
@property
def aws_query_range_value(self):
"""Gets the aws_query_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_query_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_query_range_value
@aws_query_range_value.setter
def aws_query_range_value(self, aws_query_range_value):
"""Sets the aws_query_range_value of this AwsDynamodbCollectorAttribute.
:param aws_query_range_value: The aws_query_range_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_query_range_value = aws_query_range_value
@property
def aws_dynamodb_attr_type(self):
"""Gets the aws_dynamodb_attr_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_dynamodb_attr_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_dynamodb_attr_type
@aws_dynamodb_attr_type.setter
def aws_dynamodb_attr_type(self, aws_dynamodb_attr_type):
"""Sets the aws_dynamodb_attr_type of this AwsDynamodbCollectorAttribute.
:param aws_dynamodb_attr_type: The aws_dynamodb_attr_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_dynamodb_attr_type = aws_dynamodb_attr_type
@property
def aws_query_range_op(self):
"""Gets the aws_query_range_op of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_query_range_op of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_query_range_op
@aws_query_range_op.setter
def aws_query_range_op(self, aws_query_range_op):
"""Sets the aws_query_range_op of this AwsDynamodbCollectorAttribute.
:param aws_query_range_op: The aws_query_range_op of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_query_range_op = aws_query_range_op
@property
def aws_key_value(self):
"""Gets the aws_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_key_value
@aws_key_value.setter
def aws_key_value(self, aws_key_value):
"""Sets the aws_key_value of this AwsDynamodbCollectorAttribute.
:param aws_key_value: The aws_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_key_value = aws_key_value
@property
def aws_attribute_name(self):
"""Gets the aws_attribute_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_attribute_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_attribute_name
@aws_attribute_name.setter
def aws_attribute_name(self, aws_attribute_name):
"""Sets the aws_attribute_name of this AwsDynamodbCollectorAttribute.
:param aws_attribute_name: The aws_attribute_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_attribute_name = aws_attribute_name
@property
def aws_query_index_type(self):
"""Gets the aws_query_index_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_query_index_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_query_index_type
@aws_query_index_type.setter
def aws_query_index_type(self, aws_query_index_type):
"""Sets the aws_query_index_type of this AwsDynamodbCollectorAttribute.
:param aws_query_index_type: The aws_query_index_type of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_query_index_type = aws_query_index_type
@property
def aws_query_index_name(self):
"""Gets the aws_query_index_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_query_index_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_query_index_name
@aws_query_index_name.setter
def aws_query_index_name(self, aws_query_index_name):
"""Sets the aws_query_index_name of this AwsDynamodbCollectorAttribute.
:param aws_query_index_name: The aws_query_index_name of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_query_index_name = aws_query_index_name
@property
def aws_query_key_value(self):
"""Gets the aws_query_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:return: The aws_query_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:rtype: str
"""
return self._aws_query_key_value
@aws_query_key_value.setter
def aws_query_key_value(self, aws_query_key_value):
"""Sets the aws_query_key_value of this AwsDynamodbCollectorAttribute.
:param aws_query_key_value: The aws_query_key_value of this AwsDynamodbCollectorAttribute. # noqa: E501
:type: str
"""
self._aws_query_key_value = aws_query_key_value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AwsDynamodbCollectorAttribute, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AwsDynamodbCollectorAttribute):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/batch_job_execution_item.py
```python
import pprint
import re # noqa: F401
import six
class BatchJobExecutionItem(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'started_on_local': 'str',
'finished_on_local': 'str',
'user_data': 'str',
'stdout': 'str',
'bj_name': 'str',
'alert_level': 'int',
'device_batch_job_id': 'int',
'stderr': 'str',
'device_display_name': 'str',
'cmdline': 'str',
'exit_code': 'int',
'id': 'int',
'execution_no': 'int',
'finished_on': 'int',
'started_on': 'int'
}
attribute_map = {
'started_on_local': 'startedOnLocal',
'finished_on_local': 'finishedOnLocal',
'user_data': 'userData',
'stdout': 'stdout',
'bj_name': 'bjName',
'alert_level': 'alertLevel',
'device_batch_job_id': 'deviceBatchJobId',
'stderr': 'stderr',
'device_display_name': 'deviceDisplayName',
'cmdline': 'cmdline',
'exit_code': 'exitCode',
'id': 'id',
'execution_no': 'executionNo',
'finished_on': 'finishedOn',
'started_on': 'startedOn'
}
def __init__(self, started_on_local=None, finished_on_local=None, user_data=None, stdout=None, bj_name=None, alert_level=None, device_batch_job_id=None, stderr=None, device_display_name=None, cmdline=None, exit_code=None, id=None, execution_no=None, finished_on=None, started_on=None): # noqa: E501
"""BatchJobExecutionItem - a model defined in Swagger""" # noqa: E501
self._started_on_local = None
self._finished_on_local = None
self._user_data = None
self._stdout = None
self._bj_name = None
self._alert_level = None
self._device_batch_job_id = None
self._stderr = None
self._device_display_name = None
self._cmdline = None
self._exit_code = None
self._id = None
self._execution_no = None
self._finished_on = None
self._started_on = None
self.discriminator = None
if started_on_local is not None:
self.started_on_local = started_on_local
if finished_on_local is not None:
self.finished_on_local = finished_on_local
if user_data is not None:
self.user_data = user_data
if stdout is not None:
self.stdout = stdout
if bj_name is not None:
self.bj_name = bj_name
if alert_level is not None:
self.alert_level = alert_level
if device_batch_job_id is not None:
self.device_batch_job_id = device_batch_job_id
if stderr is not None:
self.stderr = stderr
if device_display_name is not None:
self.device_display_name = device_display_name
if cmdline is not None:
self.cmdline = cmdline
if exit_code is not None:
self.exit_code = exit_code
if id is not None:
self.id = id
if execution_no is not None:
self.execution_no = execution_no
if finished_on is not None:
self.finished_on = finished_on
if started_on is not None:
self.started_on = started_on
@property
def started_on_local(self):
"""Gets the started_on_local of this BatchJobExecutionItem. # noqa: E501
:return: The started_on_local of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._started_on_local
@started_on_local.setter
def started_on_local(self, started_on_local):
"""Sets the started_on_local of this BatchJobExecutionItem.
:param started_on_local: The started_on_local of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._started_on_local = started_on_local
@property
def finished_on_local(self):
"""Gets the finished_on_local of this BatchJobExecutionItem. # noqa: E501
:return: The finished_on_local of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._finished_on_local
@finished_on_local.setter
def finished_on_local(self, finished_on_local):
"""Sets the finished_on_local of this BatchJobExecutionItem.
:param finished_on_local: The finished_on_local of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._finished_on_local = finished_on_local
@property
def user_data(self):
"""Gets the user_data of this BatchJobExecutionItem. # noqa: E501
:return: The user_data of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._user_data
@user_data.setter
def user_data(self, user_data):
"""Sets the user_data of this BatchJobExecutionItem.
:param user_data: The user_data of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._user_data = user_data
@property
def stdout(self):
"""Gets the stdout of this BatchJobExecutionItem. # noqa: E501
:return: The stdout of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._stdout
@stdout.setter
def stdout(self, stdout):
"""Sets the stdout of this BatchJobExecutionItem.
:param stdout: The stdout of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._stdout = stdout
@property
def bj_name(self):
"""Gets the bj_name of this BatchJobExecutionItem. # noqa: E501
:return: The bj_name of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._bj_name
@bj_name.setter
def bj_name(self, bj_name):
"""Sets the bj_name of this BatchJobExecutionItem.
:param bj_name: The bj_name of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._bj_name = bj_name
@property
def alert_level(self):
"""Gets the alert_level of this BatchJobExecutionItem. # noqa: E501
:return: The alert_level of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._alert_level
@alert_level.setter
def alert_level(self, alert_level):
"""Sets the alert_level of this BatchJobExecutionItem.
:param alert_level: The alert_level of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._alert_level = alert_level
@property
def device_batch_job_id(self):
"""Gets the device_batch_job_id of this BatchJobExecutionItem. # noqa: E501
:return: The device_batch_job_id of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._device_batch_job_id
@device_batch_job_id.setter
def device_batch_job_id(self, device_batch_job_id):
"""Sets the device_batch_job_id of this BatchJobExecutionItem.
:param device_batch_job_id: The device_batch_job_id of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._device_batch_job_id = device_batch_job_id
@property
def stderr(self):
"""Gets the stderr of this BatchJobExecutionItem. # noqa: E501
:return: The stderr of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._stderr
@stderr.setter
def stderr(self, stderr):
"""Sets the stderr of this BatchJobExecutionItem.
:param stderr: The stderr of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._stderr = stderr
@property
def device_display_name(self):
"""Gets the device_display_name of this BatchJobExecutionItem. # noqa: E501
:return: The device_display_name of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._device_display_name
@device_display_name.setter
def device_display_name(self, device_display_name):
"""Sets the device_display_name of this BatchJobExecutionItem.
:param device_display_name: The device_display_name of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._device_display_name = device_display_name
@property
def cmdline(self):
"""Gets the cmdline of this BatchJobExecutionItem. # noqa: E501
:return: The cmdline of this BatchJobExecutionItem. # noqa: E501
:rtype: str
"""
return self._cmdline
@cmdline.setter
def cmdline(self, cmdline):
"""Sets the cmdline of this BatchJobExecutionItem.
:param cmdline: The cmdline of this BatchJobExecutionItem. # noqa: E501
:type: str
"""
self._cmdline = cmdline
@property
def exit_code(self):
"""Gets the exit_code of this BatchJobExecutionItem. # noqa: E501
:return: The exit_code of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._exit_code
@exit_code.setter
def exit_code(self, exit_code):
"""Sets the exit_code of this BatchJobExecutionItem.
:param exit_code: The exit_code of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._exit_code = exit_code
@property
def id(self):
"""Gets the id of this BatchJobExecutionItem. # noqa: E501
:return: The id of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this BatchJobExecutionItem.
:param id: The id of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._id = id
@property
def execution_no(self):
"""Gets the execution_no of this BatchJobExecutionItem. # noqa: E501
:return: The execution_no of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._execution_no
@execution_no.setter
def execution_no(self, execution_no):
"""Sets the execution_no of this BatchJobExecutionItem.
:param execution_no: The execution_no of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._execution_no = execution_no
@property
def finished_on(self):
"""Gets the finished_on of this BatchJobExecutionItem. # noqa: E501
:return: The finished_on of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._finished_on
@finished_on.setter
def finished_on(self, finished_on):
"""Sets the finished_on of this BatchJobExecutionItem.
:param finished_on: The finished_on of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._finished_on = finished_on
@property
def started_on(self):
"""Gets the started_on of this BatchJobExecutionItem. # noqa: E501
:return: The started_on of this BatchJobExecutionItem. # noqa: E501
:rtype: int
"""
return self._started_on
@started_on.setter
def started_on(self, started_on):
"""Sets the started_on of this BatchJobExecutionItem.
:param started_on: The started_on of this BatchJobExecutionItem. # noqa: E501
:type: int
"""
self._started_on = started_on
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(BatchJobExecutionItem, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BatchJobExecutionItem):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
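For reference, a minimal usage sketch of the model above — assuming the `logicmonitor_sdk` package is installed and importable as laid out here; the field values are purely illustrative, not taken from a real account:

```python
from logicmonitor_sdk.models.batch_job_execution_item import BatchJobExecutionItem

item = BatchJobExecutionItem(bj_name="nightly-backup", exit_code=0,
                             started_on=1700000000, finished_on=1700000060)

# to_dict() iterates swagger_types, so its keys are the snake_case attribute
# names, not the camelCase JSON names listed in attribute_map.
print(item.to_dict()["bj_name"])          # nightly-backup

# __eq__ compares __dict__, so two models with identical field values are equal.
print(item == BatchJobExecutionItem(bj_name="nightly-backup", exit_code=0,
                                    started_on=1700000000, finished_on=1700000060))  # True
```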
#### File: logicmonitor_sdk/models/collector_attribute.py
```python
import pprint
import re # noqa: F401
import six
class CollectorAttribute(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str'
}
attribute_map = {
'name': 'name'
}
discriminator_value_class_map = {
'awsdynamodb': 'AwsDynamodbCollectorAttribute',
'ipmi': 'IPMICollectorAttribute',
'awsservicelimitsfromtrustedadvisor': 'AwsServiceLimitsFromTrustedAdvisorCollectorAttribute',
'memcached': 'MemcachedCollectorAttribute',
'udp': 'UDPCollectorAttribute',
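# NOTE: the three GCP entries below share an empty-string key, so in a Python
# dict literal only the last one ('GcpStackDriverCollectorAttributeV2') survives.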
'': 'GcpBillingCollectorAttributeV2',
'datapump': 'DataPumpCollectorAttribute',
'': 'GcpComputeServiceLimitsCollectorAttributeV2',
'awss3': 'AwsS3CollectorAttribute',
'aggregate': 'AggragateCollectorAttribute',
'webpage': 'WebPageCollectorAttribute',
'jmx': 'JMXCollectorAttribute',
'azurenetworkservicelimits': 'AzureNetworkServiceLimitsCollectorAttribute',
'wmi': 'WMICollectorAttribute',
'batchscript': 'BatchScriptCollectorAttribute',
'azureresourcehealth': 'AzureResourceHealthCollectorAttribute',
'jdbc': 'JDBCCollectorAttribute',
'perfmon': 'PerfmonCollectorAttribute',
'awsec2servicelimits': 'AwsEc2ServiceLimitsCollectorAttribute',
'netapp': 'NetAppCollectorAttribute',
'tcp': 'TCPCollectorAttribute',
'esx': 'ESXCollectorAttribute',
'snmp': 'SNMPCollectorAttribute',
'awssqs': 'AwsSqsCollectorAttribute',
'mongo': 'MongoCollectorAttribute',
'awsec2reservedinstancecoverage': 'AwsEC2ReservedInstanceCoverageCollectorAttribute',
'azurevmservicelimits': 'AzureVMServiceLimitsCollectorAttribute',
'': 'GcpStackDriverCollectorAttributeV2',
'awsautoscalingservicelimits': 'AwsAutoScalingServiceLimitsCollectorAttribute',
'ping': 'PingCollectorAttribute',
'azurestorageservicelimits': 'AzureStorageServiceLimitsCollectorAttribute',
'awsclassicelbservicelimits': 'AwsClassicElbServiceLimitsCollectorAttribute',
'awssesservicelimits': 'AwsSesServiceLimitsCollectorAttribute',
'awsbillingreport': 'AwsBillingReportCollectorAttribute',
'awsec2reservedinstance': 'AwsEC2ReservedInstanceCollectorAttribute',
'awscloudwatch': 'AwsCloudWatchCollectorAttribute',
'azureinsights': 'AzureInsightsCollectorAttribute',
'awsecsservicedetails': 'AwsEcsServiceDetailsCollectorAttribute',
'sdkscript': 'SDKScriptCollectorAttribute',
'internal': 'InternalCollectorAttribute',
'azurebilling': 'AzureBillingCollectorAttribute',
'dns': 'DNSCollectorAttribute',
'awsbilling': 'AwsBillingCollectorAttribute',
'awsec2scheduledevents': 'AwsEC2ScheduledEventsCollectorAttribute',
'cim': 'CIMCollectorAttribute',
'script': 'ScriptCollectorAttribute',
'xen': 'XENCollectorAttribute'
}
def __init__(self, name=None): # noqa: E501
"""CollectorAttribute - a model defined in Swagger""" # noqa: E501
self._name = None
self.discriminator = 'name'
self.name = name
@property
def name(self):
"""Gets the name of this CollectorAttribute. # noqa: E501
:return: The name of this CollectorAttribute. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this CollectorAttribute.
:param name: The name of this CollectorAttribute. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_value = data[self.discriminator].lower()
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CollectorAttribute, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CollectorAttribute):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
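A short sketch of how the `name` discriminator above resolves to a concrete attribute class (same caveats as before: the package is assumed importable, values are illustrative):

```python
from logicmonitor_sdk.models.collector_attribute import CollectorAttribute

attr = CollectorAttribute(name="SNMP")

# get_real_child_model() lower-cases the discriminator value and looks it up
# in discriminator_value_class_map to find the concrete attribute class name.
print(attr.get_real_child_model({"name": "SNMP"}))   # SNMPCollectorAttribute

# name is required: the setter raises if it is ever assigned None.
try:
    attr.name = None
except ValueError as exc:
    print(exc)
```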
#### File: logicmonitor_sdk/models/data_point.py
```python
import pprint
import re # noqa: F401
import six
class DataPoint(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alert_for_no_data': 'int',
'max_value': 'str',
'data_type': 'int',
'post_processor_method': 'str',
'post_processor_param': 'str',
'max_digits': 'int',
'raw_data_field_name': 'str',
'description': 'str',
'alert_clear_transition_interval': 'int',
'user_param3': 'str',
'user_param2': 'str',
'type': 'int',
'alert_expr_note': 'str',
'data_source_id': 'int',
'min_value': 'str',
'alert_body': 'str',
'user_param1': 'str',
'name': 'str',
'alert_subject': 'str',
'id': 'int',
'alert_transition_interval': 'int',
'alert_expr': 'str'
}
attribute_map = {
'alert_for_no_data': 'alertForNoData',
'max_value': 'maxValue',
'data_type': 'dataType',
'post_processor_method': 'postProcessorMethod',
'post_processor_param': 'postProcessorParam',
'max_digits': 'maxDigits',
'raw_data_field_name': 'rawDataFieldName',
'description': 'description',
'alert_clear_transition_interval': 'alertClearTransitionInterval',
'user_param3': 'userParam3',
'user_param2': 'userParam2',
'type': 'type',
'alert_expr_note': 'alertExprNote',
'data_source_id': 'dataSourceId',
'min_value': 'minValue',
'alert_body': 'alertBody',
'user_param1': 'userParam1',
'name': 'name',
'alert_subject': 'alertSubject',
'id': 'id',
'alert_transition_interval': 'alertTransitionInterval',
'alert_expr': 'alertExpr'
}
def __init__(self, alert_for_no_data=None, max_value=None, data_type=None, post_processor_method=None, post_processor_param=None, max_digits=None, raw_data_field_name=None, description=None, alert_clear_transition_interval=None, user_param3=None, user_param2=None, type=None, alert_expr_note=None, data_source_id=None, min_value=None, alert_body=None, user_param1=None, name=None, alert_subject=None, id=None, alert_transition_interval=None, alert_expr=None): # noqa: E501
"""DataPoint - a model defined in Swagger""" # noqa: E501
self._alert_for_no_data = None
self._max_value = None
self._data_type = None
self._post_processor_method = None
self._post_processor_param = None
self._max_digits = None
self._raw_data_field_name = None
self._description = None
self._alert_clear_transition_interval = None
self._user_param3 = None
self._user_param2 = None
self._type = None
self._alert_expr_note = None
self._data_source_id = None
self._min_value = None
self._alert_body = None
self._user_param1 = None
self._name = None
self._alert_subject = None
self._id = None
self._alert_transition_interval = None
self._alert_expr = None
self.discriminator = None
if alert_for_no_data is not None:
self.alert_for_no_data = alert_for_no_data
if max_value is not None:
self.max_value = max_value
if data_type is not None:
self.data_type = data_type
if post_processor_method is not None:
self.post_processor_method = post_processor_method
if post_processor_param is not None:
self.post_processor_param = post_processor_param
if max_digits is not None:
self.max_digits = max_digits
if raw_data_field_name is not None:
self.raw_data_field_name = raw_data_field_name
if description is not None:
self.description = description
if alert_clear_transition_interval is not None:
self.alert_clear_transition_interval = alert_clear_transition_interval
if user_param3 is not None:
self.user_param3 = user_param3
if user_param2 is not None:
self.user_param2 = user_param2
if type is not None:
self.type = type
if alert_expr_note is not None:
self.alert_expr_note = alert_expr_note
if data_source_id is not None:
self.data_source_id = data_source_id
if min_value is not None:
self.min_value = min_value
if alert_body is not None:
self.alert_body = alert_body
if user_param1 is not None:
self.user_param1 = user_param1
self.name = name
if alert_subject is not None:
self.alert_subject = alert_subject
if id is not None:
self.id = id
if alert_transition_interval is not None:
self.alert_transition_interval = alert_transition_interval
if alert_expr is not None:
self.alert_expr = alert_expr
@property
def alert_for_no_data(self):
"""Gets the alert_for_no_data of this DataPoint. # noqa: E501
:return: The alert_for_no_data of this DataPoint. # noqa: E501
:rtype: int
"""
return self._alert_for_no_data
@alert_for_no_data.setter
def alert_for_no_data(self, alert_for_no_data):
"""Sets the alert_for_no_data of this DataPoint.
:param alert_for_no_data: The alert_for_no_data of this DataPoint. # noqa: E501
:type: int
"""
self._alert_for_no_data = alert_for_no_data
@property
def max_value(self):
"""Gets the max_value of this DataPoint. # noqa: E501
:return: The max_value of this DataPoint. # noqa: E501
:rtype: str
"""
return self._max_value
@max_value.setter
def max_value(self, max_value):
"""Sets the max_value of this DataPoint.
:param max_value: The max_value of this DataPoint. # noqa: E501
:type: str
"""
self._max_value = max_value
@property
def data_type(self):
"""Gets the data_type of this DataPoint. # noqa: E501
:return: The data_type of this DataPoint. # noqa: E501
:rtype: int
"""
return self._data_type
@data_type.setter
def data_type(self, data_type):
"""Sets the data_type of this DataPoint.
:param data_type: The data_type of this DataPoint. # noqa: E501
:type: int
"""
self._data_type = data_type
@property
def post_processor_method(self):
"""Gets the post_processor_method of this DataPoint. # noqa: E501
:return: The post_processor_method of this DataPoint. # noqa: E501
:rtype: str
"""
return self._post_processor_method
@post_processor_method.setter
def post_processor_method(self, post_processor_method):
"""Sets the post_processor_method of this DataPoint.
:param post_processor_method: The post_processor_method of this DataPoint. # noqa: E501
:type: str
"""
self._post_processor_method = post_processor_method
@property
def post_processor_param(self):
"""Gets the post_processor_param of this DataPoint. # noqa: E501
:return: The post_processor_param of this DataPoint. # noqa: E501
:rtype: str
"""
return self._post_processor_param
@post_processor_param.setter
def post_processor_param(self, post_processor_param):
"""Sets the post_processor_param of this DataPoint.
:param post_processor_param: The post_processor_param of this DataPoint. # noqa: E501
:type: str
"""
self._post_processor_param = post_processor_param
@property
def max_digits(self):
"""Gets the max_digits of this DataPoint. # noqa: E501
:return: The max_digits of this DataPoint. # noqa: E501
:rtype: int
"""
return self._max_digits
@max_digits.setter
def max_digits(self, max_digits):
"""Sets the max_digits of this DataPoint.
:param max_digits: The max_digits of this DataPoint. # noqa: E501
:type: int
"""
self._max_digits = max_digits
@property
def raw_data_field_name(self):
"""Gets the raw_data_field_name of this DataPoint. # noqa: E501
:return: The raw_data_field_name of this DataPoint. # noqa: E501
:rtype: str
"""
return self._raw_data_field_name
@raw_data_field_name.setter
def raw_data_field_name(self, raw_data_field_name):
"""Sets the raw_data_field_name of this DataPoint.
:param raw_data_field_name: The raw_data_field_name of this DataPoint. # noqa: E501
:type: str
"""
self._raw_data_field_name = raw_data_field_name
@property
def description(self):
"""Gets the description of this DataPoint. # noqa: E501
:return: The description of this DataPoint. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this DataPoint.
:param description: The description of this DataPoint. # noqa: E501
:type: str
"""
self._description = description
@property
def alert_clear_transition_interval(self):
"""Gets the alert_clear_transition_interval of this DataPoint. # noqa: E501
:return: The alert_clear_transition_interval of this DataPoint. # noqa: E501
:rtype: int
"""
return self._alert_clear_transition_interval
@alert_clear_transition_interval.setter
def alert_clear_transition_interval(self, alert_clear_transition_interval):
"""Sets the alert_clear_transition_interval of this DataPoint.
:param alert_clear_transition_interval: The alert_clear_transition_interval of this DataPoint. # noqa: E501
:type: int
"""
self._alert_clear_transition_interval = alert_clear_transition_interval
@property
def user_param3(self):
"""Gets the user_param3 of this DataPoint. # noqa: E501
:return: The user_param3 of this DataPoint. # noqa: E501
:rtype: str
"""
return self._user_param3
@user_param3.setter
def user_param3(self, user_param3):
"""Sets the user_param3 of this DataPoint.
:param user_param3: The user_param3 of this DataPoint. # noqa: E501
:type: str
"""
self._user_param3 = user_param3
@property
def user_param2(self):
"""Gets the user_param2 of this DataPoint. # noqa: E501
:return: The user_param2 of this DataPoint. # noqa: E501
:rtype: str
"""
return self._user_param2
@user_param2.setter
def user_param2(self, user_param2):
"""Sets the user_param2 of this DataPoint.
:param user_param2: The user_param2 of this DataPoint. # noqa: E501
:type: str
"""
self._user_param2 = user_param2
@property
def type(self):
"""Gets the type of this DataPoint. # noqa: E501
:return: The type of this DataPoint. # noqa: E501
:rtype: int
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this DataPoint.
:param type: The type of this DataPoint. # noqa: E501
:type: int
"""
self._type = type
@property
def alert_expr_note(self):
"""Gets the alert_expr_note of this DataPoint. # noqa: E501
:return: The alert_expr_note of this DataPoint. # noqa: E501
:rtype: str
"""
return self._alert_expr_note
@alert_expr_note.setter
def alert_expr_note(self, alert_expr_note):
"""Sets the alert_expr_note of this DataPoint.
:param alert_expr_note: The alert_expr_note of this DataPoint. # noqa: E501
:type: str
"""
self._alert_expr_note = alert_expr_note
@property
def data_source_id(self):
"""Gets the data_source_id of this DataPoint. # noqa: E501
:return: The data_source_id of this DataPoint. # noqa: E501
:rtype: int
"""
return self._data_source_id
@data_source_id.setter
def data_source_id(self, data_source_id):
"""Sets the data_source_id of this DataPoint.
:param data_source_id: The data_source_id of this DataPoint. # noqa: E501
:type: int
"""
self._data_source_id = data_source_id
@property
def min_value(self):
"""Gets the min_value of this DataPoint. # noqa: E501
:return: The min_value of this DataPoint. # noqa: E501
:rtype: str
"""
return self._min_value
@min_value.setter
def min_value(self, min_value):
"""Sets the min_value of this DataPoint.
:param min_value: The min_value of this DataPoint. # noqa: E501
:type: str
"""
self._min_value = min_value
@property
def alert_body(self):
"""Gets the alert_body of this DataPoint. # noqa: E501
:return: The alert_body of this DataPoint. # noqa: E501
:rtype: str
"""
return self._alert_body
@alert_body.setter
def alert_body(self, alert_body):
"""Sets the alert_body of this DataPoint.
:param alert_body: The alert_body of this DataPoint. # noqa: E501
:type: str
"""
self._alert_body = alert_body
@property
def user_param1(self):
"""Gets the user_param1 of this DataPoint. # noqa: E501
:return: The user_param1 of this DataPoint. # noqa: E501
:rtype: str
"""
return self._user_param1
@user_param1.setter
def user_param1(self, user_param1):
"""Sets the user_param1 of this DataPoint.
:param user_param1: The user_param1 of this DataPoint. # noqa: E501
:type: str
"""
self._user_param1 = user_param1
@property
def name(self):
"""Gets the name of this DataPoint. # noqa: E501
:return: The name of this DataPoint. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this DataPoint.
:param name: The name of this DataPoint. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def alert_subject(self):
"""Gets the alert_subject of this DataPoint. # noqa: E501
:return: The alert_subject of this DataPoint. # noqa: E501
:rtype: str
"""
return self._alert_subject
@alert_subject.setter
def alert_subject(self, alert_subject):
"""Sets the alert_subject of this DataPoint.
:param alert_subject: The alert_subject of this DataPoint. # noqa: E501
:type: str
"""
self._alert_subject = alert_subject
@property
def id(self):
"""Gets the id of this DataPoint. # noqa: E501
:return: The id of this DataPoint. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this DataPoint.
:param id: The id of this DataPoint. # noqa: E501
:type: int
"""
self._id = id
@property
def alert_transition_interval(self):
"""Gets the alert_transition_interval of this DataPoint. # noqa: E501
:return: The alert_transition_interval of this DataPoint. # noqa: E501
:rtype: int
"""
return self._alert_transition_interval
@alert_transition_interval.setter
def alert_transition_interval(self, alert_transition_interval):
"""Sets the alert_transition_interval of this DataPoint.
:param alert_transition_interval: The alert_transition_interval of this DataPoint. # noqa: E501
:type: int
"""
self._alert_transition_interval = alert_transition_interval
@property
def alert_expr(self):
"""Gets the alert_expr of this DataPoint. # noqa: E501
:return: The alert_expr of this DataPoint. # noqa: E501
:rtype: str
"""
return self._alert_expr
@alert_expr.setter
def alert_expr(self, alert_expr):
"""Sets the alert_expr of this DataPoint.
:param alert_expr: The alert_expr of this DataPoint. # noqa: E501
:type: str
"""
self._alert_expr = alert_expr
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DataPoint, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DataPoint):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
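A minimal sketch of the one required field on this model (values are illustrative only):

```python
from logicmonitor_sdk.models.data_point import DataPoint

# name is the only field assigned unconditionally in __init__, and its setter
# rejects None, so it is effectively required.
dp = DataPoint(name="CPUBusyPercent", description="Percent of time the CPU is busy")
print(dp.to_dict()["name"])    # CPUBusyPercent

try:
    DataPoint()                # name defaults to None and is rejected
except ValueError as exc:
    print(exc)
```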
#### File: logicmonitor_sdk/models/device_data_source_data.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.raw_data_values import RawDataValues # noqa: F401,E501
class DeviceDataSourceData(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'instances': 'dict(str, RawDataValues)',
'data_points': 'list[str]',
'next_page_params': 'str',
'data_source_name': 'str'
}
attribute_map = {
'instances': 'instances',
'data_points': 'dataPoints',
'next_page_params': 'nextPageParams',
'data_source_name': 'dataSourceName'
}
def __init__(self, instances=None, data_points=None, next_page_params=None, data_source_name=None): # noqa: E501
"""DeviceDataSourceData - a model defined in Swagger""" # noqa: E501
self._instances = None
self._data_points = None
self._next_page_params = None
self._data_source_name = None
self.discriminator = None
if instances is not None:
self.instances = instances
if data_points is not None:
self.data_points = data_points
if next_page_params is not None:
self.next_page_params = next_page_params
if data_source_name is not None:
self.data_source_name = data_source_name
@property
def instances(self):
"""Gets the instances of this DeviceDataSourceData. # noqa: E501
:return: The instances of this DeviceDataSourceData. # noqa: E501
:rtype: dict(str, RawDataValues)
"""
return self._instances
@instances.setter
def instances(self, instances):
"""Sets the instances of this DeviceDataSourceData.
:param instances: The instances of this DeviceDataSourceData. # noqa: E501
:type: dict(str, RawDataValues)
"""
self._instances = instances
@property
def data_points(self):
"""Gets the data_points of this DeviceDataSourceData. # noqa: E501
:return: The data_points of this DeviceDataSourceData. # noqa: E501
:rtype: list[str]
"""
return self._data_points
@data_points.setter
def data_points(self, data_points):
"""Sets the data_points of this DeviceDataSourceData.
:param data_points: The data_points of this DeviceDataSourceData. # noqa: E501
:type: list[str]
"""
self._data_points = data_points
@property
def next_page_params(self):
"""Gets the next_page_params of this DeviceDataSourceData. # noqa: E501
:return: The next_page_params of this DeviceDataSourceData. # noqa: E501
:rtype: str
"""
return self._next_page_params
@next_page_params.setter
def next_page_params(self, next_page_params):
"""Sets the next_page_params of this DeviceDataSourceData.
:param next_page_params: The next_page_params of this DeviceDataSourceData. # noqa: E501
:type: str
"""
self._next_page_params = next_page_params
@property
def data_source_name(self):
"""Gets the data_source_name of this DeviceDataSourceData. # noqa: E501
:return: The data_source_name of this DeviceDataSourceData. # noqa: E501
:rtype: str
"""
return self._data_source_name
@data_source_name.setter
def data_source_name(self, data_source_name):
"""Sets the data_source_name of this DeviceDataSourceData.
:param data_source_name: The data_source_name of this DeviceDataSourceData. # noqa: E501
:type: str
"""
self._data_source_name = data_source_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DeviceDataSourceData, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeviceDataSourceData):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
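A brief sketch of how `to_dict()` serializes this model (illustrative values; nested `RawDataValues` instances would be converted the same way through their own `to_dict()`):

```python
from logicmonitor_sdk.models.device_data_source_data import DeviceDataSourceData

data = DeviceDataSourceData(data_source_name="CPU",
                            data_points=["CPUBusyPercent", "CPUIdlePercent"])

# Lists are mapped element by element and dict values are converted with their
# own to_dict() when they have one, so nested models serialize recursively.
print(data.to_dict())
# {'instances': None, 'data_points': ['CPUBusyPercent', 'CPUIdlePercent'],
#  'next_page_params': None, 'data_source_name': 'CPU'}
```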
#### File: logicmonitor_sdk/models/device_data_source_instance_config_alert.py
```python
import pprint
import re # noqa: F401
import six
class DeviceDataSourceInstanceConfigAlert(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alert_level': 'int',
'id': 'str',
'alert_id': 'str',
'alert_summary': 'str',
'timestamp': 'int'
}
attribute_map = {
'alert_level': 'alertLevel',
'id': 'id',
'alert_id': 'alertId',
'alert_summary': 'alertSummary',
'timestamp': 'timestamp'
}
def __init__(self, alert_level=None, id=None, alert_id=None, alert_summary=None, timestamp=None): # noqa: E501
"""DeviceDataSourceInstanceConfigAlert - a model defined in Swagger""" # noqa: E501
self._alert_level = None
self._id = None
self._alert_id = None
self._alert_summary = None
self._timestamp = None
self.discriminator = None
if alert_level is not None:
self.alert_level = alert_level
if id is not None:
self.id = id
if alert_id is not None:
self.alert_id = alert_id
if alert_summary is not None:
self.alert_summary = alert_summary
if timestamp is not None:
self.timestamp = timestamp
@property
def alert_level(self):
"""Gets the alert_level of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:return: The alert_level of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:rtype: int
"""
return self._alert_level
@alert_level.setter
def alert_level(self, alert_level):
"""Sets the alert_level of this DeviceDataSourceInstanceConfigAlert.
:param alert_level: The alert_level of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:type: int
"""
self._alert_level = alert_level
@property
def id(self):
"""Gets the id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:return: The id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this DeviceDataSourceInstanceConfigAlert.
:param id: The id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:type: str
"""
self._id = id
@property
def alert_id(self):
"""Gets the alert_id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:return: The alert_id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:rtype: str
"""
return self._alert_id
@alert_id.setter
def alert_id(self, alert_id):
"""Sets the alert_id of this DeviceDataSourceInstanceConfigAlert.
:param alert_id: The alert_id of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:type: str
"""
self._alert_id = alert_id
@property
def alert_summary(self):
"""Gets the alert_summary of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:return: The alert_summary of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:rtype: str
"""
return self._alert_summary
@alert_summary.setter
def alert_summary(self, alert_summary):
"""Sets the alert_summary of this DeviceDataSourceInstanceConfigAlert.
:param alert_summary: The alert_summary of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:type: str
"""
self._alert_summary = alert_summary
@property
def timestamp(self):
"""Gets the timestamp of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:return: The timestamp of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:rtype: int
"""
return self._timestamp
@timestamp.setter
def timestamp(self, timestamp):
"""Sets the timestamp of this DeviceDataSourceInstanceConfigAlert.
:param timestamp: The timestamp of this DeviceDataSourceInstanceConfigAlert. # noqa: E501
:type: int
"""
self._timestamp = timestamp
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(DeviceDataSourceInstanceConfigAlert, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DeviceDataSourceInstanceConfigAlert):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
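Finally, a sketch of the shared string and comparison helpers that all of these generated models expose (illustrative values only):

```python
from logicmonitor_sdk.models.device_data_source_instance_config_alert import (
    DeviceDataSourceInstanceConfigAlert,
)

alert = DeviceDataSourceInstanceConfigAlert(alert_level=3,
                                            alert_summary="config changed",
                                            timestamp=1700000000)

# __repr__ delegates to to_str(), which pretty-prints to_dict().
print(alert)

# __ne__ is simply the negation of __eq__.
print(alert != DeviceDataSourceInstanceConfigAlert())   # True
```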
#### File: logicmonitor_sdk/models/device.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.name_and_value import NameAndValue # noqa: F401,E501
class Device(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'netflow_collector_group_name': 'str',
'azure_state': 'int',
'related_device_id': 'int',
'display_name': 'str',
'link': 'str',
'aws_state': 'int',
'description': 'str',
'load_balance_collector_group_id': 'int',
'disable_alerting': 'bool',
'netflow_collector_group_id': 'int',
'created_on': 'int',
'system_properties': 'list[NameAndValue]',
'host_status': 'str',
'gcp_state': 'int',
'auto_props_updated_on': 'int',
'scan_config_id': 'int',
'inherited_properties': 'list[NameAndValue]',
'id': 'int',
'enable_netflow': 'bool',
'last_data_time': 'int',
'host_group_ids': 'str',
'up_time_in_seconds': 'int',
'device_type': 'int',
'current_collector_id': 'int',
'netflow_collector_description': 'str',
'netflow_collector_id': 'int',
'user_permission': 'str',
'auto_props_assigned_on': 'int',
'updated_on': 'int',
'preferred_collector_group_name': 'str',
'preferred_collector_group_id': 'int',
'auto_properties': 'list[NameAndValue]',
'custom_properties': 'list[NameAndValue]',
'to_delete_time_in_ms': 'int',
'collector_description': 'str',
'preferred_collector_id': 'int',
'last_rawdata_time': 'int',
'name': 'str',
'deleted_time_in_ms': 'int'
}
attribute_map = {
'netflow_collector_group_name': 'netflowCollectorGroupName',
'azure_state': 'azureState',
'related_device_id': 'relatedDeviceId',
'display_name': 'displayName',
'link': 'link',
'aws_state': 'awsState',
'description': 'description',
'load_balance_collector_group_id': 'loadBalanceCollectorGroupId',
'disable_alerting': 'disableAlerting',
'netflow_collector_group_id': 'netflowCollectorGroupId',
'created_on': 'createdOn',
'system_properties': 'systemProperties',
'host_status': 'hostStatus',
'gcp_state': 'gcpState',
'auto_props_updated_on': 'autoPropsUpdatedOn',
'scan_config_id': 'scanConfigId',
'inherited_properties': 'inheritedProperties',
'id': 'id',
'enable_netflow': 'enableNetflow',
'last_data_time': 'lastDataTime',
'host_group_ids': 'hostGroupIds',
'up_time_in_seconds': 'upTimeInSeconds',
'device_type': 'deviceType',
'current_collector_id': 'currentCollectorId',
'netflow_collector_description': 'netflowCollectorDescription',
'netflow_collector_id': 'netflowCollectorId',
'user_permission': 'userPermission',
'auto_props_assigned_on': 'autoPropsAssignedOn',
'updated_on': 'updatedOn',
'preferred_collector_group_name': 'preferredCollectorGroupName',
'preferred_collector_group_id': 'preferredCollectorGroupId',
'auto_properties': 'autoProperties',
'custom_properties': 'customProperties',
'to_delete_time_in_ms': 'toDeleteTimeInMs',
'collector_description': 'collectorDescription',
'preferred_collector_id': 'preferredCollectorId',
'last_rawdata_time': 'lastRawdataTime',
'name': 'name',
'deleted_time_in_ms': 'deletedTimeInMs'
}
def __init__(self, netflow_collector_group_name=None, azure_state=None, related_device_id=None, display_name=None, link=None, aws_state=None, description=None, load_balance_collector_group_id=None, disable_alerting=None, netflow_collector_group_id=None, created_on=None, system_properties=None, host_status=None, gcp_state=None, auto_props_updated_on=None, scan_config_id=None, inherited_properties=None, id=None, enable_netflow=None, last_data_time=None, host_group_ids=None, up_time_in_seconds=None, device_type=None, current_collector_id=None, netflow_collector_description=None, netflow_collector_id=None, user_permission=None, auto_props_assigned_on=None, updated_on=None, preferred_collector_group_name=None, preferred_collector_group_id=None, auto_properties=None, custom_properties=None, to_delete_time_in_ms=None, collector_description=None, preferred_collector_id=None, last_rawdata_time=None, name=None, deleted_time_in_ms=None): # noqa: E501
"""Device - a model defined in Swagger""" # noqa: E501
self._netflow_collector_group_name = None
self._azure_state = None
self._related_device_id = None
self._display_name = None
self._link = None
self._aws_state = None
self._description = None
self._load_balance_collector_group_id = None
self._disable_alerting = None
self._netflow_collector_group_id = None
self._created_on = None
self._system_properties = None
self._host_status = None
self._gcp_state = None
self._auto_props_updated_on = None
self._scan_config_id = None
self._inherited_properties = None
self._id = None
self._enable_netflow = None
self._last_data_time = None
self._host_group_ids = None
self._up_time_in_seconds = None
self._device_type = None
self._current_collector_id = None
self._netflow_collector_description = None
self._netflow_collector_id = None
self._user_permission = None
self._auto_props_assigned_on = None
self._updated_on = None
self._preferred_collector_group_name = None
self._preferred_collector_group_id = None
self._auto_properties = None
self._custom_properties = None
self._to_delete_time_in_ms = None
self._collector_description = None
self._preferred_collector_id = None
self._last_rawdata_time = None
self._name = None
self._deleted_time_in_ms = None
self.discriminator = None
if netflow_collector_group_name is not None:
self.netflow_collector_group_name = netflow_collector_group_name
if azure_state is not None:
self.azure_state = azure_state
if related_device_id is not None:
self.related_device_id = related_device_id
self.display_name = display_name
if link is not None:
self.link = link
if aws_state is not None:
self.aws_state = aws_state
if description is not None:
self.description = description
if load_balance_collector_group_id is not None:
self.load_balance_collector_group_id = load_balance_collector_group_id
if disable_alerting is not None:
self.disable_alerting = disable_alerting
if netflow_collector_group_id is not None:
self.netflow_collector_group_id = netflow_collector_group_id
if created_on is not None:
self.created_on = created_on
if system_properties is not None:
self.system_properties = system_properties
if host_status is not None:
self.host_status = host_status
if gcp_state is not None:
self.gcp_state = gcp_state
if auto_props_updated_on is not None:
self.auto_props_updated_on = auto_props_updated_on
if scan_config_id is not None:
self.scan_config_id = scan_config_id
if inherited_properties is not None:
self.inherited_properties = inherited_properties
if id is not None:
self.id = id
if enable_netflow is not None:
self.enable_netflow = enable_netflow
if last_data_time is not None:
self.last_data_time = last_data_time
self.host_group_ids = host_group_ids
if up_time_in_seconds is not None:
self.up_time_in_seconds = up_time_in_seconds
if device_type is not None:
self.device_type = device_type
if current_collector_id is not None:
self.current_collector_id = current_collector_id
if netflow_collector_description is not None:
self.netflow_collector_description = netflow_collector_description
if netflow_collector_id is not None:
self.netflow_collector_id = netflow_collector_id
if user_permission is not None:
self.user_permission = user_permission
if auto_props_assigned_on is not None:
self.auto_props_assigned_on = auto_props_assigned_on
if updated_on is not None:
self.updated_on = updated_on
if preferred_collector_group_name is not None:
self.preferred_collector_group_name = preferred_collector_group_name
if preferred_collector_group_id is not None:
self.preferred_collector_group_id = preferred_collector_group_id
if auto_properties is not None:
self.auto_properties = auto_properties
if custom_properties is not None:
self.custom_properties = custom_properties
if to_delete_time_in_ms is not None:
self.to_delete_time_in_ms = to_delete_time_in_ms
if collector_description is not None:
self.collector_description = collector_description
self.preferred_collector_id = preferred_collector_id
if last_rawdata_time is not None:
self.last_rawdata_time = last_rawdata_time
self.name = name
if deleted_time_in_ms is not None:
self.deleted_time_in_ms = deleted_time_in_ms
@property
def netflow_collector_group_name(self):
"""Gets the netflow_collector_group_name of this Device. # noqa: E501
The name of the Collector Group associated with the device's netflow collector # noqa: E501
:return: The netflow_collector_group_name of this Device. # noqa: E501
:rtype: str
"""
return self._netflow_collector_group_name
@netflow_collector_group_name.setter
def netflow_collector_group_name(self, netflow_collector_group_name):
"""Sets the netflow_collector_group_name of this Device.
The name of the Collector Group associated with the device's netflow collector # noqa: E501
:param netflow_collector_group_name: The netflow_collector_group_name of this Device. # noqa: E501
:type: str
"""
self._netflow_collector_group_name = netflow_collector_group_name
@property
def azure_state(self):
"""Gets the azure_state of this Device. # noqa: E501
The Azure instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated. # noqa: E501
:return: The azure_state of this Device. # noqa: E501
:rtype: int
"""
return self._azure_state
@azure_state.setter
def azure_state(self, azure_state):
"""Sets the azure_state of this Device.
The Azure instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated. # noqa: E501
:param azure_state: The azure_state of this Device. # noqa: E501
:type: int
"""
self._azure_state = azure_state
@property
def related_device_id(self):
"""Gets the related_device_id of this Device. # noqa: E501
The Id of the AWS EC2 instance related to this device, if one exists in the LogicMonitor account. This value defaults to -1, which indicates that there are no related devices # noqa: E501
:return: The related_device_id of this Device. # noqa: E501
:rtype: int
"""
return self._related_device_id
@related_device_id.setter
def related_device_id(self, related_device_id):
"""Sets the related_device_id of this Device.
The Id of the AWS EC2 instance related to this device, if one exists in the LogicMonitor account. This value defaults to -1, which indicates that there are no related devices # noqa: E501
:param related_device_id: The related_device_id of this Device. # noqa: E501
:type: int
"""
self._related_device_id = related_device_id
@property
def display_name(self):
"""Gets the display_name of this Device. # noqa: E501
The display name of the device # noqa: E501
:return: The display_name of this Device. # noqa: E501
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""Sets the display_name of this Device.
The display name of the device # noqa: E501
:param display_name: The display_name of this Device. # noqa: E501
:type: str
"""
if display_name is None:
raise ValueError("Invalid value for `display_name`, must not be `None`") # noqa: E501
self._display_name = display_name
@property
def link(self):
"""Gets the link of this Device. # noqa: E501
The URL link associated with the device # noqa: E501
:return: The link of this Device. # noqa: E501
:rtype: str
"""
return self._link
@link.setter
def link(self, link):
"""Sets the link of this Device.
The URL link associated with the device # noqa: E501
:param link: The link of this Device. # noqa: E501
:type: str
"""
self._link = link
@property
def aws_state(self):
"""Gets the aws_state of this Device. # noqa: E501
The AWS instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated # noqa: E501
:return: The aws_state of this Device. # noqa: E501
:rtype: int
"""
return self._aws_state
@aws_state.setter
def aws_state(self, aws_state):
"""Sets the aws_state of this Device.
The AWS instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated # noqa: E501
:param aws_state: The aws_state of this Device. # noqa: E501
:type: int
"""
self._aws_state = aws_state
@property
def description(self):
"""Gets the description of this Device. # noqa: E501
The device description # noqa: E501
:return: The description of this Device. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this Device.
The device description # noqa: E501
:param description: The description of this Device. # noqa: E501
:type: str
"""
self._description = description
@property
def load_balance_collector_group_id(self):
"""Gets the load_balance_collector_group_id of this Device. # noqa: E501
:return: The load_balance_collector_group_id of this Device. # noqa: E501
:rtype: int
"""
return self._load_balance_collector_group_id
@load_balance_collector_group_id.setter
def load_balance_collector_group_id(self, load_balance_collector_group_id):
"""Sets the load_balance_collector_group_id of this Device.
:param load_balance_collector_group_id: The load_balance_collector_group_id of this Device. # noqa: E501
:type: int
"""
self._load_balance_collector_group_id = load_balance_collector_group_id
@property
def disable_alerting(self):
"""Gets the disable_alerting of this Device. # noqa: E501
Indicates whether alerting is disabled (true) or enabled (false) for this device # noqa: E501
:return: The disable_alerting of this Device. # noqa: E501
:rtype: bool
"""
return self._disable_alerting
@disable_alerting.setter
def disable_alerting(self, disable_alerting):
"""Sets the disable_alerting of this Device.
Indicates whether alerting is disabled (true) or enabled (false) for this device # noqa: E501
:param disable_alerting: The disable_alerting of this Device. # noqa: E501
:type: bool
"""
self._disable_alerting = disable_alerting
@property
def netflow_collector_group_id(self):
"""Gets the netflow_collector_group_id of this Device. # noqa: E501
The id of the Collector Group associated with the device's netflow collector # noqa: E501
:return: The netflow_collector_group_id of this Device. # noqa: E501
:rtype: int
"""
return self._netflow_collector_group_id
@netflow_collector_group_id.setter
def netflow_collector_group_id(self, netflow_collector_group_id):
"""Sets the netflow_collector_group_id of this Device.
The id of the Collector Group associated with the device's netflow collector # noqa: E501
:param netflow_collector_group_id: The netflow_collector_group_id of this Device. # noqa: E501
:type: int
"""
self._netflow_collector_group_id = netflow_collector_group_id
@property
def created_on(self):
"""Gets the created_on of this Device. # noqa: E501
The time, in epoch seconds format, that the device was added to your LogicMonitor account # noqa: E501
:return: The created_on of this Device. # noqa: E501
:rtype: int
"""
return self._created_on
@created_on.setter
def created_on(self, created_on):
"""Sets the created_on of this Device.
The time, in epoch seconds format, that the device was added to your LogicMonitor account # noqa: E501
:param created_on: The created_on of this Device. # noqa: E501
:type: int
"""
self._created_on = created_on
@property
def system_properties(self):
"""Gets the system_properties of this Device. # noqa: E501
Any system properties (aside from system.categories) defined for this device # noqa: E501
:return: The system_properties of this Device. # noqa: E501
:rtype: list[NameAndValue]
"""
return self._system_properties
@system_properties.setter
def system_properties(self, system_properties):
"""Sets the system_properties of this Device.
Any system properties (aside from system.categories) defined for this device # noqa: E501
:param system_properties: The system_properties of this Device. # noqa: E501
:type: list[NameAndValue]
"""
self._system_properties = system_properties
@property
def host_status(self):
"""Gets the host_status of this Device. # noqa: E501
The status of this device, where possible statuses are normal, dead and dead-collector # noqa: E501
:return: The host_status of this Device. # noqa: E501
:rtype: str
"""
return self._host_status
@host_status.setter
def host_status(self, host_status):
"""Sets the host_status of this Device.
The status of this device, where possible statuses are normal, dead and dead-collector # noqa: E501
:param host_status: The host_status of this Device. # noqa: E501
:type: str
"""
self._host_status = host_status
@property
def gcp_state(self):
"""Gets the gcp_state of this Device. # noqa: E501
The GCP instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated # noqa: E501
:return: The gcp_state of this Device. # noqa: E501
:rtype: int
"""
return self._gcp_state
@gcp_state.setter
def gcp_state(self, gcp_state):
"""Sets the gcp_state of this Device.
The GCP instance state (if applicable): 1 indicates that the instance is running, 2 indicates that the instance is stopped, and 3 indicates that the instance is terminated # noqa: E501
:param gcp_state: The gcp_state of this Device. # noqa: E501
:type: int
"""
self._gcp_state = gcp_state
@property
def auto_props_updated_on(self):
"""Gets the auto_props_updated_on of this Device. # noqa: E501
The time, in epoch seconds, that auto properties last ran and updated the properties table for this device # noqa: E501
:return: The auto_props_updated_on of this Device. # noqa: E501
:rtype: int
"""
return self._auto_props_updated_on
@auto_props_updated_on.setter
def auto_props_updated_on(self, auto_props_updated_on):
"""Sets the auto_props_updated_on of this Device.
The time, in epoch seconds, that auto properties last ran and updated the properties table for this device # noqa: E501
:param auto_props_updated_on: The auto_props_updated_on of this Device. # noqa: E501
:type: int
"""
self._auto_props_updated_on = auto_props_updated_on
@property
def scan_config_id(self):
"""Gets the scan_config_id of this Device. # noqa: E501
The Id of the netscan configuration which was used to discover this device. 0 indicates that the device was not discovered by a scan # noqa: E501
:return: The scan_config_id of this Device. # noqa: E501
:rtype: int
"""
return self._scan_config_id
@scan_config_id.setter
def scan_config_id(self, scan_config_id):
"""Sets the scan_config_id of this Device.
The Id of the netscan configuration which was used to discover this device. 0 indicates that the device was not discovered by a scan # noqa: E501
:param scan_config_id: The scan_config_id of this Device. # noqa: E501
:type: int
"""
self._scan_config_id = scan_config_id
@property
def inherited_properties(self):
"""Gets the inherited_properties of this Device. # noqa: E501
Any properties inherited from parents # noqa: E501
:return: The inherited_properties of this Device. # noqa: E501
:rtype: list[NameAndValue]
"""
return self._inherited_properties
@inherited_properties.setter
def inherited_properties(self, inherited_properties):
"""Sets the inherited_properties of this Device.
Any properties inherited from parents # noqa: E501
:param inherited_properties: The inherited_properties of this Device. # noqa: E501
:type: list[NameAndValue]
"""
self._inherited_properties = inherited_properties
@property
def id(self):
"""Gets the id of this Device. # noqa: E501
The Id of the device # noqa: E501
:return: The id of this Device. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Device.
The Id of the device # noqa: E501
:param id: The id of this Device. # noqa: E501
:type: int
"""
self._id = id
@property
def enable_netflow(self):
"""Gets the enable_netflow of this Device. # noqa: E501
Indicates whether Netflow is enabled (true) or disabled (false) for the device # noqa: E501
:return: The enable_netflow of this Device. # noqa: E501
:rtype: bool
"""
return self._enable_netflow
@enable_netflow.setter
def enable_netflow(self, enable_netflow):
"""Sets the enable_netflow of this Device.
Indicates whether Netflow is enabled (true) or disabled (false) for the device # noqa: E501
:param enable_netflow: The enable_netflow of this Device. # noqa: E501
:type: bool
"""
self._enable_netflow = enable_netflow
@property
def last_data_time(self):
"""Gets the last_data_time of this Device. # noqa: E501
The last time, in epoch seconds, that the device received Netflow data # noqa: E501
:return: The last_data_time of this Device. # noqa: E501
:rtype: int
"""
return self._last_data_time
@last_data_time.setter
def last_data_time(self, last_data_time):
"""Sets the last_data_time of this Device.
The last time, in epoch seconds, that the device received Netflow data # noqa: E501
:param last_data_time: The last_data_time of this Device. # noqa: E501
:type: int
"""
self._last_data_time = last_data_time
@property
def host_group_ids(self):
"""Gets the host_group_ids of this Device. # noqa: E501
The Id(s) of the groups the device is in, where multiple group ids are comma separated # noqa: E501
:return: The host_group_ids of this Device. # noqa: E501
:rtype: str
"""
return self._host_group_ids
@host_group_ids.setter
def host_group_ids(self, host_group_ids):
"""Sets the host_group_ids of this Device.
The Id(s) of the groups the device is in, where multiple group ids are comma separated # noqa: E501
:param host_group_ids: The host_group_ids of this Device. # noqa: E501
:type: str
"""
if host_group_ids is None:
raise ValueError("Invalid value for `host_group_ids`, must not be `None`") # noqa: E501
self._host_group_ids = host_group_ids
@property
def up_time_in_seconds(self):
"""Gets the up_time_in_seconds of this Device. # noqa: E501
The uptime of the device in seconds. This value will always be the largest value reported by the following datasources: Host Uptime- SNMPUptime- SNMP_Engine_Uptime- WinSystemUptime- NimbleUptime- # noqa: E501
:return: The up_time_in_seconds of this Device. # noqa: E501
:rtype: int
"""
return self._up_time_in_seconds
@up_time_in_seconds.setter
def up_time_in_seconds(self, up_time_in_seconds):
"""Sets the up_time_in_seconds of this Device.
The uptime of the device in seconds. This value will always be the largest value reported by the following datasources: Host Uptime- SNMPUptime- SNMP_Engine_Uptime- WinSystemUptime- NimbleUptime- # noqa: E501
:param up_time_in_seconds: The up_time_in_seconds of this Device. # noqa: E501
:type: int
"""
self._up_time_in_seconds = up_time_in_seconds
@property
def device_type(self):
"""Gets the device_type of this Device. # noqa: E501
The type of device: 0 indicates a regular device, 2 indicates an AWS device, 4 indicates an Azure device # noqa: E501
:return: The device_type of this Device. # noqa: E501
:rtype: int
"""
return self._device_type
@device_type.setter
def device_type(self, device_type):
"""Sets the device_type of this Device.
The type of device: 0 indicates a regular device, 2 indicates an AWS device, 4 indicates an Azure device # noqa: E501
:param device_type: The device_type of this Device. # noqa: E501
:type: int
"""
self._device_type = device_type
@property
def current_collector_id(self):
"""Gets the current_collector_id of this Device. # noqa: E501
The id of the collector currently monitoring the device and discovering instances # noqa: E501
:return: The current_collector_id of this Device. # noqa: E501
:rtype: int
"""
return self._current_collector_id
@current_collector_id.setter
def current_collector_id(self, current_collector_id):
"""Sets the current_collector_id of this Device.
The id of the collector currently monitoring the device and discovering instances # noqa: E501
:param current_collector_id: The current_collector_id of this Device. # noqa: E501
:type: int
"""
self._current_collector_id = current_collector_id
@property
def netflow_collector_description(self):
"""Gets the netflow_collector_description of this Device. # noqa: E501
The description/name of the netflow collector for this device # noqa: E501
:return: The netflow_collector_description of this Device. # noqa: E501
:rtype: str
"""
return self._netflow_collector_description
@netflow_collector_description.setter
def netflow_collector_description(self, netflow_collector_description):
"""Sets the netflow_collector_description of this Device.
The description/name of the netflow collector for this device # noqa: E501
:param netflow_collector_description: The netflow_collector_description of this Device. # noqa: E501
:type: str
"""
self._netflow_collector_description = netflow_collector_description
@property
def netflow_collector_id(self):
"""Gets the netflow_collector_id of this Device. # noqa: E501
The Id of the netflow collector associated with the device # noqa: E501
:return: The netflow_collector_id of this Device. # noqa: E501
:rtype: int
"""
return self._netflow_collector_id
@netflow_collector_id.setter
def netflow_collector_id(self, netflow_collector_id):
"""Sets the netflow_collector_id of this Device.
The Id of the netflow collector associated with the device # noqa: E501
:param netflow_collector_id: The netflow_collector_id of this Device. # noqa: E501
:type: int
"""
self._netflow_collector_id = netflow_collector_id
@property
def user_permission(self):
"""Gets the user_permission of this Device. # noqa: E501
The read and/or write permissions for this device that are granted to the user who made the API request # noqa: E501
:return: The user_permission of this Device. # noqa: E501
:rtype: str
"""
return self._user_permission
@user_permission.setter
def user_permission(self, user_permission):
"""Sets the user_permission of this Device.
The read and/or write permissions for this device that are granted to the user who made the API request # noqa: E501
:param user_permission: The user_permission of this Device. # noqa: E501
:type: str
"""
self._user_permission = user_permission
@property
def auto_props_assigned_on(self):
"""Gets the auto_props_assigned_on of this Device. # noqa: E501
The time, in epoch seconds format, that properties were first discovered for this device # noqa: E501
:return: The auto_props_assigned_on of this Device. # noqa: E501
:rtype: int
"""
return self._auto_props_assigned_on
@auto_props_assigned_on.setter
def auto_props_assigned_on(self, auto_props_assigned_on):
"""Sets the auto_props_assigned_on of this Device.
The time, in epoch seconds format, that properties were first discovered for this device # noqa: E501
:param auto_props_assigned_on: The auto_props_assigned_on of this Device. # noqa: E501
:type: int
"""
self._auto_props_assigned_on = auto_props_assigned_on
@property
def updated_on(self):
"""Gets the updated_on of this Device. # noqa: E501
The time, in epoch seconds format, that the device was last updated # noqa: E501
:return: The updated_on of this Device. # noqa: E501
:rtype: int
"""
return self._updated_on
@updated_on.setter
def updated_on(self, updated_on):
"""Sets the updated_on of this Device.
The time, in epoch seconds format, that the device was last updated # noqa: E501
:param updated_on: The updated_on of this Device. # noqa: E501
:type: int
"""
self._updated_on = updated_on
@property
def preferred_collector_group_name(self):
"""Gets the preferred_collector_group_name of this Device. # noqa: E501
The name of the Collector Group associated with the device's preferred collector # noqa: E501
:return: The preferred_collector_group_name of this Device. # noqa: E501
:rtype: str
"""
return self._preferred_collector_group_name
@preferred_collector_group_name.setter
def preferred_collector_group_name(self, preferred_collector_group_name):
"""Sets the preferred_collector_group_name of this Device.
The name of the Collector Group associated with the device's preferred collector # noqa: E501
:param preferred_collector_group_name: The preferred_collector_group_name of this Device. # noqa: E501
:type: str
"""
self._preferred_collector_group_name = preferred_collector_group_name
@property
def preferred_collector_group_id(self):
"""Gets the preferred_collector_group_id of this Device. # noqa: E501
The id of the Collector Group associated with the device's preferred collector # noqa: E501
:return: The preferred_collector_group_id of this Device. # noqa: E501
:rtype: int
"""
return self._preferred_collector_group_id
@preferred_collector_group_id.setter
def preferred_collector_group_id(self, preferred_collector_group_id):
"""Sets the preferred_collector_group_id of this Device.
The id of the Collector Group associated with the device's preferred collector # noqa: E501
:param preferred_collector_group_id: The preferred_collector_group_id of this Device. # noqa: E501
:type: int
"""
self._preferred_collector_group_id = preferred_collector_group_id
@property
def auto_properties(self):
"""Gets the auto_properties of this Device. # noqa: E501
Any auto properties assigned to the device # noqa: E501
:return: The auto_properties of this Device. # noqa: E501
:rtype: list[NameAndValue]
"""
return self._auto_properties
@auto_properties.setter
def auto_properties(self, auto_properties):
"""Sets the auto_properties of this Device.
Any auto properties assigned to the device # noqa: E501
:param auto_properties: The auto_properties of this Device. # noqa: E501
:type: list[NameAndValue]
"""
self._auto_properties = auto_properties
@property
def custom_properties(self):
"""Gets the custom_properties of this Device. # noqa: E501
Any non-system properties (aside from system.categories) defined for this device # noqa: E501
:return: The custom_properties of this Device. # noqa: E501
:rtype: list[NameAndValue]
"""
return self._custom_properties
@custom_properties.setter
def custom_properties(self, custom_properties):
"""Sets the custom_properties of this Device.
Any non-system properties (aside from system.categories) defined for this device # noqa: E501
:param custom_properties: The custom_properties of this Device. # noqa: E501
:type: list[NameAndValue]
"""
self._custom_properties = custom_properties
@property
def to_delete_time_in_ms(self):
"""Gets the to_delete_time_in_ms of this Device. # noqa: E501
The number of milliseconds until the device will be automatically deleted from your LogicMonitor account (a value of zero indicates that a future delete time/date has not been scheduled) # noqa: E501
:return: The to_delete_time_in_ms of this Device. # noqa: E501
:rtype: int
"""
return self._to_delete_time_in_ms
@to_delete_time_in_ms.setter
def to_delete_time_in_ms(self, to_delete_time_in_ms):
"""Sets the to_delete_time_in_ms of this Device.
The number of milliseconds until the device will be automatically deleted from your LogicMonitor account (a value of zero indicates that a future delete time/date has not been scheduled) # noqa: E501
:param to_delete_time_in_ms: The to_delete_time_in_ms of this Device. # noqa: E501
:type: int
"""
self._to_delete_time_in_ms = to_delete_time_in_ms
@property
def collector_description(self):
"""Gets the collector_description of this Device. # noqa: E501
The description/name of the collector for this device # noqa: E501
:return: The collector_description of this Device. # noqa: E501
:rtype: str
"""
return self._collector_description
@collector_description.setter
def collector_description(self, collector_description):
"""Sets the collector_description of this Device.
The description/name of the collector for this device # noqa: E501
:param collector_description: The collector_description of this Device. # noqa: E501
:type: str
"""
self._collector_description = collector_description
@property
def preferred_collector_id(self):
"""Gets the preferred_collector_id of this Device. # noqa: E501
The Id of the preferred collector assigned to monitor the device # noqa: E501
:return: The preferred_collector_id of this Device. # noqa: E501
:rtype: int
"""
return self._preferred_collector_id
@preferred_collector_id.setter
def preferred_collector_id(self, preferred_collector_id):
"""Sets the preferred_collector_id of this Device.
The Id of the preferred collector assigned to monitor the device # noqa: E501
:param preferred_collector_id: The preferred_collector_id of this Device. # noqa: E501
:type: int
"""
if preferred_collector_id is None:
raise ValueError("Invalid value for `preferred_collector_id`, must not be `None`") # noqa: E501
self._preferred_collector_id = preferred_collector_id
@property
def last_rawdata_time(self):
"""Gets the last_rawdata_time of this Device. # noqa: E501
The last time, in epoch seconds, that raw Netflow data was reported # noqa: E501
:return: The last_rawdata_time of this Device. # noqa: E501
:rtype: int
"""
return self._last_rawdata_time
@last_rawdata_time.setter
def last_rawdata_time(self, last_rawdata_time):
"""Sets the last_rawdata_time of this Device.
The last time, in epoch seconds, that raw Netflow data was reported # noqa: E501
:param last_rawdata_time: The last_rawdata_time of this Device. # noqa: E501
:type: int
"""
self._last_rawdata_time = last_rawdata_time
@property
def name(self):
"""Gets the name of this Device. # noqa: E501
The host name or IP address of the device # noqa: E501
:return: The name of this Device. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Device.
The host name or IP address of the device # noqa: E501
:param name: The name of this Device. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def deleted_time_in_ms(self):
"""Gets the deleted_time_in_ms of this Device. # noqa: E501
The time in milliseconds that the device has been dead for, or since the AWS device was filtered out # noqa: E501
:return: The deleted_time_in_ms of this Device. # noqa: E501
:rtype: int
"""
return self._deleted_time_in_ms
@deleted_time_in_ms.setter
def deleted_time_in_ms(self, deleted_time_in_ms):
"""Sets the deleted_time_in_ms of this Device.
The time in milliseconds that the device has been dead for, or since the AWS device was filtered out # noqa: E501
:param deleted_time_in_ms: The deleted_time_in_ms of this Device. # noqa: E501
:type: int
"""
self._deleted_time_in_ms = deleted_time_in_ms
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Device, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Device):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
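A minimal usage sketch for the generated `Device` model, not part of the SDK file above. It assumes the import path implied by the other files (`logicmonitor_sdk.models.device`), that the constructor accepts keyword arguments mirroring `swagger_types`, and that `name`, `preferred_collector_id`, and `host_group_ids` are required (their setters above reject `None`); `display_name` is assumed from the full model and does not appear in this excerpt. Field values are placeholders.
```python
# Hedged sketch, assuming the constructor layout described above.
from logicmonitor_sdk.models.device import Device

device = Device(
    name="192.0.2.10",           # host name or IP address (setter rejects None)
    display_name="prod-web-01",  # assumed required field, not shown in this excerpt
    preferred_collector_id=1,    # collector assigned to monitor the device
    host_group_ids="2,4",        # comma-separated group ids (setter rejects None)
)
device.disable_alerting = True   # plain attribute assignment goes through the setter

print(device.to_dict())          # nested models and lists are serialized recursively
print(device.to_str())           # pprint-formatted version of the same dict
```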
#### File: logicmonitor_sdk/models/escalating_chain.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.chain import Chain # noqa: F401,E501
from logicmonitor_sdk.models.recipient import Recipient # noqa: F401,E501
class EscalatingChain(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'in_alerting': 'bool',
'throttling_alerts': 'int',
'enable_throttling': 'bool',
'destinations': 'list[Chain]',
'name': 'str',
'description': 'str',
'id': 'int',
'cc_destinations': 'list[Recipient]',
'throttling_period': 'int'
}
attribute_map = {
'in_alerting': 'inAlerting',
'throttling_alerts': 'throttlingAlerts',
'enable_throttling': 'enableThrottling',
'destinations': 'destinations',
'name': 'name',
'description': 'description',
'id': 'id',
'cc_destinations': 'ccDestinations',
'throttling_period': 'throttlingPeriod'
}
def __init__(self, in_alerting=None, throttling_alerts=None, enable_throttling=None, destinations=None, name=None, description=None, id=None, cc_destinations=None, throttling_period=None): # noqa: E501
"""EscalatingChain - a model defined in Swagger""" # noqa: E501
self._in_alerting = None
self._throttling_alerts = None
self._enable_throttling = None
self._destinations = None
self._name = None
self._description = None
self._id = None
self._cc_destinations = None
self._throttling_period = None
self.discriminator = None
if in_alerting is not None:
self.in_alerting = in_alerting
if throttling_alerts is not None:
self.throttling_alerts = throttling_alerts
if enable_throttling is not None:
self.enable_throttling = enable_throttling
self.destinations = destinations
self.name = name
if description is not None:
self.description = description
if id is not None:
self.id = id
if cc_destinations is not None:
self.cc_destinations = cc_destinations
if throttling_period is not None:
self.throttling_period = throttling_period
@property
def in_alerting(self):
"""Gets the in_alerting of this EscalatingChain. # noqa: E501
:return: The in_alerting of this EscalatingChain. # noqa: E501
:rtype: bool
"""
return self._in_alerting
@in_alerting.setter
def in_alerting(self, in_alerting):
"""Sets the in_alerting of this EscalatingChain.
:param in_alerting: The in_alerting of this EscalatingChain. # noqa: E501
:type: bool
"""
self._in_alerting = in_alerting
@property
def throttling_alerts(self):
"""Gets the throttling_alerts of this EscalatingChain. # noqa: E501
:return: The throttling_alerts of this EscalatingChain. # noqa: E501
:rtype: int
"""
return self._throttling_alerts
@throttling_alerts.setter
def throttling_alerts(self, throttling_alerts):
"""Sets the throttling_alerts of this EscalatingChain.
:param throttling_alerts: The throttling_alerts of this EscalatingChain. # noqa: E501
:type: int
"""
self._throttling_alerts = throttling_alerts
@property
def enable_throttling(self):
"""Gets the enable_throttling of this EscalatingChain. # noqa: E501
:return: The enable_throttling of this EscalatingChain. # noqa: E501
:rtype: bool
"""
return self._enable_throttling
@enable_throttling.setter
def enable_throttling(self, enable_throttling):
"""Sets the enable_throttling of this EscalatingChain.
:param enable_throttling: The enable_throttling of this EscalatingChain. # noqa: E501
:type: bool
"""
self._enable_throttling = enable_throttling
@property
def destinations(self):
"""Gets the destinations of this EscalatingChain. # noqa: E501
:return: The destinations of this EscalatingChain. # noqa: E501
:rtype: list[Chain]
"""
return self._destinations
@destinations.setter
def destinations(self, destinations):
"""Sets the destinations of this EscalatingChain.
:param destinations: The destinations of this EscalatingChain. # noqa: E501
:type: list[Chain]
"""
if destinations is None:
raise ValueError("Invalid value for `destinations`, must not be `None`") # noqa: E501
self._destinations = destinations
@property
def name(self):
"""Gets the name of this EscalatingChain. # noqa: E501
:return: The name of this EscalatingChain. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this EscalatingChain.
:param name: The name of this EscalatingChain. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def description(self):
"""Gets the description of this EscalatingChain. # noqa: E501
:return: The description of this EscalatingChain. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this EscalatingChain.
:param description: The description of this EscalatingChain. # noqa: E501
:type: str
"""
self._description = description
@property
def id(self):
"""Gets the id of this EscalatingChain. # noqa: E501
:return: The id of this EscalatingChain. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this EscalatingChain.
:param id: The id of this EscalatingChain. # noqa: E501
:type: int
"""
self._id = id
@property
def cc_destinations(self):
"""Gets the cc_destinations of this EscalatingChain. # noqa: E501
:return: The cc_destinations of this EscalatingChain. # noqa: E501
:rtype: list[Recipient]
"""
return self._cc_destinations
@cc_destinations.setter
def cc_destinations(self, cc_destinations):
"""Sets the cc_destinations of this EscalatingChain.
:param cc_destinations: The cc_destinations of this EscalatingChain. # noqa: E501
:type: list[Recipient]
"""
self._cc_destinations = cc_destinations
@property
def throttling_period(self):
"""Gets the throttling_period of this EscalatingChain. # noqa: E501
:return: The throttling_period of this EscalatingChain. # noqa: E501
:rtype: int
"""
return self._throttling_period
@throttling_period.setter
def throttling_period(self, throttling_period):
"""Sets the throttling_period of this EscalatingChain.
:param throttling_period: The throttling_period of this EscalatingChain. # noqa: E501
:type: int
"""
self._throttling_period = throttling_period
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(EscalatingChain, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EscalatingChain):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
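A short usage sketch for `EscalatingChain`, not part of the generated file. `name` and `destinations` are the required fields (their setters reject `None`); `Chain`'s constructor is not shown in this excerpt, so an empty destination list is used purely to satisfy the check.
```python
# Hedged sketch: real escalation chains would carry Chain objects in `destinations`.
from logicmonitor_sdk.models.escalating_chain import EscalatingChain

chain = EscalatingChain(
    name="ops-escalation",   # required: setter rejects None
    destinations=[],         # required: normally a list of Chain objects
    enable_throttling=True,
    throttling_alerts=10,    # assumed meaning: max alerts per throttling period
    throttling_period=30,
)
print(chain.to_dict())
```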
#### File: logicmonitor_sdk/models/google_map_widget.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.point_source import PointSource # noqa: F401,E501
from logicmonitor_sdk.models.widget import Widget # noqa: F401,E501
class GoogleMapWidget(Widget):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'last_updated_by': 'str',
'user_permission': 'str',
'dashboard_id': 'int',
'name': 'str',
'description': 'str',
'last_updated_on': 'int',
'theme': 'str',
'interval': 'int',
'id': 'int',
'type': 'str',
'timescale': 'str',
'ack_checked': 'bool',
'display_error_alert': 'bool',
'display_warn_alert': 'bool',
'display_critical_alert': 'bool',
'sdt_checked': 'bool',
'map_points': 'list[PointSource]'
}
attribute_map = {
'last_updated_by': 'lastUpdatedBy',
'user_permission': 'userPermission',
'dashboard_id': 'dashboardId',
'name': 'name',
'description': 'description',
'last_updated_on': 'lastUpdatedOn',
'theme': 'theme',
'interval': 'interval',
'id': 'id',
'type': 'type',
'timescale': 'timescale',
'ack_checked': 'ackChecked',
'display_error_alert': 'displayErrorAlert',
'display_warn_alert': 'displayWarnAlert',
'display_critical_alert': 'displayCriticalAlert',
'sdt_checked': 'sdtChecked',
'map_points': 'mapPoints'
}
def __init__(self, last_updated_by=None, user_permission=None, dashboard_id=None, name=None, description=None, last_updated_on=None, theme=None, interval=None, id=None, type=None, timescale=None, ack_checked=None, display_error_alert=None, display_warn_alert=None, display_critical_alert=None, sdt_checked=None, map_points=None): # noqa: E501
"""GoogleMapWidget - a model defined in Swagger""" # noqa: E501
self._last_updated_by = None
self._user_permission = None
self._dashboard_id = None
self._name = None
self._description = None
self._last_updated_on = None
self._theme = None
self._interval = None
self._id = None
self._type = None
self._timescale = None
self._ack_checked = None
self._display_error_alert = None
self._display_warn_alert = None
self._display_critical_alert = None
self._sdt_checked = None
self._map_points = None
self.discriminator = None
if last_updated_by is not None:
self.last_updated_by = last_updated_by
if user_permission is not None:
self.user_permission = user_permission
self.dashboard_id = dashboard_id
self.name = name
if description is not None:
self.description = description
if last_updated_on is not None:
self.last_updated_on = last_updated_on
if theme is not None:
self.theme = theme
if interval is not None:
self.interval = interval
if id is not None:
self.id = id
self.type = type
if timescale is not None:
self.timescale = timescale
if ack_checked is not None:
self.ack_checked = ack_checked
if display_error_alert is not None:
self.display_error_alert = display_error_alert
if display_warn_alert is not None:
self.display_warn_alert = display_warn_alert
if display_critical_alert is not None:
self.display_critical_alert = display_critical_alert
if sdt_checked is not None:
self.sdt_checked = sdt_checked
self.map_points = map_points
@property
def last_updated_by(self):
"""Gets the last_updated_by of this GoogleMapWidget. # noqa: E501
The user that last updated the widget # noqa: E501
:return: The last_updated_by of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, last_updated_by):
"""Sets the last_updated_by of this GoogleMapWidget.
The user that last updated the widget # noqa: E501
:param last_updated_by: The last_updated_by of this GoogleMapWidget. # noqa: E501
:type: str
"""
self._last_updated_by = last_updated_by
@property
def user_permission(self):
"""Gets the user_permission of this GoogleMapWidget. # noqa: E501
The permission level of the user who last modified the widget # noqa: E501
:return: The user_permission of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._user_permission
@user_permission.setter
def user_permission(self, user_permission):
"""Sets the user_permission of this GoogleMapWidget.
The permission level of the user who last modified the widget # noqa: E501
:param user_permission: The user_permission of this GoogleMapWidget. # noqa: E501
:type: str
"""
self._user_permission = user_permission
@property
def dashboard_id(self):
"""Gets the dashboard_id of this GoogleMapWidget. # noqa: E501
The id of the dashboard the widget belongs to # noqa: E501
:return: The dashboard_id of this GoogleMapWidget. # noqa: E501
:rtype: int
"""
return self._dashboard_id
@dashboard_id.setter
def dashboard_id(self, dashboard_id):
"""Sets the dashboard_id of this GoogleMapWidget.
The id of the dashboard the widget belongs to # noqa: E501
:param dashboard_id: The dashboard_id of this GoogleMapWidget. # noqa: E501
:type: int
"""
if dashboard_id is None:
raise ValueError("Invalid value for `dashboard_id`, must not be `None`") # noqa: E501
self._dashboard_id = dashboard_id
@property
def name(self):
"""Gets the name of this GoogleMapWidget. # noqa: E501
The name of the widget # noqa: E501
:return: The name of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this GoogleMapWidget.
The name of the widget # noqa: E501
:param name: The name of this GoogleMapWidget. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def description(self):
"""Gets the description of this GoogleMapWidget. # noqa: E501
The description of the widget # noqa: E501
:return: The description of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this GoogleMapWidget.
The description of the widget # noqa: E501
:param description: The description of this GoogleMapWidget. # noqa: E501
:type: str
"""
self._description = description
@property
def last_updated_on(self):
"""Gets the last_updated_on of this GoogleMapWidget. # noqa: E501
The time that corresponds to when the widget was last updated, in epoch format # noqa: E501
:return: The last_updated_on of this GoogleMapWidget. # noqa: E501
:rtype: int
"""
return self._last_updated_on
@last_updated_on.setter
def last_updated_on(self, last_updated_on):
"""Sets the last_updated_on of this GoogleMapWidget.
The time that corresponds to when the widget was last updated, in epoch format # noqa: E501
:param last_updated_on: The last_updated_on of this GoogleMapWidget. # noqa: E501
:type: int
"""
self._last_updated_on = last_updated_on
@property
def theme(self):
"""Gets the theme of this GoogleMapWidget. # noqa: E501
The color scheme of the widget. Options are: borderPurple | borderGray | borderBlue | solidPurple | solidGray | solidBlue | simplePurple | simpleBlue | simpleGray | newBorderGray | newBorderBlue | newBorderDarkBlue | newSolidGray | newSolidBlue | newSolidDarkBlue | newSimpleGray | newSimpleBlue | newSimpleDarkBlue # noqa: E501
:return: The theme of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._theme
@theme.setter
def theme(self, theme):
"""Sets the theme of this GoogleMapWidget.
The color scheme of the widget. Options are: borderPurple | borderGray | borderBlue | solidPurple | solidGray | solidBlue | simplePurple | simpleBlue | simpleGray | newBorderGray | newBorderBlue | newBorderDarkBlue | newSolidGray | newSolidBlue | newSolidDarkBlue | newSimpleGray | newSimpleBlue | newSimpleDarkBlue # noqa: E501
:param theme: The theme of this GoogleMapWidget. # noqa: E501
:type: str
"""
self._theme = theme
@property
def interval(self):
"""Gets the interval of this GoogleMapWidget. # noqa: E501
The refresh interval of the widget, in minutes # noqa: E501
:return: The interval of this GoogleMapWidget. # noqa: E501
:rtype: int
"""
return self._interval
@interval.setter
def interval(self, interval):
"""Sets the interval of this GoogleMapWidget.
The refresh interval of the widget, in minutes # noqa: E501
:param interval: The interval of this GoogleMapWidget. # noqa: E501
:type: int
"""
self._interval = interval
@property
def id(self):
"""Gets the id of this GoogleMapWidget. # noqa: E501
The Id of the widget # noqa: E501
:return: The id of this GoogleMapWidget. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this GoogleMapWidget.
The Id of the widget # noqa: E501
:param id: The id of this GoogleMapWidget. # noqa: E501
:type: int
"""
self._id = id
@property
def type(self):
"""Gets the type of this GoogleMapWidget. # noqa: E501
alert | deviceNOC | html | serviceOverallStatus | sgraph | ngraph | serviceNOC | serviceSLA | bigNumber | gmap | serviceIndividualStatus | gauge | pieChart | ngraph | batchjob # noqa: E501
:return: The type of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this GoogleMapWidget.
alert | deviceNOC | html | serviceOverallStatus | sgraph | ngraph | serviceNOC | serviceSLA | bigNumber | gmap | serviceIndividualStatus | gauge | pieChart | ngraph | batchjob # noqa: E501
:param type: The type of this GoogleMapWidget. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def timescale(self):
"""Gets the timescale of this GoogleMapWidget. # noqa: E501
The default timescale of the widget # noqa: E501
:return: The timescale of this GoogleMapWidget. # noqa: E501
:rtype: str
"""
return self._timescale
@timescale.setter
def timescale(self, timescale):
"""Sets the timescale of this GoogleMapWidget.
The default timescale of the widget # noqa: E501
:param timescale: The timescale of this GoogleMapWidget. # noqa: E501
:type: str
"""
self._timescale = timescale
@property
def ack_checked(self):
"""Gets the ack_checked of this GoogleMapWidget. # noqa: E501
Whether ACKed alerts should be displayed # noqa: E501
:return: The ack_checked of this GoogleMapWidget. # noqa: E501
:rtype: bool
"""
return self._ack_checked
@ack_checked.setter
def ack_checked(self, ack_checked):
"""Sets the ack_checked of this GoogleMapWidget.
Whether ACKed alerts should be displayed # noqa: E501
:param ack_checked: The ack_checked of this GoogleMapWidget. # noqa: E501
:type: bool
"""
self._ack_checked = ack_checked
@property
def display_error_alert(self):
"""Gets the display_error_alert of this GoogleMapWidget. # noqa: E501
Whether error alerts should be displayed # noqa: E501
:return: The display_error_alert of this GoogleMapWidget. # noqa: E501
:rtype: bool
"""
return self._display_error_alert
@display_error_alert.setter
def display_error_alert(self, display_error_alert):
"""Sets the display_error_alert of this GoogleMapWidget.
Whether error alerts should be displayed # noqa: E501
:param display_error_alert: The display_error_alert of this GoogleMapWidget. # noqa: E501
:type: bool
"""
self._display_error_alert = display_error_alert
@property
def display_warn_alert(self):
"""Gets the display_warn_alert of this GoogleMapWidget. # noqa: E501
Whether warning alerts should be displayed # noqa: E501
:return: The display_warn_alert of this GoogleMapWidget. # noqa: E501
:rtype: bool
"""
return self._display_warn_alert
@display_warn_alert.setter
def display_warn_alert(self, display_warn_alert):
"""Sets the display_warn_alert of this GoogleMapWidget.
Whether warning alerts should be displayed # noqa: E501
:param display_warn_alert: The display_warn_alert of this GoogleMapWidget. # noqa: E501
:type: bool
"""
self._display_warn_alert = display_warn_alert
@property
def display_critical_alert(self):
"""Gets the display_critical_alert of this GoogleMapWidget. # noqa: E501
Whether critical alerts should be displayed # noqa: E501
:return: The display_critical_alert of this GoogleMapWidget. # noqa: E501
:rtype: bool
"""
return self._display_critical_alert
@display_critical_alert.setter
def display_critical_alert(self, display_critical_alert):
"""Sets the display_critical_alert of this GoogleMapWidget.
Whether critical alerts should be displayed # noqa: E501
:param display_critical_alert: The display_critical_alert of this GoogleMapWidget. # noqa: E501
:type: bool
"""
self._display_critical_alert = display_critical_alert
@property
def sdt_checked(self):
"""Gets the sdt_checked of this GoogleMapWidget. # noqa: E501
Whether alerts occurring during an SDT period should be displayed # noqa: E501
:return: The sdt_checked of this GoogleMapWidget. # noqa: E501
:rtype: bool
"""
return self._sdt_checked
@sdt_checked.setter
def sdt_checked(self, sdt_checked):
"""Sets the sdt_checked of this GoogleMapWidget.
Whether alerts occurring during an SDT period should be displayed # noqa: E501
:param sdt_checked: The sdt_checked of this GoogleMapWidget. # noqa: E501
:type: bool
"""
self._sdt_checked = sdt_checked
@property
def map_points(self):
"""Gets the map_points of this GoogleMapWidget. # noqa: E501
The points info # noqa: E501
:return: The map_points of this GoogleMapWidget. # noqa: E501
:rtype: list[PointSource]
"""
return self._map_points
@map_points.setter
def map_points(self, map_points):
"""Sets the map_points of this GoogleMapWidget.
The points info # noqa: E501
:param map_points: The map_points of this GoogleMapWidget. # noqa: E501
:type: list[PointSource]
"""
if map_points is None:
raise ValueError("Invalid value for `map_points`, must not be `None`") # noqa: E501
self._map_points = map_points
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(GoogleMapWidget, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GoogleMapWidget):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
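A hedged sketch for `GoogleMapWidget`. `dashboard_id`, `name`, `type`, and `map_points` are required (their setters reject `None`); `PointSource`'s constructor is not shown in this excerpt, so an empty list stands in for the real map points.
```python
from logicmonitor_sdk.models.google_map_widget import GoogleMapWidget

widget = GoogleMapWidget(
    dashboard_id=12,              # required: id of the owning dashboard (placeholder)
    name="Office locations",      # required: widget name
    type="gmap",                  # required: one of the widget type strings listed above
    map_points=[],                # required: normally a list of PointSource objects
    display_critical_alert=True,
    sdt_checked=False,
)
print(widget.to_dict())
```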
#### File: logicmonitor_sdk/models/graph_line.py
```python
import pprint
import re # noqa: F401
import six
class GraphLine(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'color_name': 'str',
'data_point_name': 'str',
'data_point_id': 'int',
'legend': 'str',
'is_virtual_data_point': 'bool',
'type': 'int'
}
attribute_map = {
'color_name': 'colorName',
'data_point_name': 'dataPointName',
'data_point_id': 'dataPointId',
'legend': 'legend',
'is_virtual_data_point': 'isVirtualDataPoint',
'type': 'type'
}
def __init__(self, color_name=None, data_point_name=None, data_point_id=None, legend=None, is_virtual_data_point=None, type=None): # noqa: E501
"""GraphLine - a model defined in Swagger""" # noqa: E501
self._color_name = None
self._data_point_name = None
self._data_point_id = None
self._legend = None
self._is_virtual_data_point = None
self._type = None
self.discriminator = None
if color_name is not None:
self.color_name = color_name
if data_point_name is not None:
self.data_point_name = data_point_name
if data_point_id is not None:
self.data_point_id = data_point_id
if legend is not None:
self.legend = legend
if is_virtual_data_point is not None:
self.is_virtual_data_point = is_virtual_data_point
if type is not None:
self.type = type
@property
def color_name(self):
"""Gets the color_name of this GraphLine. # noqa: E501
:return: The color_name of this GraphLine. # noqa: E501
:rtype: str
"""
return self._color_name
@color_name.setter
def color_name(self, color_name):
"""Sets the color_name of this GraphLine.
:param color_name: The color_name of this GraphLine. # noqa: E501
:type: str
"""
self._color_name = color_name
@property
def data_point_name(self):
"""Gets the data_point_name of this GraphLine. # noqa: E501
:return: The data_point_name of this GraphLine. # noqa: E501
:rtype: str
"""
return self._data_point_name
@data_point_name.setter
def data_point_name(self, data_point_name):
"""Sets the data_point_name of this GraphLine.
:param data_point_name: The data_point_name of this GraphLine. # noqa: E501
:type: str
"""
self._data_point_name = data_point_name
@property
def data_point_id(self):
"""Gets the data_point_id of this GraphLine. # noqa: E501
:return: The data_point_id of this GraphLine. # noqa: E501
:rtype: int
"""
return self._data_point_id
@data_point_id.setter
def data_point_id(self, data_point_id):
"""Sets the data_point_id of this GraphLine.
:param data_point_id: The data_point_id of this GraphLine. # noqa: E501
:type: int
"""
self._data_point_id = data_point_id
@property
def legend(self):
"""Gets the legend of this GraphLine. # noqa: E501
:return: The legend of this GraphLine. # noqa: E501
:rtype: str
"""
return self._legend
@legend.setter
def legend(self, legend):
"""Sets the legend of this GraphLine.
:param legend: The legend of this GraphLine. # noqa: E501
:type: str
"""
self._legend = legend
@property
def is_virtual_data_point(self):
"""Gets the is_virtual_data_point of this GraphLine. # noqa: E501
:return: The is_virtual_data_point of this GraphLine. # noqa: E501
:rtype: bool
"""
return self._is_virtual_data_point
@is_virtual_data_point.setter
def is_virtual_data_point(self, is_virtual_data_point):
"""Sets the is_virtual_data_point of this GraphLine.
:param is_virtual_data_point: The is_virtual_data_point of this GraphLine. # noqa: E501
:type: bool
"""
self._is_virtual_data_point = is_virtual_data_point
@property
def type(self):
"""Gets the type of this GraphLine. # noqa: E501
:return: The type of this GraphLine. # noqa: E501
:rtype: int
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this GraphLine.
:param type: The type of this GraphLine. # noqa: E501
:type: int
"""
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(GraphLine, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GraphLine):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
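A brief sketch for `GraphLine`, whose fields are all optional. Note that `to_dict()` keys the result by the snake_case attribute names from `swagger_types`, while `attribute_map` holds the camelCase names used in the JSON definition.
```python
from logicmonitor_sdk.models.graph_line import GraphLine

line = GraphLine(
    color_name="blue",
    data_point_name="CPUBusyPercent",  # placeholder datapoint name
    legend="CPU busy",
    is_virtual_data_point=False,
    type=1,
)
print(line.to_dict())   # keys are snake_case: color_name, data_point_name, ...
```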
#### File: logicmonitor_sdk/models/graph_plot_line.py
```python
import pprint
import re # noqa: F401
import six
class GraphPlotLine(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'color_name': 'str',
'std': 'object',
'visible': 'bool',
'color': 'str',
'data': 'list[object]',
'max': 'object',
'legend': 'str',
'description': 'str',
'label': 'str',
'type': 'str',
'min': 'object',
'avg': 'object',
'decimal': 'int',
'use_y_max': 'bool'
}
attribute_map = {
'color_name': 'colorName',
'std': 'std',
'visible': 'visible',
'color': 'color',
'data': 'data',
'max': 'max',
'legend': 'legend',
'description': 'description',
'label': 'label',
'type': 'type',
'min': 'min',
'avg': 'avg',
'decimal': 'decimal',
'use_y_max': 'useYMax'
}
def __init__(self, color_name=None, std=None, visible=None, color=None, data=None, max=None, legend=None, description=None, label=None, type=None, min=None, avg=None, decimal=None, use_y_max=None): # noqa: E501
"""GraphPlotLine - a model defined in Swagger""" # noqa: E501
self._color_name = None
self._std = None
self._visible = None
self._color = None
self._data = None
self._max = None
self._legend = None
self._description = None
self._label = None
self._type = None
self._min = None
self._avg = None
self._decimal = None
self._use_y_max = None
self.discriminator = None
if color_name is not None:
self.color_name = color_name
if std is not None:
self.std = std
if visible is not None:
self.visible = visible
if color is not None:
self.color = color
if data is not None:
self.data = data
if max is not None:
self.max = max
if legend is not None:
self.legend = legend
if description is not None:
self.description = description
if label is not None:
self.label = label
if type is not None:
self.type = type
if min is not None:
self.min = min
if avg is not None:
self.avg = avg
if decimal is not None:
self.decimal = decimal
if use_y_max is not None:
self.use_y_max = use_y_max
@property
def color_name(self):
"""Gets the color_name of this GraphPlotLine. # noqa: E501
:return: The color_name of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._color_name
@color_name.setter
def color_name(self, color_name):
"""Sets the color_name of this GraphPlotLine.
:param color_name: The color_name of this GraphPlotLine. # noqa: E501
:type: str
"""
self._color_name = color_name
@property
def std(self):
"""Gets the std of this GraphPlotLine. # noqa: E501
:return: The std of this GraphPlotLine. # noqa: E501
:rtype: object
"""
return self._std
@std.setter
def std(self, std):
"""Sets the std of this GraphPlotLine.
:param std: The std of this GraphPlotLine. # noqa: E501
:type: object
"""
self._std = std
@property
def visible(self):
"""Gets the visible of this GraphPlotLine. # noqa: E501
:return: The visible of this GraphPlotLine. # noqa: E501
:rtype: bool
"""
return self._visible
@visible.setter
def visible(self, visible):
"""Sets the visible of this GraphPlotLine.
:param visible: The visible of this GraphPlotLine. # noqa: E501
:type: bool
"""
self._visible = visible
@property
def color(self):
"""Gets the color of this GraphPlotLine. # noqa: E501
:return: The color of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._color
@color.setter
def color(self, color):
"""Sets the color of this GraphPlotLine.
:param color: The color of this GraphPlotLine. # noqa: E501
:type: str
"""
self._color = color
@property
def data(self):
"""Gets the data of this GraphPlotLine. # noqa: E501
:return: The data of this GraphPlotLine. # noqa: E501
:rtype: list[object]
"""
return self._data
@data.setter
def data(self, data):
"""Sets the data of this GraphPlotLine.
:param data: The data of this GraphPlotLine. # noqa: E501
:type: list[object]
"""
self._data = data
@property
def max(self):
"""Gets the max of this GraphPlotLine. # noqa: E501
:return: The max of this GraphPlotLine. # noqa: E501
:rtype: object
"""
return self._max
@max.setter
def max(self, max):
"""Sets the max of this GraphPlotLine.
:param max: The max of this GraphPlotLine. # noqa: E501
:type: object
"""
self._max = max
@property
def legend(self):
"""Gets the legend of this GraphPlotLine. # noqa: E501
:return: The legend of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._legend
@legend.setter
def legend(self, legend):
"""Sets the legend of this GraphPlotLine.
:param legend: The legend of this GraphPlotLine. # noqa: E501
:type: str
"""
self._legend = legend
@property
def description(self):
"""Gets the description of this GraphPlotLine. # noqa: E501
:return: The description of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this GraphPlotLine.
:param description: The description of this GraphPlotLine. # noqa: E501
:type: str
"""
self._description = description
@property
def label(self):
"""Gets the label of this GraphPlotLine. # noqa: E501
:return: The label of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._label
@label.setter
def label(self, label):
"""Sets the label of this GraphPlotLine.
:param label: The label of this GraphPlotLine. # noqa: E501
:type: str
"""
self._label = label
@property
def type(self):
"""Gets the type of this GraphPlotLine. # noqa: E501
:return: The type of this GraphPlotLine. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this GraphPlotLine.
:param type: The type of this GraphPlotLine. # noqa: E501
:type: str
"""
self._type = type
@property
def min(self):
"""Gets the min of this GraphPlotLine. # noqa: E501
:return: The min of this GraphPlotLine. # noqa: E501
:rtype: object
"""
return self._min
@min.setter
def min(self, min):
"""Sets the min of this GraphPlotLine.
:param min: The min of this GraphPlotLine. # noqa: E501
:type: object
"""
self._min = min
@property
def avg(self):
"""Gets the avg of this GraphPlotLine. # noqa: E501
:return: The avg of this GraphPlotLine. # noqa: E501
:rtype: object
"""
return self._avg
@avg.setter
def avg(self, avg):
"""Sets the avg of this GraphPlotLine.
:param avg: The avg of this GraphPlotLine. # noqa: E501
:type: object
"""
self._avg = avg
@property
def decimal(self):
"""Gets the decimal of this GraphPlotLine. # noqa: E501
:return: The decimal of this GraphPlotLine. # noqa: E501
:rtype: int
"""
return self._decimal
@decimal.setter
def decimal(self, decimal):
"""Sets the decimal of this GraphPlotLine.
:param decimal: The decimal of this GraphPlotLine. # noqa: E501
:type: int
"""
self._decimal = decimal
@property
def use_y_max(self):
"""Gets the use_y_max of this GraphPlotLine. # noqa: E501
:return: The use_y_max of this GraphPlotLine. # noqa: E501
:rtype: bool
"""
return self._use_y_max
@use_y_max.setter
def use_y_max(self, use_y_max):
"""Sets the use_y_max of this GraphPlotLine.
:param use_y_max: The use_y_max of this GraphPlotLine. # noqa: E501
:type: bool
"""
self._use_y_max = use_y_max
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(GraphPlotLine, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, GraphPlotLine):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
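A brief sketch for `GraphPlotLine`. All fields are optional; `data`, `min`, `max`, `avg`, and `std` are typed as plain objects or lists in `swagger_types`, so numeric placeholders are used here.
```python
from logicmonitor_sdk.models.graph_plot_line import GraphPlotLine

plot = GraphPlotLine(
    legend="Inbound traffic",
    data=[1.2, 3.4, 2.8],  # raw series values (list[object] in swagger_types)
    min=1.2,
    max=3.4,
    avg=2.47,
    decimal=2,             # number of decimal places to display
    use_y_max=True,
)
print(plot.to_str())       # pprint of the dict produced by to_dict()
```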
#### File: logicmonitor_sdk/models/map_item_info.py
```python
import pprint
import re # noqa: F401
import six
class MapItemInfo(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alert_status': 'str',
'display_name': 'str',
'formatted_location': 'str',
'latitude': 'str',
'description': 'str',
'type': 'str',
'sdt_status': 'str',
'active_status': 'str',
'name': 'str',
'sub_type': 'str',
'location': 'str',
'id': 'int',
'longitude': 'str'
}
attribute_map = {
'alert_status': 'alertStatus',
'display_name': 'displayName',
'formatted_location': 'formattedLocation',
'latitude': 'latitude',
'description': 'description',
'type': 'type',
'sdt_status': 'sdtStatus',
'active_status': 'activeStatus',
'name': 'name',
'sub_type': 'subType',
'location': 'location',
'id': 'id',
'longitude': 'longitude'
}
def __init__(self, alert_status=None, display_name=None, formatted_location=None, latitude=None, description=None, type=None, sdt_status=None, active_status=None, name=None, sub_type=None, location=None, id=None, longitude=None): # noqa: E501
"""MapItemInfo - a model defined in Swagger""" # noqa: E501
self._alert_status = None
self._display_name = None
self._formatted_location = None
self._latitude = None
self._description = None
self._type = None
self._sdt_status = None
self._active_status = None
self._name = None
self._sub_type = None
self._location = None
self._id = None
self._longitude = None
self.discriminator = None
if alert_status is not None:
self.alert_status = alert_status
if display_name is not None:
self.display_name = display_name
if formatted_location is not None:
self.formatted_location = formatted_location
if latitude is not None:
self.latitude = latitude
if description is not None:
self.description = description
if type is not None:
self.type = type
if sdt_status is not None:
self.sdt_status = sdt_status
if active_status is not None:
self.active_status = active_status
if name is not None:
self.name = name
if sub_type is not None:
self.sub_type = sub_type
if location is not None:
self.location = location
if id is not None:
self.id = id
if longitude is not None:
self.longitude = longitude
@property
def alert_status(self):
"""Gets the alert_status of this MapItemInfo. # noqa: E501
:return: The alert_status of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._alert_status
@alert_status.setter
def alert_status(self, alert_status):
"""Sets the alert_status of this MapItemInfo.
:param alert_status: The alert_status of this MapItemInfo. # noqa: E501
:type: str
"""
self._alert_status = alert_status
@property
def display_name(self):
"""Gets the display_name of this MapItemInfo. # noqa: E501
:return: The display_name of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""Sets the display_name of this MapItemInfo.
:param display_name: The display_name of this MapItemInfo. # noqa: E501
:type: str
"""
self._display_name = display_name
@property
def formatted_location(self):
"""Gets the formatted_location of this MapItemInfo. # noqa: E501
:return: The formatted_location of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._formatted_location
@formatted_location.setter
def formatted_location(self, formatted_location):
"""Sets the formatted_location of this MapItemInfo.
:param formatted_location: The formatted_location of this MapItemInfo. # noqa: E501
:type: str
"""
self._formatted_location = formatted_location
@property
def latitude(self):
"""Gets the latitude of this MapItemInfo. # noqa: E501
:return: The latitude of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._latitude
@latitude.setter
def latitude(self, latitude):
"""Sets the latitude of this MapItemInfo.
:param latitude: The latitude of this MapItemInfo. # noqa: E501
:type: str
"""
self._latitude = latitude
@property
def description(self):
"""Gets the description of this MapItemInfo. # noqa: E501
:return: The description of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this MapItemInfo.
:param description: The description of this MapItemInfo. # noqa: E501
:type: str
"""
self._description = description
@property
def type(self):
"""Gets the type of this MapItemInfo. # noqa: E501
:return: The type of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this MapItemInfo.
:param type: The type of this MapItemInfo. # noqa: E501
:type: str
"""
self._type = type
@property
def sdt_status(self):
"""Gets the sdt_status of this MapItemInfo. # noqa: E501
:return: The sdt_status of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._sdt_status
@sdt_status.setter
def sdt_status(self, sdt_status):
"""Sets the sdt_status of this MapItemInfo.
:param sdt_status: The sdt_status of this MapItemInfo. # noqa: E501
:type: str
"""
self._sdt_status = sdt_status
@property
def active_status(self):
"""Gets the active_status of this MapItemInfo. # noqa: E501
:return: The active_status of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._active_status
@active_status.setter
def active_status(self, active_status):
"""Sets the active_status of this MapItemInfo.
:param active_status: The active_status of this MapItemInfo. # noqa: E501
:type: str
"""
self._active_status = active_status
@property
def name(self):
"""Gets the name of this MapItemInfo. # noqa: E501
:return: The name of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this MapItemInfo.
:param name: The name of this MapItemInfo. # noqa: E501
:type: str
"""
self._name = name
@property
def sub_type(self):
"""Gets the sub_type of this MapItemInfo. # noqa: E501
:return: The sub_type of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._sub_type
@sub_type.setter
def sub_type(self, sub_type):
"""Sets the sub_type of this MapItemInfo.
:param sub_type: The sub_type of this MapItemInfo. # noqa: E501
:type: str
"""
self._sub_type = sub_type
@property
def location(self):
"""Gets the location of this MapItemInfo. # noqa: E501
:return: The location of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._location
@location.setter
def location(self, location):
"""Sets the location of this MapItemInfo.
:param location: The location of this MapItemInfo. # noqa: E501
:type: str
"""
self._location = location
@property
def id(self):
"""Gets the id of this MapItemInfo. # noqa: E501
:return: The id of this MapItemInfo. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this MapItemInfo.
:param id: The id of this MapItemInfo. # noqa: E501
:type: int
"""
self._id = id
@property
def longitude(self):
"""Gets the longitude of this MapItemInfo. # noqa: E501
:return: The longitude of this MapItemInfo. # noqa: E501
:rtype: str
"""
return self._longitude
@longitude.setter
def longitude(self, longitude):
"""Sets the longitude of this MapItemInfo.
:param longitude: The longitude of this MapItemInfo. # noqa: E501
:type: str
"""
self._longitude = longitude
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(MapItemInfo, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MapItemInfo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
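For orientation, the generated models above are plain data containers. The snippet below is a minimal sketch, not part of the SDK source: the field values are invented, and it assumes the package is importable as `logicmonitor_sdk`. It shows that `to_dict()` keys use the Python attribute names (not the JSON names in `attribute_map`) and that `__eq__` compares the full attribute dict.
```python
# Minimal sketch (not SDK source); values are invented for illustration.
from logicmonitor_sdk.models.map_item_info import MapItemInfo
item = MapItemInfo(display_name="nyc-edge-01", latitude="40.7128",
                   longitude="-74.0060", type="device", id=42)
# to_dict() keys are the python attribute names, not the JSON keys in attribute_map.
print(item.to_dict()["display_name"])  # -> 'nyc-edge-01'
# __eq__ compares the full attribute dict, so identically populated models are equal.
print(item == MapItemInfo(display_name="nyc-edge-01", latitude="40.7128",
                          longitude="-74.0060", type="device", id=42))  # -> True
```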
#### File: logicmonitor_sdk/models/netflow_data_base.py
```python
import pprint
import re # noqa: F401
import six
class NetflowDataBase(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'data_type': 'str'
}
attribute_map = {
'data_type': 'dataType'
}
discriminator_value_class_map = {
'application': 'NetflowApplication',
'bandwidth': 'NetflowBandwidth',
'qosreporttablerow': 'NetflowQoSReportTableRow',
'groupflowrecord': 'GroupNetFlowRecord'
}
def __init__(self, data_type=None): # noqa: E501
"""NetflowDataBase - a model defined in Swagger""" # noqa: E501
self._data_type = None
self.discriminator = 'dataType'
if data_type is not None:
self.data_type = data_type
@property
def data_type(self):
"""Gets the data_type of this NetflowDataBase. # noqa: E501
:return: The data_type of this NetflowDataBase. # noqa: E501
:rtype: str
"""
return self._data_type
@data_type.setter
def data_type(self, data_type):
"""Sets the data_type of this NetflowDataBase.
:param data_type: The data_type of this NetflowDataBase. # noqa: E501
:type: str
"""
self._data_type = data_type
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_value = data[self.discriminator].lower()
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(NetflowDataBase, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NetflowDataBase):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
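The `discriminator_value_class_map` is what lets the generated client deserialize polymorphic netflow payloads. A small sketch, assuming an invented API payload, of how `get_real_child_model` resolves the concrete class name:
```python
# Minimal sketch (not SDK source); the payload dict is an invented API response.
from logicmonitor_sdk.models.netflow_data_base import NetflowDataBase
base = NetflowDataBase()
payload = {"dataType": "Bandwidth"}
# The lookup lower-cases the discriminator value before consulting the class map,
# so "Bandwidth" resolves to the NetflowBandwidth child model name.
print(base.get_real_child_model(payload))  # -> 'NetflowBandwidth'
```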
#### File: logicmonitor_sdk/models/privilege.py
```python
import pprint
import re # noqa: F401
import six
class Privilege(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'object_name': 'str',
'sub_operation': 'str',
'operation': 'str',
'object_id': 'str',
'object_type': 'str'
}
attribute_map = {
'object_name': 'objectName',
'sub_operation': 'subOperation',
'operation': 'operation',
'object_id': 'objectId',
'object_type': 'objectType'
}
def __init__(self, object_name=None, sub_operation=None, operation=None, object_id=None, object_type=None): # noqa: E501
"""Privilege - a model defined in Swagger""" # noqa: E501
self._object_name = None
self._sub_operation = None
self._operation = None
self._object_id = None
self._object_type = None
self.discriminator = None
if object_name is not None:
self.object_name = object_name
if sub_operation is not None:
self.sub_operation = sub_operation
self.operation = operation
self.object_id = object_id
self.object_type = object_type
@property
def object_name(self):
"""Gets the object_name of this Privilege. # noqa: E501
:return: The object_name of this Privilege. # noqa: E501
:rtype: str
"""
return self._object_name
@object_name.setter
def object_name(self, object_name):
"""Sets the object_name of this Privilege.
:param object_name: The object_name of this Privilege. # noqa: E501
:type: str
"""
self._object_name = object_name
@property
def sub_operation(self):
"""Gets the sub_operation of this Privilege. # noqa: E501
:return: The sub_operation of this Privilege. # noqa: E501
:rtype: str
"""
return self._sub_operation
@sub_operation.setter
def sub_operation(self, sub_operation):
"""Sets the sub_operation of this Privilege.
:param sub_operation: The sub_operation of this Privilege. # noqa: E501
:type: str
"""
self._sub_operation = sub_operation
@property
def operation(self):
"""Gets the operation of this Privilege. # noqa: E501
:return: The operation of this Privilege. # noqa: E501
:rtype: str
"""
return self._operation
@operation.setter
def operation(self, operation):
"""Sets the operation of this Privilege.
:param operation: The operation of this Privilege. # noqa: E501
:type: str
"""
if operation is None:
raise ValueError("Invalid value for `operation`, must not be `None`") # noqa: E501
self._operation = operation
@property
def object_id(self):
"""Gets the object_id of this Privilege. # noqa: E501
:return: The object_id of this Privilege. # noqa: E501
:rtype: str
"""
return self._object_id
@object_id.setter
def object_id(self, object_id):
"""Sets the object_id of this Privilege.
:param object_id: The object_id of this Privilege. # noqa: E501
:type: str
"""
if object_id is None:
raise ValueError("Invalid value for `object_id`, must not be `None`") # noqa: E501
self._object_id = object_id
@property
def object_type(self):
"""Gets the object_type of this Privilege. # noqa: E501
:return: The object_type of this Privilege. # noqa: E501
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""Sets the object_type of this Privilege.
:param object_type: The object_type of this Privilege. # noqa: E501
:type: str
"""
if object_type is None:
raise ValueError("Invalid value for `object_type`, must not be `None`") # noqa: E501
self._object_type = object_type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Privilege, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Privilege):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
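Unlike most of the generated models, `Privilege` marks `operation`, `object_id` and `object_type` as required, so their setters reject `None`. A hedged sketch with invented values:
```python
# Minimal sketch (not SDK source); the privilege values are invented.
from logicmonitor_sdk.models.privilege import Privilege
priv = Privilege(operation="read", object_id="*", object_type="dashboard_group")
try:
    priv.operation = None  # required field: the setter rejects None
except ValueError as exc:
    print(exc)  # Invalid value for `operation`, must not be `None`
```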
#### File: logicmonitor_sdk/models/recipient.py
```python
import pprint
import re # noqa: F401
import six
class Recipient(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'method': 'str',
'contact': 'str',
'type': 'str',
'addr': 'str'
}
attribute_map = {
'method': 'method',
'contact': 'contact',
'type': 'type',
'addr': 'addr'
}
def __init__(self, method=None, contact=None, type=None, addr=None): # noqa: E501
"""Recipient - a model defined in Swagger""" # noqa: E501
self._method = None
self._contact = None
self._type = None
self._addr = None
self.discriminator = None
self.method = method
if contact is not None:
self.contact = contact
self.type = type
if addr is not None:
self.addr = addr
@property
def method(self):
"""Gets the method of this Recipient. # noqa: E501
Admin | Arbitrary, where Admin = a user, and Arbitrary = an arbitrary email # noqa: E501
:return: The method of this Recipient. # noqa: E501
:rtype: str
"""
return self._method
@method.setter
def method(self, method):
"""Sets the method of this Recipient.
Admin | Arbitrary, where Admin = a user, and Arbitrary = an arbitrary email # noqa: E501
:param method: The method of this Recipient. # noqa: E501
:type: str
"""
if method is None:
raise ValueError("Invalid value for `method`, must not be `None`") # noqa: E501
self._method = method
@property
def contact(self):
"""Gets the contact of this Recipient. # noqa: E501
:return: The contact of this Recipient. # noqa: E501
:rtype: str
"""
return self._contact
@contact.setter
def contact(self, contact):
"""Sets the contact of this Recipient.
:param contact: The contact of this Recipient. # noqa: E501
:type: str
"""
self._contact = contact
@property
def type(self):
"""Gets the type of this Recipient. # noqa: E501
email | sms | voice, where type must be email if method = arbitrary # noqa: E501
:return: The type of this Recipient. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this Recipient.
email | sms | voice, where type must be email if method = arbitrary # noqa: E501
:param type: The type of this Recipient. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def addr(self):
"""Gets the addr of this Recipient. # noqa: E501
the user name if method = admin, or the email address if method = arbitrary # noqa: E501
:return: The addr of this Recipient. # noqa: E501
:rtype: str
"""
return self._addr
@addr.setter
def addr(self, addr):
"""Sets the addr of this Recipient.
the user name if method = admin, or the email address if method = arbitrary # noqa: E501
:param addr: The addr of this Recipient. # noqa: E501
:type: str
"""
self._addr = addr
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Recipient, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Recipient):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/role.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.privilege import Privilege # noqa: F401,E501
class Role(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'enable_remote_session_in_company_level': 'bool',
'custom_help_label': 'str',
'custom_help_url': 'str',
'privileges': 'list[Privilege]',
'associated_user_count': 'int',
'name': 'str',
'description': 'str',
'id': 'int',
'two_fa_required': 'bool',
'require_eula': 'bool',
'acct_require_two_fa': 'bool'
}
attribute_map = {
'enable_remote_session_in_company_level': 'enableRemoteSessionInCompanyLevel',
'custom_help_label': 'customHelpLabel',
'custom_help_url': 'customHelpURL',
'privileges': 'privileges',
'associated_user_count': 'associatedUserCount',
'name': 'name',
'description': 'description',
'id': 'id',
'two_fa_required': 'twoFARequired',
'require_eula': 'requireEULA',
'acct_require_two_fa': 'acctRequireTwoFA'
}
def __init__(self, enable_remote_session_in_company_level=None, custom_help_label=None, custom_help_url=None, privileges=None, associated_user_count=None, name=None, description=None, id=None, two_fa_required=None, require_eula=None, acct_require_two_fa=None): # noqa: E501
"""Role - a model defined in Swagger""" # noqa: E501
self._enable_remote_session_in_company_level = None
self._custom_help_label = None
self._custom_help_url = None
self._privileges = None
self._associated_user_count = None
self._name = None
self._description = None
self._id = None
self._two_fa_required = None
self._require_eula = None
self._acct_require_two_fa = None
self.discriminator = None
if enable_remote_session_in_company_level is not None:
self.enable_remote_session_in_company_level = enable_remote_session_in_company_level
if custom_help_label is not None:
self.custom_help_label = custom_help_label
if custom_help_url is not None:
self.custom_help_url = custom_help_url
self.privileges = privileges
if associated_user_count is not None:
self.associated_user_count = associated_user_count
self.name = name
if description is not None:
self.description = description
if id is not None:
self.id = id
if two_fa_required is not None:
self.two_fa_required = two_fa_required
if require_eula is not None:
self.require_eula = require_eula
if acct_require_two_fa is not None:
self.acct_require_two_fa = acct_require_two_fa
@property
def enable_remote_session_in_company_level(self):
"""Gets the enable_remote_session_in_company_level of this Role. # noqa: E501
Whether Remote Session should be enabled at the account level # noqa: E501
:return: The enable_remote_session_in_company_level of this Role. # noqa: E501
:rtype: bool
"""
return self._enable_remote_session_in_company_level
@enable_remote_session_in_company_level.setter
def enable_remote_session_in_company_level(self, enable_remote_session_in_company_level):
"""Sets the enable_remote_session_in_company_level of this Role.
Whether Remote Session should be enabled at the account level # noqa: E501
:param enable_remote_session_in_company_level: The enable_remote_session_in_company_level of this Role. # noqa: E501
:type: bool
"""
self._enable_remote_session_in_company_level = enable_remote_session_in_company_level
@property
def custom_help_label(self):
"""Gets the custom_help_label of this Role. # noqa: E501
The label for the custom help URL as it will appear in the Help & Support dropdown menu # noqa: E501
:return: The custom_help_label of this Role. # noqa: E501
:rtype: str
"""
return self._custom_help_label
@custom_help_label.setter
def custom_help_label(self, custom_help_label):
"""Sets the custom_help_label of this Role.
The label for the custom help URL as it will appear in the Help & Support dropdown menu # noqa: E501
:param custom_help_label: The custom_help_label of this Role. # noqa: E501
:type: str
"""
self._custom_help_label = custom_help_label
@property
def custom_help_url(self):
"""Gets the custom_help_url of this Role. # noqa: E501
The URL that should be added to the Help & Support dropdown menu # noqa: E501
:return: The custom_help_url of this Role. # noqa: E501
:rtype: str
"""
return self._custom_help_url
@custom_help_url.setter
def custom_help_url(self, custom_help_url):
"""Sets the custom_help_url of this Role.
The URL that should be added to the Help & Support dropdown menu # noqa: E501
:param custom_help_url: The custom_help_url of this Role. # noqa: E501
:type: str
"""
self._custom_help_url = custom_help_url
@property
def privileges(self):
"""Gets the privileges of this Role. # noqa: E501
The account privileges associated with the role. Privileges can be added to a role for each area of your account # noqa: E501
:return: The privileges of this Role. # noqa: E501
:rtype: list[Privilege]
"""
return self._privileges
@privileges.setter
def privileges(self, privileges):
"""Sets the privileges of this Role.
The account privileges associated with the role. Privileges can be added to a role for each area of your account # noqa: E501
:param privileges: The privileges of this Role. # noqa: E501
:type: list[Privilege]
"""
if privileges is None:
raise ValueError("Invalid value for `privileges`, must not be `None`") # noqa: E501
self._privileges = privileges
@property
def associated_user_count(self):
"""Gets the associated_user_count of this Role. # noqa: E501
        The count of users that belong to the role # noqa: E501
:return: The associated_user_count of this Role. # noqa: E501
:rtype: int
"""
return self._associated_user_count
@associated_user_count.setter
def associated_user_count(self, associated_user_count):
"""Sets the associated_user_count of this Role.
        The count of users that belong to the role # noqa: E501
:param associated_user_count: The associated_user_count of this Role. # noqa: E501
:type: int
"""
self._associated_user_count = associated_user_count
@property
def name(self):
"""Gets the name of this Role. # noqa: E501
The name of the role # noqa: E501
:return: The name of this Role. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Role.
The name of the role # noqa: E501
:param name: The name of this Role. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def description(self):
"""Gets the description of this Role. # noqa: E501
The description of the role # noqa: E501
:return: The description of this Role. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this Role.
The description of the role # noqa: E501
:param description: The description of this Role. # noqa: E501
:type: str
"""
self._description = description
@property
def id(self):
"""Gets the id of this Role. # noqa: E501
The Id of the role # noqa: E501
:return: The id of this Role. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this Role.
The Id of the role # noqa: E501
:param id: The id of this Role. # noqa: E501
:type: int
"""
self._id = id
@property
def two_fa_required(self):
"""Gets the two_fa_required of this Role. # noqa: E501
Whether Two-Factor Authentication should be required for this role # noqa: E501
:return: The two_fa_required of this Role. # noqa: E501
:rtype: bool
"""
return self._two_fa_required
@two_fa_required.setter
def two_fa_required(self, two_fa_required):
"""Sets the two_fa_required of this Role.
Whether Two-Factor Authentication should be required for this role # noqa: E501
:param two_fa_required: The two_fa_required of this Role. # noqa: E501
:type: bool
"""
self._two_fa_required = two_fa_required
@property
def require_eula(self):
"""Gets the require_eula of this Role. # noqa: E501
Whether or not users assigned this role should be required to acknowledge the EULA (end user license agreement) # noqa: E501
:return: The require_eula of this Role. # noqa: E501
:rtype: bool
"""
return self._require_eula
@require_eula.setter
def require_eula(self, require_eula):
"""Sets the require_eula of this Role.
Whether or not users assigned this role should be required to acknowledge the EULA (end user license agreement) # noqa: E501
:param require_eula: The require_eula of this Role. # noqa: E501
:type: bool
"""
self._require_eula = require_eula
@property
def acct_require_two_fa(self):
"""Gets the acct_require_two_fa of this Role. # noqa: E501
Whether Two-Factor Authentication should be required for the entire account # noqa: E501
:return: The acct_require_two_fa of this Role. # noqa: E501
:rtype: bool
"""
return self._acct_require_two_fa
@acct_require_two_fa.setter
def acct_require_two_fa(self, acct_require_two_fa):
"""Sets the acct_require_two_fa of this Role.
Whether Two-Factor Authentication should be required for the entire account # noqa: E501
:param acct_require_two_fa: The acct_require_two_fa of this Role. # noqa: E501
:type: bool
"""
self._acct_require_two_fa = acct_require_two_fa
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Role, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Role):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/sdt_history.py
```python
import pprint
import re # noqa: F401
import six
class SDTHistory(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'duration': 'int',
'item_id': 'int',
'end_epoch': 'int',
'admin': 'str',
'comment': 'str',
'id': 'str',
'type': 'str',
'start_epoch': 'int'
}
attribute_map = {
'duration': 'duration',
'item_id': 'itemId',
'end_epoch': 'endEpoch',
'admin': 'admin',
'comment': 'comment',
'id': 'id',
'type': 'type',
'start_epoch': 'startEpoch'
}
def __init__(self, duration=None, item_id=None, end_epoch=None, admin=None, comment=None, id=None, type=None, start_epoch=None): # noqa: E501
"""SDTHistory - a model defined in Swagger""" # noqa: E501
self._duration = None
self._item_id = None
self._end_epoch = None
self._admin = None
self._comment = None
self._id = None
self._type = None
self._start_epoch = None
self.discriminator = None
if duration is not None:
self.duration = duration
if item_id is not None:
self.item_id = item_id
if end_epoch is not None:
self.end_epoch = end_epoch
if admin is not None:
self.admin = admin
if comment is not None:
self.comment = comment
if id is not None:
self.id = id
if type is not None:
self.type = type
if start_epoch is not None:
self.start_epoch = start_epoch
@property
def duration(self):
"""Gets the duration of this SDTHistory. # noqa: E501
The duration of the SDT, in minutes # noqa: E501
:return: The duration of this SDTHistory. # noqa: E501
:rtype: int
"""
return self._duration
@duration.setter
def duration(self, duration):
"""Sets the duration of this SDTHistory.
The duration of the SDT, in minutes # noqa: E501
:param duration: The duration of this SDTHistory. # noqa: E501
:type: int
"""
self._duration = duration
@property
def item_id(self):
"""Gets the item_id of this SDTHistory. # noqa: E501
The ID of the resource in SDT, e.g. the group or device in SDT # noqa: E501
:return: The item_id of this SDTHistory. # noqa: E501
:rtype: int
"""
return self._item_id
@item_id.setter
def item_id(self, item_id):
"""Sets the item_id of this SDTHistory.
The ID of the resource in SDT, e.g. the group or device in SDT # noqa: E501
:param item_id: The item_id of this SDTHistory. # noqa: E501
:type: int
"""
self._item_id = item_id
@property
def end_epoch(self):
"""Gets the end_epoch of this SDTHistory. # noqa: E501
The end epoch for the SDT # noqa: E501
:return: The end_epoch of this SDTHistory. # noqa: E501
:rtype: int
"""
return self._end_epoch
@end_epoch.setter
def end_epoch(self, end_epoch):
"""Sets the end_epoch of this SDTHistory.
The end epoch for the SDT # noqa: E501
:param end_epoch: The end_epoch of this SDTHistory. # noqa: E501
:type: int
"""
self._end_epoch = end_epoch
@property
def admin(self):
"""Gets the admin of this SDTHistory. # noqa: E501
The user that added the SDT # noqa: E501
:return: The admin of this SDTHistory. # noqa: E501
:rtype: str
"""
return self._admin
@admin.setter
def admin(self, admin):
"""Sets the admin of this SDTHistory.
The user that added the SDT # noqa: E501
:param admin: The admin of this SDTHistory. # noqa: E501
:type: str
"""
self._admin = admin
@property
def comment(self):
"""Gets the comment of this SDTHistory. # noqa: E501
The comment associated with the SDT # noqa: E501
:return: The comment of this SDTHistory. # noqa: E501
:rtype: str
"""
return self._comment
@comment.setter
def comment(self, comment):
"""Sets the comment of this SDTHistory.
The comment associated with the SDT # noqa: E501
:param comment: The comment of this SDTHistory. # noqa: E501
:type: str
"""
self._comment = comment
@property
def id(self):
"""Gets the id of this SDTHistory. # noqa: E501
The ID of the SDT # noqa: E501
:return: The id of this SDTHistory. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SDTHistory.
The ID of the SDT # noqa: E501
:param id: The id of this SDTHistory. # noqa: E501
:type: str
"""
self._id = id
@property
def type(self):
"""Gets the type of this SDTHistory. # noqa: E501
The SDT type # noqa: E501
:return: The type of this SDTHistory. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this SDTHistory.
The SDT type # noqa: E501
:param type: The type of this SDTHistory. # noqa: E501
:type: str
"""
self._type = type
@property
def start_epoch(self):
"""Gets the start_epoch of this SDTHistory. # noqa: E501
The start epoch for the SDT # noqa: E501
:return: The start_epoch of this SDTHistory. # noqa: E501
:rtype: int
"""
return self._start_epoch
@start_epoch.setter
def start_epoch(self, start_epoch):
"""Sets the start_epoch of this SDTHistory.
The start epoch for the SDT # noqa: E501
:param start_epoch: The start_epoch of this SDTHistory. # noqa: E501
:type: int
"""
self._start_epoch = start_epoch
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SDTHistory, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SDTHistory):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/stats_d_metric_definition.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.stats_d_graph_display import StatsDGraphDisplay # noqa: F401,E501
class StatsDMetricDefinition(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'consolidate_function': 'int',
'display': 'StatsDGraphDisplay',
'name': 'str',
'aggregate_function': 'str'
}
attribute_map = {
'consolidate_function': 'consolidateFunction',
'display': 'display',
'name': 'name',
'aggregate_function': 'aggregateFunction'
}
def __init__(self, consolidate_function=None, display=None, name=None, aggregate_function=None): # noqa: E501
"""StatsDMetricDefinition - a model defined in Swagger""" # noqa: E501
self._consolidate_function = None
self._display = None
self._name = None
self._aggregate_function = None
self.discriminator = None
if consolidate_function is not None:
self.consolidate_function = consolidate_function
if display is not None:
self.display = display
if name is not None:
self.name = name
if aggregate_function is not None:
self.aggregate_function = aggregate_function
@property
def consolidate_function(self):
"""Gets the consolidate_function of this StatsDMetricDefinition. # noqa: E501
:return: The consolidate_function of this StatsDMetricDefinition. # noqa: E501
:rtype: int
"""
return self._consolidate_function
@consolidate_function.setter
def consolidate_function(self, consolidate_function):
"""Sets the consolidate_function of this StatsDMetricDefinition.
:param consolidate_function: The consolidate_function of this StatsDMetricDefinition. # noqa: E501
:type: int
"""
self._consolidate_function = consolidate_function
@property
def display(self):
"""Gets the display of this StatsDMetricDefinition. # noqa: E501
:return: The display of this StatsDMetricDefinition. # noqa: E501
:rtype: StatsDGraphDisplay
"""
return self._display
@display.setter
def display(self, display):
"""Sets the display of this StatsDMetricDefinition.
:param display: The display of this StatsDMetricDefinition. # noqa: E501
:type: StatsDGraphDisplay
"""
self._display = display
@property
def name(self):
"""Gets the name of this StatsDMetricDefinition. # noqa: E501
:return: The name of this StatsDMetricDefinition. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this StatsDMetricDefinition.
:param name: The name of this StatsDMetricDefinition. # noqa: E501
:type: str
"""
self._name = name
@property
def aggregate_function(self):
"""Gets the aggregate_function of this StatsDMetricDefinition. # noqa: E501
:return: The aggregate_function of this StatsDMetricDefinition. # noqa: E501
:rtype: str
"""
return self._aggregate_function
@aggregate_function.setter
def aggregate_function(self, aggregate_function):
"""Sets the aggregate_function of this StatsDMetricDefinition.
:param aggregate_function: The aggregate_function of this StatsDMetricDefinition. # noqa: E501
:type: str
"""
self._aggregate_function = aggregate_function
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(StatsDMetricDefinition, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, StatsDMetricDefinition):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/table_widget_column.py
```python
import pprint
import re # noqa: F401
import six
from logicmonitor_sdk.models.table_widget_data_point import TableWidgetDataPoint # noqa: F401,E501
class TableWidgetColumn(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alternate_data_points': 'list[TableWidgetDataPoint]',
'rpn': 'str',
'data_point': 'TableWidgetDataPoint',
'column_name': 'str',
'enable_forecast': 'bool',
'rounding_decimal': 'int'
}
attribute_map = {
'alternate_data_points': 'alternateDataPoints',
'rpn': 'rpn',
'data_point': 'dataPoint',
'column_name': 'columnName',
'enable_forecast': 'enableForecast',
'rounding_decimal': 'roundingDecimal'
}
def __init__(self, alternate_data_points=None, rpn=None, data_point=None, column_name=None, enable_forecast=None, rounding_decimal=None): # noqa: E501
"""TableWidgetColumn - a model defined in Swagger""" # noqa: E501
self._alternate_data_points = None
self._rpn = None
self._data_point = None
self._column_name = None
self._enable_forecast = None
self._rounding_decimal = None
self.discriminator = None
if alternate_data_points is not None:
self.alternate_data_points = alternate_data_points
if rpn is not None:
self.rpn = rpn
self.data_point = data_point
self.column_name = column_name
if enable_forecast is not None:
self.enable_forecast = enable_forecast
if rounding_decimal is not None:
self.rounding_decimal = rounding_decimal
@property
def alternate_data_points(self):
"""Gets the alternate_data_points of this TableWidgetColumn. # noqa: E501
:return: The alternate_data_points of this TableWidgetColumn. # noqa: E501
:rtype: list[TableWidgetDataPoint]
"""
return self._alternate_data_points
@alternate_data_points.setter
def alternate_data_points(self, alternate_data_points):
"""Sets the alternate_data_points of this TableWidgetColumn.
:param alternate_data_points: The alternate_data_points of this TableWidgetColumn. # noqa: E501
:type: list[TableWidgetDataPoint]
"""
self._alternate_data_points = alternate_data_points
@property
def rpn(self):
"""Gets the rpn of this TableWidgetColumn. # noqa: E501
:return: The rpn of this TableWidgetColumn. # noqa: E501
:rtype: str
"""
return self._rpn
@rpn.setter
def rpn(self, rpn):
"""Sets the rpn of this TableWidgetColumn.
:param rpn: The rpn of this TableWidgetColumn. # noqa: E501
:type: str
"""
self._rpn = rpn
@property
def data_point(self):
"""Gets the data_point of this TableWidgetColumn. # noqa: E501
:return: The data_point of this TableWidgetColumn. # noqa: E501
:rtype: TableWidgetDataPoint
"""
return self._data_point
@data_point.setter
def data_point(self, data_point):
"""Sets the data_point of this TableWidgetColumn.
:param data_point: The data_point of this TableWidgetColumn. # noqa: E501
:type: TableWidgetDataPoint
"""
if data_point is None:
raise ValueError("Invalid value for `data_point`, must not be `None`") # noqa: E501
self._data_point = data_point
@property
def column_name(self):
"""Gets the column_name of this TableWidgetColumn. # noqa: E501
:return: The column_name of this TableWidgetColumn. # noqa: E501
:rtype: str
"""
return self._column_name
@column_name.setter
def column_name(self, column_name):
"""Sets the column_name of this TableWidgetColumn.
:param column_name: The column_name of this TableWidgetColumn. # noqa: E501
:type: str
"""
if column_name is None:
raise ValueError("Invalid value for `column_name`, must not be `None`") # noqa: E501
self._column_name = column_name
@property
def enable_forecast(self):
"""Gets the enable_forecast of this TableWidgetColumn. # noqa: E501
:return: The enable_forecast of this TableWidgetColumn. # noqa: E501
:rtype: bool
"""
return self._enable_forecast
@enable_forecast.setter
def enable_forecast(self, enable_forecast):
"""Sets the enable_forecast of this TableWidgetColumn.
:param enable_forecast: The enable_forecast of this TableWidgetColumn. # noqa: E501
:type: bool
"""
self._enable_forecast = enable_forecast
@property
def rounding_decimal(self):
"""Gets the rounding_decimal of this TableWidgetColumn. # noqa: E501
:return: The rounding_decimal of this TableWidgetColumn. # noqa: E501
:rtype: int
"""
return self._rounding_decimal
@rounding_decimal.setter
def rounding_decimal(self, rounding_decimal):
"""Sets the rounding_decimal of this TableWidgetColumn.
:param rounding_decimal: The rounding_decimal of this TableWidgetColumn. # noqa: E501
:type: int
"""
self._rounding_decimal = rounding_decimal
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TableWidgetColumn, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TableWidgetColumn):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
#### File: logicmonitor_sdk/models/table_widget_forecast_configuration.py
```python
import pprint
import re # noqa: F401
import six
class TableWidgetForecastConfiguration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'severity': 'str',
'confidence': 'int',
'time_range': 'str',
'algorithm': 'str'
}
attribute_map = {
'severity': 'severity',
'confidence': 'confidence',
'time_range': 'timeRange',
'algorithm': 'algorithm'
}
def __init__(self, severity=None, confidence=None, time_range=None, algorithm=None): # noqa: E501
"""TableWidgetForecastConfiguration - a model defined in Swagger""" # noqa: E501
self._severity = None
self._confidence = None
self._time_range = None
self._algorithm = None
self.discriminator = None
if severity is not None:
self.severity = severity
if confidence is not None:
self.confidence = confidence
if time_range is not None:
self.time_range = time_range
if algorithm is not None:
self.algorithm = algorithm
@property
def severity(self):
"""Gets the severity of this TableWidgetForecastConfiguration. # noqa: E501
The minimum alert severity the forecasting should include, one of warn | error | critical # noqa: E501
:return: The severity of this TableWidgetForecastConfiguration. # noqa: E501
:rtype: str
"""
return self._severity
@severity.setter
def severity(self, severity):
"""Sets the severity of this TableWidgetForecastConfiguration.
The minimum alert severity the forecasting should include, one of warn | error | critical # noqa: E501
:param severity: The severity of this TableWidgetForecastConfiguration. # noqa: E501
:type: str
"""
self._severity = severity
@property
def confidence(self):
"""Gets the confidence of this TableWidgetForecastConfiguration. # noqa: E501
The percent confidence that should be required for a forecasted alert. # noqa: E501
:return: The confidence of this TableWidgetForecastConfiguration. # noqa: E501
:rtype: int
"""
return self._confidence
@confidence.setter
def confidence(self, confidence):
"""Sets the confidence of this TableWidgetForecastConfiguration.
The percent confidence that should be required for a forecasted alert. # noqa: E501
:param confidence: The confidence of this TableWidgetForecastConfiguration. # noqa: E501
:type: int
"""
self._confidence = confidence
@property
def time_range(self):
"""Gets the time_range of this TableWidgetForecastConfiguration. # noqa: E501
The training data time range (the data on which forecasting is calculated). Options are Last 7 days, Last 14 days, Last 30 days, Last calendar month, Last 365 days or a custom time range # noqa: E501
:return: The time_range of this TableWidgetForecastConfiguration. # noqa: E501
:rtype: str
"""
return self._time_range
@time_range.setter
def time_range(self, time_range):
"""Sets the time_range of this TableWidgetForecastConfiguration.
The training data time range (the data on which forecasting is calculated). Options are Last 7 days, Last 14 days, Last 30 days, Last calendar month, Last 365 days or a custom time range # noqa: E501
:param time_range: The time_range of this TableWidgetForecastConfiguration. # noqa: E501
:type: str
"""
self._time_range = time_range
@property
def algorithm(self):
"""Gets the algorithm of this TableWidgetForecastConfiguration. # noqa: E501
        Forecast method for the widget: Linear | ARIMA # noqa: E501
:return: The algorithm of this TableWidgetForecastConfiguration. # noqa: E501
:rtype: str
"""
return self._algorithm
@algorithm.setter
def algorithm(self, algorithm):
"""Sets the algorithm of this TableWidgetForecastConfiguration.
        Forecast method for the widget: Linear | ARIMA # noqa: E501
:param algorithm: The algorithm of this TableWidgetForecastConfiguration. # noqa: E501
:type: str
"""
self._algorithm = algorithm
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TableWidgetForecastConfiguration, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TableWidgetForecastConfiguration):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
```
|
{
"source": "jeremythai01/End-to-End-ML-Tutorial",
"score": 3
}
|
#### File: services/data_prep/text_preprocessor.py
```python
import re
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import RegexpTokenizer
class TextPreprocessor():
"""Text preprocessor class to clean Reddit comments and prepare it for
NLP model prediction.
"""
def __init__(self):
self.__stemmer = PorterStemmer()
self.__lemmatizer = WordNetLemmatizer()
self.__tokenizer = RegexpTokenizer(r'\w+')
self.__stopwords = stopwords.words('english')
def preprocess_text(self, text: str):
"""Preprocess comments with different techniques to transform them
into a more predictable form for the model.
Parameters
----------
text : string
The comment to be preprocessed.
Returns
-------
text : preprocessed text
"""
# Lowercasing, removing digits and non alphabetic characters
text = str(text).lower().replace('{html}',"")
cleanr = re.compile('<.*?>')
clean_text = re.sub(cleanr, '', text)
rem_url = re.sub(r'http\S+', '', clean_text)
rem_num = re.sub('[0-9]+', '', rem_url)
        # Tokenization
        tokens = self.__tokenizer.tokenize(rem_num)
        # Removing stop words
        filtered_words = [w for w in tokens if w not in self.__stopwords]
        # Stemming
        stem_words = [self.__stemmer.stem(w) for w in filtered_words]
        # Lemmatization
        lemma_words = [self.__lemmatizer.lemmatize(w) for w in stem_words]
clean_text = " ".join(lemma_words)
return clean_text
```
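A possible usage sketch for the preprocessor above; the sample comment is invented, it is assumed to run in the same module as `TextPreprocessor`, and the `nltk.download()` calls are an assumption about a fresh environment rather than part of the project code:
```python
# Hypothetical usage, run in the same module as TextPreprocessor above.
import nltk
nltk.download("stopwords", quiet=True)
nltk.download("wordnet", quiet=True)
nltk.download("omw-1.4", quiet=True)
preprocessor = TextPreprocessor()
raw = "I bought 10 shares of $GME at https://example.com and I'm LOVING it!!!"
print(preprocessor.preprocess_text(raw))
# roughly: "bought share gme love" (lowercased, URL and digits stripped, stop words removed)
```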
#### File: services/data_scrape/reddit_bot.py
```python
from decouple import config
from praw import Reddit
from praw.reddit import Comment
from typing import Any, List
class RedditBot:
"""Reddit bot singleton to scrape Reddit data from subreddits."""
_instance = None
@staticmethod
def getInstance():
"""Static access method."""
        if RedditBot._instance is None:
RedditBot()
return RedditBot._instance
def __init__(self):
"""Virtually private constructor."""
        if RedditBot._instance is not None:
raise Exception("This class is a singleton!")
else:
RedditBot._instance = self
RedditBot._instance._bot = self._create_reddit_bot()
def _create_reddit_bot(self):
"""Create Reddit bot instance.
Returns
-------
reddit_bot : new reddit bot instance
"""
reddit_bot = Reddit(username=config('REDDIT_USERNAME'),
password=config('<PASSWORD>'),
client_id=config('REDDIT_CLIENT_ID'),
client_secret=config('REDDIT_CLIENT_SECRET'),
user_agent=config('REDDIT_USER_AGENT'))
return reddit_bot
def _extract_comment_info(self, comment: Comment):
"""Extract the needed info for reddit comments.
Parameters
----------
comment : Comment object
The comment used to extract the info.
Returns
-------
comment_info : dict of the comment info
"""
comment_info = {
'post': str(comment.submission.title),
'author': str(comment.author.name),
'author_comment_karma': str(comment.author.comment_karma),
'author_link_karma': str(comment.author.link_karma),
'author_is_mod': str(comment.author.is_mod),
'author_is_gold': str(comment.author.is_gold),
'comment_id': str(comment.id),
'body': str(comment.body),
'score': str(comment.score),
'date': str(comment.created_utc)
}
return comment_info
def _check_comment_constraints(self, comment: Any):
# Filter comments without author or enough votes
MIN_SCORE = 10
return (type(comment) == Comment and
                comment.author is not None and
comment.score >= MIN_SCORE)
def scrape_reddit(self, subreddit: str, limit_posts: int):
"""Scrape Reddit comments from specified submissions.
Parameters
----------
posts : list of Submission object
The posts used to scrape data.
Returns
-------
comments_info : list of scraped Reddit comments info
"""
posts = self._instance._bot.subreddit(subreddit).hot(limit=limit_posts)
comments_info = []
i_c = 0
for post in posts:
# Include comments from the "load more comments" section
post.comments.replace_more(limit=0)
filtered_comments = list(filter(self._check_comment_constraints, post.comments.list()))
for comment in filtered_comments:
try:
c_info = self._extract_comment_info(comment)
except AttributeError:
continue
i_c += 1
comments_info.append(c_info)
print("1 post done")
print(f'Scraped {i_c} comments')
return comments_info
```
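A hypothetical way to drive the scraper, run in the same module as `RedditBot`; it assumes the `REDDIT_*` credentials read by `decouple` are present in the environment, and the subreddit name and post limit are illustrative only:
```python
# Hypothetical driver code; assumes REDDIT_* credentials are available to decouple's
# config() and that praw is installed. Subreddit and limit are illustrative.
bot = RedditBot.getInstance()
comments = bot.scrape_reddit(subreddit="stocks", limit_posts=5)
if comments:
    print(comments[0]["body"][:80])  # peek at the first scraped comment body
print(f"total comments scraped: {len(comments)}")
```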
#### File: services/data_vis/helpers.py
```python
def get_sentiment_select_query():
return '''
SELECT s.score, c.date
FROM Reddit.Comment c INNER JOIN Reddit.Sentiment s ON c.id = s.idComment
ORDER BY c.date;
'''
```
#### File: tests/unit/test_sentiment_analyzer.py
```python
import unittest
import pandas as pd
from src.services.ml_predict.sentiment_analyzer import SentimentAnalyzer
class TestSentimentAnalyzer(unittest.TestCase):
def setUp(self):
self.model = SentimentAnalyzer()
def test_predict_sentiment(self):
df = pd.DataFrame()
text = ["This is a test sentence", "another one"]
df['body'] = text
result_df = self.model.predict_sentiment(df)
scores = result_df['score'].tolist()
self.assertFalse('body' in result_df.columns)
self.assertTrue('score' in result_df.columns)
        self.assertTrue(all(str(score)[::-1].find('.') > 2 for score in scores))
```
|
{
"source": "jeremythai01/Stock-Market-Sentiment-Real-Time-Visualization",
"score": 3
}
|
#### File: api/etl/stream_handler.py
```python
from etl.database_connection import DBConnectionSingleton
class StreamHandler():
"""Stream handler to perform data retrieval and streaming with the
database.
"""
def __init__(self):
self.__db_connection = DBConnectionSingleton.getInstance()
def stream_to_database(self, df):
"""Stream transformed data to database.
Parameters
----------
df : Dataframe
The dataframe of transformed Reddit comments
"""
i_c = 0
for i in range(len(df.axes[0])): # Iterate row by row
try:
to_add = [df['subreddit'][i],
df['author'][i],
df['text'][i],
df['date'][i],
float(df['sentiment'][i])
]
insert_query = """
INSERT IGNORE INTO Comment
VALUES (DEFAULT, %s, %s, %s, %s, %s)
"""
self.__db_connection.query(insert_query, to_add)
self.__db_connection.commit()
i_c += 1
except AttributeError:
continue
print(f'Streamed {i_c} comments')
def retrieve_data(self, size):
"""Retrieve specified data from database.
Parameters
----------
size : integer
            The maximum number of rows to retrieve.
Returns
-------
sentiment_data : list of tuples of sentiment scores and dates
"""
insert_query = "SELECT date, sentiment FROM Comment ORDER BY date DESC LIMIT " + str(size)
self.__db_connection.query(insert_query)
sentiment_data = self.__db_connection.fetchall()
return sentiment_data
def serialize(self, row):
"""Seralize specified row fetched from database.
Parameters
----------
row : tuple
The sentiment score and date.
Returns
-------
sentiment_data_serialized : dict of sentiment score and date
"""
sentiment_data_serialized = {'date' : row[0], 'sentiment' : row[1]}
return sentiment_data_serialized
def close_db_connection(self):
"""Close connection with the database."""
self.__db_connection.close()
```
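An illustrative round trip through the handler, assumed to run in the same module as `StreamHandler`; it presumes `DBConnectionSingleton` is already configured for the `Comment` table used by the INSERT above, and the row values are invented:
```python
# Illustrative round trip; assumes DBConnectionSingleton points at a schema with the
# Comment table used by the INSERT above. Row values are invented.
import pandas as pd
handler = StreamHandler()
df = pd.DataFrame([{"subreddit": "stocks", "author": "user1",
                    "text": "to the moon", "date": "1614556800", "sentiment": 0.8}])
handler.stream_to_database(df)
rows = handler.retrieve_data(size=10)
print([handler.serialize(row) for row in rows])
handler.close_db_connection()
```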
#### File: ml/tests/test_model.py
```python
import pandas as pd
from ml import model
def test_predict_sentiment():
ml_model = model.SentimentAnalysisModel()
df = pd.DataFrame()
text = ["This is a test sentence", "another one"]
df['text'] = text
result_df = ml_model.predict_sentiment(df)
    assert all(str(sentiment)[::-1].find('.') > 2 for text, sentiment in result_df.items())
```
#### File: module/dash/dash_app.py
```python
import dash
import pandas as pd
import plotly
import requests
import time
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
from dash.dependencies import Input, Output
from decouple import config
app = dash.Dash(__name__)
app.layout = html.Div(
[ html.H2('Live Reddit Stock Sentiment'),
dcc.Graph(id='live-graph', animate=True),
dcc.Interval(
id='graph-update',
interval=1*20000, # 1 second = 1000
n_intervals= 0
)
]
)
@app.callback(Output('live-graph', 'figure'),
[Input('graph-update', 'n_intervals')])
def update_graph_scatter(n_intervals):
response_stream = requests.post(f"{config('REST_API_URL')}stream")
time.sleep(30)
response_comments = requests.get(f"{config('REST_API_URL')}comments")
comments = pd.DataFrame(response_comments.json())
comments.sort_values('date', inplace=True)
comments['sentiment'] = comments['sentiment'].rolling(int(len(comments)/5)).mean()
X = comments.date.values[-100:]
Y = comments.sentiment.values[-100:]
data = plotly.graph_objs.Scatter(
x=list(X),
y=list(Y),
name='Scatter',
mode= 'lines+markers'
)
return {'data': [data],'layout' : go.Layout(xaxis=dict(range=[min(X),max(X)]),
yaxis=dict(range=[min(Y),max(Y)]),)}
if __name__ == '__main__':
app.run_server(host=config('DASH_HOST'))
```
|
{
"source": "jeremytiki/blurple.py",
"score": 3
}
|
#### File: blurple/ext/router.py
```python
import typing as t
from discord.ext import commands
class Router:
""" Create a router, connected to a bot instance to allow route-based command registry.
:Example Usage:
.. code-block:: python
bot = commands.Bot()
router = Router(bot)
@router.route(["cmd", "subcmd"])
async def subcmd(ctx):
pass
"""
def __init__(self, bot: commands.Bot):
self.bot = bot
def route(self, command: list, **kwargs):
""" A shortcut decorator that registers a command route.
:param list[str] command: A list of strings defining the route to the command.
:param **kwargs: Any command attributes to pass on, such as aliases.
"""
def deco(func):
return self.get_command_group(command, func, **kwargs)
return deco
def get_command_group(self, path: list, func: t.Optional[t.Callable] = None, **kwargs) -> commands.Group:
# Return self.bot if path is the root
if len(path) == 0:
return self.bot
# Try and find group
group: commands.Group = self.bot.get_command(" ".join(path))
        # If it doesn't exist (or a handler func was passed), create the group
if group is None or func:
# Get the parent group
parent = self.get_command_group(path[:-1])
existing_subcommands = []
# Create the func if it doesn't exist
if func is None:
async def func(ctx):
raise commands.CommandNotFound(f'Command "{path[-1]}" is not found')
        # Remove the current group if it already exists
elif group is not None:
existing_subcommands = group.commands
parent.remove_command(group.name)
# Create the group
group = commands.Group(func, name=path[-1], invoke_without_command=True, **kwargs)
# Add existing subcommands
for cmd in existing_subcommands:
group.add_command(cmd)
# Register command
parent.add_command(group)
return group
```
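A short usage sketch for the router above: registering a nested route creates the intermediate `commands.Group` objects on demand, so only the leaf handler has to be written. The import path, prefix, and command names are assumptions; recent discord.py versions also require an `intents=` argument to `commands.Bot`.
```python
# Usage sketch; names and token are placeholders.
from discord.ext import commands
from blurple.ext.router import Router  # assumed import path, based on the file header above

bot = commands.Bot(command_prefix="!")  # newer discord.py also needs intents=...
router = Router(bot)

@router.route(["settings", "prefix", "set"])
async def set_prefix(ctx, new_prefix: str):
    # The parent groups "settings" and "settings prefix" are created on the fly.
    await ctx.send(f"Prefix updated to {new_prefix}")

# bot.run("TOKEN")  # supply a real token to start the bot
```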
#### File: blurple/io/reaction.py
```python
import discord
import blurple.io as io
class ReactionAddBasic(io.Reply):
"""An unopinionated, lower level class to wait for a user to add a reaction."""
event = "raw_reaction_add"
async def on_reply_init(self, message: discord.Message):
"""Sepcialized to pass message object."""
self.message = message
def reply_check(self, payload: discord.RawReactionActionEvent):
"""Specialized to check if the reaction and payload message is valid."""
if payload.message_id == self.message.id and not payload.user_id == self.ctx.me.id:
if self._iscontainer(self.validate):
return str(payload.emoji) in self.validate
return True
class ReactionRemoveBasic(ReactionAddBasic):
"""An unopinionated, lower level class to wait for a user to remove a reaction."""
event = "raw_reaction_remove"
class ReactionAddReply(ReactionAddBasic):
""" Ask for the user's reaction reply.
:Example Usage:
.. code-block:: python
reply = await io.ReactionAddBasic(ctx, validate=["✅", "❎"]).result()
"""
async def on_reply_init(self, message: discord.Message):
"""Specialized to add vaild reaction emojis to message, if validation is on."""
await super().on_reply_init(message)
if self._iscontainer(self.validate):
for react in self.validate:
await self.message.add_reaction(react)
def reply_check(self, payload: discord.RawReactionActionEvent):
"""Specialized to check if payload user and message are valid."""
return payload.user_id == self.ctx.author.id and \
payload.message_id == self.message.id
async def on_reply_attempt(self, payload: discord.RawReactionActionEvent):
"""Specialized to remove the user's reaction."""
await self.message.remove_reaction(payload.emoji, self.ctx.bot.get_user(payload.user_id))
return payload
async def on_reply_complete(self):
"""Specialized to clear all reactions off the message."""
await self.message.clear_reactions()
```
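A sketch of `ReactionAddReply` inside a command. The `message=` keyword matches the `on_reply_init(message)` hook above, but treat the exact call shape as an assumption; `bot` is the usual `commands.Bot` instance.
```python
# Hedged usage sketch; "bot" is assumed to be a commands.Bot as in the Router example.
import blurple.io as io

@bot.command()
async def confirm(ctx):
    prompt = await ctx.send("Proceed?")
    payload = await io.ReactionAddReply(ctx, message=prompt, validate=["✅", "❎"]).result()
    if payload is None:
        await ctx.send("Timed out.")
    elif str(payload.emoji) == "✅":
        await ctx.send("Confirmed!")
    else:
        await ctx.send("Cancelled.")
```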
#### File: blurple/io/reply.py
```python
from __future__ import annotations
import re
import inspect
import typing as t
from abc import ABC
import discord
from discord.ext import commands
import asyncio
import blurple.ui as ui
class Reply(ABC):
""" An abstract class for getting replies, to be extended.
If you are trying to get a reply from the user directly, you may be looking for :class:`MessageReply` or :class:`ReactionAddReply`.
:Extending this class::
In order to extend this class, there are 5 methods you can specialize.
- :func:`on_reply_init` Use this method to initialize variables at the start.
- :func:`on_pre_reply` Use this method to prepare anything before reply attempts.
- :func:`reply_check` This is required. Evaluate whether an event call is considered a user reply attempt.
- :func:`on_reply_attempt` Use this method to handle resetting the state after a reply attempt.
- :func:`on_reply_complete` Use this method to handle final cleanup.
:param ctx: The :class:`~commands.Context` variable
:param validate: An optional parameter to validate the reply.
- If left blank, no validation will be performed.
- If you pass a :class:`list` / :class:`set`, validation will succeed when the reply content is found inside the list/set.
- If you pass a :class:`str`, validation will succeed when the reply content matches the string as a regex.
        - If you pass a :class:`function` or :class:`coroutine`, it will be called (and awaited if it is a coroutine); validation will succeed when it returns a truthy value. The reply object will be passed as a parameter.
:param error: An optional parameter specifying the message to send when the user fails validation, defaults to a simple "Invalid Reply" :class:`~Alert`.
"""
def __init__(self,
ctx: commands.Context,
*,
validate: t.Optional[t.Union[str, t.Callable, t.List]] = None,
on_error: t.Union[str, discord.Embed] = ui.Alert(ui.Style.DANGER, title="Invalid Reply"),
timeout = 180,
**kwargs) -> None:
self.ctx = ctx
self.validate = validate
self.on_error = on_error
self.error = None
self.timeout = timeout
self.kwargs = kwargs
def __str__(self):
return self.__class__.__name__
def __repr__(self):
return f"<{self.__class__.__name__} for '{self.event}'>"
async def result(self):
"""Await the result of the reply."""
await self.on_reply_init(**self.kwargs) # Event method
reply = await self._get_valid_reply()
await self._cleanup() # Event method
return reply
async def _cleanup(self):
"""Clean up reply after result."""
await self.on_reply_complete()
await self._delete_error()
async def _get_valid_reply(self):
"""Wrap get_reply with validation, error handling, and recursive calls."""
reply = await self._get_reply()
if reply is not None: # Reply hasn't timed out
# Validate reply
is_valid = await self._validate_reply(reply, self.validate)
# If reply isn't valid, recursively call function
if not is_valid:
await self._send_error()
return await self._get_valid_reply()
return reply
async def _get_reply(self):
"""Get a reply from the user, no validation."""
await self.on_pre_reply() # Event method
# Wait for reply
try:
raw_reply = await self.ctx.bot.wait_for(
self.event,
check=self.reply_check,
timeout=self.timeout
)
except asyncio.TimeoutError:
reply = None
else:
r = await self.on_reply_attempt(raw_reply) # Event method
reply = r if r else raw_reply
return reply
async def _send_error(self) -> discord.Message:
""" Send an error message to the user.
Will replace the current error message.
        The message content comes from ``self.on_error``, which may be an embed or a string.
"""
await self._delete_error()
if isinstance(self.on_error, discord.Embed):
self.error = await self.ctx.send(embed=self.on_error)
elif isinstance(self.on_error, str):
self.error = await self.ctx.send(self.on_error)
return self.error
async def _delete_error(self) -> None:
"""Delete the current error message, if it exists."""
if self.error is None:
return
await self.error.delete()
self.error = None
@classmethod
async def _validate_reply(cls, reply, valid: t.Union[str, t.Container, t.Callable]) -> bool:
"""Detect validation type and check it against the reply."""
if valid is None:
return True
content = cls._get_reply_content(reply)
if isinstance(valid, str):
return bool(re.search(valid, content))
if cls._iscontainer(valid):
return content in valid
if callable(valid):
            if inspect.iscoroutinefunction(valid):
return await valid(reply)
return valid(reply)
@staticmethod
def _get_reply_content(reply):
""" Retrieve the content of the reply."""
if isinstance(reply, discord.Message):
return reply.content
if isinstance(reply, (discord.Reaction, discord.RawReactionActionEvent)):
return str(reply.emoji)
@staticmethod
def _iscontainer(obj: t.Union[t.Container, t.Any]):
return getattr(obj, "__contains__", False)
@classmethod
async def result_between(cls, replies: t.Container[Reply]) -> t.Tuple[Reply, t.Any]:
""" Return the first completed result between multiple reply objects.
:param replies: A collection of Reply objects.
:returns: A tuple containing the Reply object and the result it returned.
:How to use this:
This can be an especially powerful function if used correctly.
Here's an example of an rsvp list interaction with reactions using this function.
This is completely contrived for example and not a practical use.
.. code-block:: python
rsvp_react = "..." # Replace this with whatever you want
rsvp_list = []
# Start the reply wait
message = await ctx.send("React to RSVP!")
await message.add_reaction(rsvp_react)
add = io.ReactionAddBasic(message, validate=[rsvp_react])
remove = io.ReactionRemoveBasic(message, validate=[rsvp_react])
while True:
                obj, result = await io.Reply.result_between({add, remove})
if obj is add:
rsvp_list.append(result.user_id)
elif obj is remove:
rsvp_list.remove(result.user_id)
else: # obj is None (The reply timed out)
break
# Reply wait complete
await message.clear_reactions()
await message.edit(f"Here's the list of RSVPrs:\\n{'\\n'.join([f'> <@{user_id}>' for user_id in rsvp_list])}")
"""
# Prepare tasks
timeouts = []
def parse_task(reply: Reply):
# Handle timeout
timeouts.append(reply.timeout)
reply.timeout = None
# Return task
return asyncio.create_task(reply.result(), name=reply)
# Wait tasks
tasks = [parse_task(task) for task in replies]
task, result = await cls._wait_tasks(tasks, timeout=min(timeouts))
# Get original reply object
for obj in replies:
if task is None:
obj = None
break
if str(obj) == task.get_name():
break
# Run cleanup on cancelled replies
replies.remove(obj)
for cancelled in replies:
await cancelled._cleanup()
# Return original reply object and the result
return obj, result
@staticmethod
async def _wait_tasks(tasks: t.Container[asyncio.Task], timeout: int) -> t.Tuple[t.Optional[asyncio.Future], t.Optional[t.Any]]:
""" Try block to asyncio.wait a set of tasks with timeout handling.
:param tasks: A collection of task objects
:param timeout: How long in seconds to wait until a timeout occurs.
:return: A tuple containing the task and the result. Both will be None if a timeout occurs.
"""
done, pending = await asyncio.wait(tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
for rest in pending:
rest.cancel()
if done:
task: asyncio.Future = done.pop()
return task, task.result()
return None, None
async def on_reply_init(self):
""" An abstract method, to be extended in custom Reply listeners.
This method runs when the Reply class is created.
"""
async def on_pre_reply(self):
""" An abstract method, to be extended in custom Reply listeners.
This method runs before each reply attempt, can run multiple times with validation.
"""
def reply_check(self, reply):
""" An abstract method, to be extended in custom Reply listeners.
This method runs as a check to determine whether to recognize the reply event, can run multiple times with validation.
"""
async def on_reply_attempt(self, reply):
""" An abstract method, to be extended in custom Reply listeners.
This method runs after each reply attempt, can run multiple times with validation.
:return: You can optionally return a parsed version of the reply to be used instead of the raw reply object.
"""
async def on_reply_complete(self):
""" An abstract method, to be extended in custom Reply listeners.
This method runs after a valid reply is returned.
"""
```
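To illustrate the extension hooks documented in the class docstring, here is a minimal custom listener that waits for the author's next message. blurple may already ship an equivalent `MessageReply`, so read this purely as a sketch of the hook contract.
```python
# Illustrative subclass only; assumes the hook contract described in Reply's docstring.
import blurple.io as io

class SimpleMessageReply(io.Reply):
    """Wait for the next message from the command author in the same channel."""
    event = "message"

    async def on_reply_init(self, prompt: str):
        self.prompt = await self.ctx.send(prompt)

    def reply_check(self, message):
        return (message.author.id == self.ctx.author.id and
                message.channel.id == self.ctx.channel.id)

    async def on_reply_complete(self):
        await self.prompt.delete()

# e.g. reply = await SimpleMessageReply(ctx, prompt="What's your name?", validate=r"\w+").result()
```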
#### File: blurple/ui/alert.py
```python
import discord
import blurple.ui as ui
class Alert(ui.Base):
""" A subclass of :class:`discord.Embed` for stylish alert messages.
:param Style style: The style of the alert.
:param str title: The title of the alert, will be wrapped in emoji and alert name unless specified in options.
    :param str description: An optional description of the alert; use your imagination for its use.
    :param **options: Alert options to customize its look.
:emoji: Defaults to :class:`True`. Can be set to false to remove the emoji from the alert title.
This will automatically be removed if a custom style specifies it as an empty string.
:name: Defaults to :class:`True`. Can be set to false to remove the name of the alert from the title.
This will automatically be removed if a custom style specifies it as an empty string.
"""
def __init__(self, style: ui.Style, title: str, description: str = discord.Embed.Empty, **options):
super().__init__(
color=style[0],
title=self.process_title(style, title, **options),
description=description
)
@staticmethod
def process_title(style: ui.Style, title: str, **options):
output: str = ''
if options.get("emoji") is not False and style[1]:
output += style[1] + " "
if (name := options.get("name", style[2])) is not False and name:
output += f"`{name}:` "
return output + f"**{title}**"
```
#### File: blurple/ui/base.py
```python
from abc import ABC
import discord
class Base(discord.Embed, ABC):
async def send(self, client: discord.abc.Messageable):
""" Send the component as a message in discord.
:param client: The client used, usually a :class:`discord.abc.Messageable`. Must have implemented :func:`.send`
:returns: :class:`discord.Message`
"""
return await client.send(embed=self)
```
#### File: blurple/ui/style.py
```python
from enum import Enum
class Style(Enum):
""" A contextual class to style components.
There are 9 `main styles <https://cdn.discordapp.com/attachments/598870131182927873/814573079371448330/unknown.png>`_.
- ``PRIMARY``
- ``SECONDARY``
- ``SUCCESS``
- ``DANGER``
- ``WARNING``
- ``INFO``
- ``LIGHT``
- ``DARK``
- ``GHOST``
You can also create a `custom style <https://cdn.discordapp.com/attachments/598870131182927873/811087938678685707/unknown.png>`_:
:Example Usage:
.. code-block:: python
grape_style = (0x9266CC, "\\U0001f347", "Grape")
``PRIMARY`` and ``SECONDARY`` styles use custom emoji, so are unable to be used out of the box.
    To work around this, I've provided the source .svgs in the repository for the custom emojis used throughout the project. You can add these to a server that your bot is in, then create a custom style.
Alternatively, if you want, you can support me on `ko-fi <https://ko-fi.com/s/7705c20532>`_, and I'll invite your bot to my server with the original custom emojis.
"""
PRIMARY = (0x7289DA, "<:primary:808874731763007488>", "Primary")
SECONDARY = (0x99AAB5, "<:secondary:808874731758813205>", "Secondary")
SUCCESS = (0x77B255, "\u2705", "Success")
DANGER = (0xDD2E44, "\U0001f6ab", "Danger")
WARNING = (0xFFCC4D, "\u26a0\ufe0f", "Warning")
INFO = (0x3B88C3, "\u2139\ufe0f", "Info")
LIGHT = (0xE6E7E8, "\U0001f533", "Light")
DARK = (0x31373D, "\U0001f532", "Dark")
GHOST = (0x2f3136, "", "")
def __getitem__(self, key):
return self.value[key]
```
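A small sketch tying `Style` and `Alert` together: built-in styles are enum members, and a custom style is just a `(color, emoji, name)` tuple as the docstring above describes. The surrounding command and `bot` object are assumptions.
```python
# Usage sketch; ctx/bot come from a normal discord.py command context.
import blurple.ui as ui

@bot.command()
async def demo(ctx):
    # Built-in style
    await ui.Alert(ui.Style.SUCCESS, "Saved", "Your settings were updated.").send(ctx)
    # Custom style: a (color, emoji, name) tuple, per the Style docstring
    grape_style = (0x9266CC, "\U0001f347", "Grape")
    await ui.Alert(grape_style, "Juicy").send(ctx)
```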
|
{
"source": "JeremyTrendoff81/Photo-Editing-Software-Using-Cimpl",
"score": 4
}
|
#### File: JeremyTrendoff81/Photo-Editing-Software-Using-Cimpl/T16_image_filters.py
```python
from Cimpl import choose_file, create_color, create_image, get_color,\
set_color, show, copy, save_as, load_image, get_width,\
get_height, Image
# Functions provided from the school for use
def grayscale(image: Image) -> Image:
"""
PROVIDED BY ECOR1051 STAFF FOR USE
Return a grayscale copy of image.
>>> image = load_image(choose_file())
>>> gray_image = grayscale(image)
>>> show(gray_image)
"""
new_image = copy(image)
for x, y, (r, g, b) in image:
# Use the pixel's brightness as the value of RGB components for the
# shade of gray. These means that the pixel's original colour and the
# corresponding gray shade will have approximately the same brightness.
brightness = (r + g + b) // 3
# or, brightness = (r + g + b) / 3
# create_color will convert an argument of type float to an int
gray = create_color(brightness, brightness, brightness)
set_color(new_image, x, y, gray)
return new_image
# Personalized functions for more functionality
def get_individual_colors(pict: Image, x: int, y: int, rgb: str) -> int:
"""
Author: <NAME>
Returns the individual red, green or blue value of the given pixel
in the given image.
Takes an image, two integers and a string as parameters.
The Image parameter represents the image being analyzed.
    The two ints are the x and y coordinates and the string represents which
color to extract.
'r' for red, 'b' for blue and 'g' for green.
>>> test = get_individual_colors(load_image('red_image.jpg'), 1, 1, 'r')
>>> print(test)
155
>>> test = get_individual_colors(load_image('red_image.jpg'), 1, 1, 'g')
>>> print(test)
0
>>> test = get_individual_colors(load_image('red_image.jpg'), 1, 1, 'b')
>>> print(test)
0
"""
r, g, b = get_color(pict, x, y)
if (rgb == 'r'):
return r
elif (rgb == 'b'):
return b
elif (rgb == 'g'):
return g
def adjust_component(color: int) -> int:
"""
Returns the centre value of preset intervals of rgb colors.
    To be used in conjunction with the posterize filter.
>>>adjust_component(61)
31
>>>adjust_component(100)
95
>>>adjust_component(150)
159
>>>adjust_component(200)
223
"""
if 0 <= color <= 63:
return(31)
if 64 <= color <= 127:
return(95)
if 128 <= color <= 191:
return(159)
if 192 <= color <= 255:
return(223)
def tone_filter_color_assigner(color_list: list) -> list:
"""
Author: <NAME>
Returns the RGB values of the colors selected in the
two, three tone filters.
    Accepts a list of colors input by the user and assigns the related RGB
    value to the corresponding position in a list.
>>> tone_filter_color_assigner(['red', 'blue'])
    [(255, 0, 0), (0, 0, 255)]
"""
# Variables
colors = ['black', 'white', 'red', 'lime', 'blue', 'yellow', 'cyan',
'magenta', 'gray']
rgbs = [(0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255),
(255, 255, 0), (0, 255, 255), (255, 0, 255), (128, 128, 128)]
return_list = []
# Color Processing
for color in color_list:
for i in range(len(colors)):
if (color == colors[i]):
return_list.append(rgbs[i])
return return_list
# Red Channel
def red_channel(image: Image) -> Image:
"""
Author: <NAME>
Returns a new image where the color of each pixel has been changed to
just the red value.
    Takes an Image parameter. This Image represents the image that will
be passed through the filter.
>>> image = load_image(choose_file())
>>> red = red_channel(image)
>>> show(red)
returns image
or
>>> show(red_channel(load_image(choose_file())))
returns image
"""
new_image = copy(image) # Creates a copy of the image to filter
for x, y, (r, g, b) in image:
# Creates the new color of the pixel
filtered_red = create_color(r, 0, 0)
# Sets the new color to the pixel
set_color(new_image, x, y, filtered_red)
return new_image
# Green Channel
def green_channel(image: Image) -> Image:
"""
Author: <NAME>
Returns a new image where the color of each pixel has been changed to
just the green value.
    Takes an Image parameter. This Image represents the image that will be passed
through the filter.
>>> image = load_image(choose_file())
>>> green = green_channel(image)
>>> show(green)
returns image
or
>>> show(green_channel(load_image(choose_file())))
returns image
"""
new_image = copy(image) # Creates a copy of the image to filter
for x, y, (r, g, b) in image:
# Creates the new color of the pixel
filtered_green = create_color(0, g, 0)
# Sets the new color to the pixel
set_color(new_image, x, y, filtered_green)
return new_image
# Blue Channel
def blue_channel(image: Image) -> Image:
"""
Author: <NAME>
Returns a new image where the colour of each pixel has been changed to only
have its blue value, when given a image.
>>> image = load_image(choose_file())
>>> blue = blue_channel(image)
>>> show(blue)
returns image
or
>>> show(blue_channel(load_image(choose_file())))
returns image
"""
new_image = copy(image) # Creates a copy of the image to filter
for x, y, (r, g, b) in image:
# Creates the new color of the pixel
filtered_blue = create_color(0, 0, b)
# Sets the new color to the pixel
set_color(new_image, x, y, filtered_blue)
return new_image
# Combine
def combine(red: Image, green: Image, blue: Image) -> Image:
"""
Author: <NAME>
    Return an image called combined_image that contains the combination
of the 3 channels.
>>>combine(load_image("red_image.jpg"),load_image("green_image.jpg"), \
load_image("blue_image.jpg")))
returns full color image called combined_image
"""
combined_image = copy(red) # Copies the image
for x, y, (r, g, b) in red:
# Assigns the RGB values to variables
red_pix = r
green_pix = get_individual_colors(green, x, y, 'g')
blue_pix = get_individual_colors(blue, x, y, 'b')
# Sets the color of the new image
set_color(combined_image, x, y, create_color(
red_pix, green_pix, blue_pix))
save_as(combined_image, 'combined_image.png') # Saves the image
return combined_image
# Two-tone
def two_tone(image: Image, color_1: str, color_2: str) -> Image:
"""
Author: <NAME>
Note: To use this filter, the Cimpl file must be saved in the same
location as this filter.
Returns a two-tone version of the image.
Takes an Image parameter to represent the image and two strings to
represent the two colors.
>>> image = load_image(choose_file())
    >>> new_image = two_tone(image, 'black', 'white')
    >>> show(new_image)
    Returns a two-toned image and displays it
    or
    >>> show(two_tone(load_image(choose_file()), 'black', 'white'))
    Returns two-toned image and displays it
"""
# Variables needed to execute the filters
color_values = tone_filter_color_assigner([color_1, color_2])
color_1_rgb = color_values[0]
color_2_rgb = color_values[1]
# Image Processing
new_image = copy(image) # Makes a copy of the image
for x, y, (r, g, b) in new_image:
avg = (r + g + b) // 3 # Calculates the average of the RGB values
# Chooses what RGB value to use for each pixel based on the average RGB
if (avg >= 0 and avg <= 127):
set_color(new_image, x, y,
create_color(color_1_rgb[0],
color_1_rgb[1], color_1_rgb[2]))
elif (avg >= 128 and avg <= 255):
set_color(new_image, x, y,
create_color(color_2_rgb[0],
color_2_rgb[1], color_2_rgb[2]))
return new_image # Returns the image
# Three-tone
def three_tone(image: Image, color_1: str, color_2: str, color_3: str) -> Image:
"""
Author: <NAME>
    Note: To use this filter, the Cimpl file must be saved in the same place
as the filter.
Returns a three-tone version of the image.
Takes an Image parameter to represent the image and three strings to
represent the three colors.
>>> image = load_image(choose_file())
    >>> new_image = three_tone(image, 'black', 'gray', 'white')
>>> show(new_image)
Returns a three-toned image and displays it
or
    >>> show(three_tone(load_image(choose_file()), 'black', 'gray', 'white'))
Returns three-toned image and displays it
"""
# Variables needed to execute the filter
color_values = tone_filter_color_assigner([color_1, color_2, color_3])
color_1_rgb = color_values[0]
color_2_rgb = color_values[1]
color_3_rgb = color_values[2]
# Image Processing
new_image = copy(image) # Creates a copy of the image
for x, y, (r, g, b) in new_image:
avg = (r + g + b) // 3 # calculates the average color value
# Chooses which color to use for each pixel based on the average value
if (avg >= 0 and avg <= 84):
set_color(new_image, x, y,
create_color(color_1_rgb[0],
color_1_rgb[1], color_1_rgb[2]))
elif (avg >= 85 and avg <= 170):
set_color(new_image, x, y,
create_color(color_2_rgb[0],
color_2_rgb[1], color_2_rgb[2]))
elif (avg >= 171 and avg <= 255):
set_color(new_image, x, y,
create_color(color_3_rgb[0],
color_3_rgb[1], color_3_rgb[2]))
return new_image # Returns the image
# Sepia Tinting
def sepia_tinting(image: Image) -> Image:
"""
Author: <NAME>
Note: To use this filter, the Cimpl file must be saved in the same place as
this filter.
Returns a sepia tinted version of the image.
    Takes an Image parameter to represent the image being transformed.
>>> image = load_image(choose_file())
>>> new_image = sepia_tinting(image)
>>> show(new_image)
Returns sepia tinted image and displays it
or
>>> show(sepia_tinting(load_image(choose_file())))
Returns sepia tinted image and displays it
"""
new_image = copy(image) # Makes a copy of the image
new_image = grayscale(new_image) # Puts the image through the grayscale
# filter in order to then do sepia tinting
# Image Processing
for x, y, (r, g, b) in new_image:
avg = (r + g + b) // 3 # Calculates the average color value
# Selects the degree of change with the rgb values based on the average
# value
if (avg < 63):
col = create_color(r * 1.1, g, b * 0.9)
set_color(new_image, x, y, col)
elif (avg >= 63 and avg <= 191):
col = create_color(r * 1.15, g, b * 0.85)
set_color(new_image, x, y, col)
elif (avg > 191):
col = create_color(r * 1.08, g, b * 0.93)
set_color(new_image, x, y, col)
return new_image # returns the image
# Posterize
def posterize(image: Image) -> Image:
"""
Return an image with a smaller range of color than the original
aka posterized.
Requires adjust_component function.
>>>posterize(p2-original.png)
returns posterized image
"""
poster = copy(image) # Copies the image
# Image Processing
for x, y, (r, g, b) in image:
set_color(poster, x, y, create_color(adjust_component(r),
adjust_component(g),
adjust_component(b)))
return(poster)
# Extreme Contrast
def extreme_contrast(image: Image) -> Image:
"""
Author: <NAME>
    Returns an extreme-contrast version of the image
    >>> image = load_image(choose_file())
>>> new_image = extreme_contrast(image)
>>> show(new_image)
returns an image
"""
new_image = copy(image) # Makes a copy of the image
for x, y, (r, g, b) in image: # Extreme contrasts each pixel
if 0 <= r <= 127:
r = 0
else:
r = 255
if 0 <= g <= 127:
g = 0
else:
g = 255
if 0 <= b <= 127:
b = 0
else:
b = 255
# Creates the new pixels by combining rgb values
filtered = create_color(r, g, b)
set_color(new_image, x, y, filtered) # Creates filtered image
return new_image # Returns the image
# Edge Detection
def detect_edges(image: Image, threshold: int) -> Image:
"""
Author: <NAME>
    Note: To use this filter, the Cimpl file must be saved in the same place
as the filter.
Returns a version of the image that looks like a pencil sketch.
Takes an Image parameter to represent the image and an integer to represent
a threshold.
>>> image = load_image(choose_file())
    >>> new_image = detect_edges(image, 10)
>>> show(new_image)
Returns a pencil sketch image and displays it
or
    >>> show(detect_edges(load_image(choose_file()), 10))
Returns pencil sketch image and displays it
"""
height = get_height(image) # Gets the height of the image.
new_image = copy(image) # Copies the image
# Image Processing
for x, y, (r, g, b) in new_image:
brightness = (r + g + b) // 3 # Calculates the brightness
if ((y + 1) < height): # If there is a pixel below...
# Gets the color of the pixel below
color_below = get_color(new_image, x, y + 1)
# Calculates the brightness of the pixel below
brightness_below = (
color_below[0] + color_below[1] + color_below[2]) // 3
# Calculates the contrast of the two pixels
contrast = abs(brightness - brightness_below)
else: # If there is no pixel below...
# Set the pixel's color to white.
set_color(new_image, x, y, create_color(255, 255, 255))
contrast = 0 # Sets contrast to 0
# If the contrast is greater than the threshold...
if (contrast > threshold):
# Set the pixel's color to black
set_color(new_image, x, y, create_color(0, 0, 0))
else: # Otherwise...
# Set the pixel's color to white
set_color(new_image, x, y, create_color(255, 255, 255))
return new_image
# Improved Edge Detection
def detect_edges_better(image: Image, threshold: int) -> Image:
"""
Author: <NAME>
    Note: To use this filter, the Cimpl file must be saved in the same place
as the filter.
Returns a version of the image that looks like a pencil sketch.
Takes an Image parameter to represent the image and an integer to
represent a threshold.
>>> image = load_image(choose_file())
    >>> new_image = detect_edges_better(image, 10)
>>> show(new_image)
Returns a pencil sketch image and displays it
or
    >>> show(detect_edges_better(load_image(choose_file()), 10))
Returns pencil sketch image and displays it
"""
new_image = copy(image) # Copies the image
height = get_height(new_image) # Gets the height of the image
width = get_width(new_image) # Gets the width of the image
# Image Processing
for x, y, (r, g, b) in new_image:
brightness = (r + g + b) // 3 # Calculates the brightness
        # Statements for the pixel below
if ((y + 1) < height): # If there is a pixel below...
# Gets the color of the pixel below
color_below = get_color(new_image, x, y + 1)
# Calculates the brightness of the pixel below
brightness_below = (
color_below[0] + color_below[1] + color_below[2]) // 3
# Calculates the contrast of the two pixels
contrast_below = abs(brightness - brightness_below)
else: # If there is no pixel below...
# Set the pixel's color to white.
set_color(new_image, x, y, create_color(255, 255, 255))
contrast_below = 0 # Sets contrast to 0
# Statements for pixel to the right
if ((x + 1) < width): # If there is a pixel to the right...
# Gets the color of the pixel to the right
color_right = get_color(new_image, x + 1, y)
# Calculates the brightness of the pixel to the right
brightness_right = (
color_right[0] + color_right[1] + color_right[2]) // 3
# Calculates the contrast of the two pixels
contrast_right = abs(brightness - brightness_right)
else: # If there is no pixel to the right...
# Set the pixel's color to white.
set_color(new_image, x, y, create_color(255, 255, 255))
contrast_right = 0 # Sets contrast to 0
# Statements to change the colors based on contrast
# If the contrast is greater than the threshold...
if (contrast_below > threshold or contrast_right > threshold):
# Set the pixel's color to black
set_color(new_image, x, y, create_color(0, 0, 0))
else: # Otherwise...
# Set the pixel's color to white
set_color(new_image, x, y, create_color(255, 255, 255))
return new_image
# Flip Vertical
def flip_vertical(image: Image) -> Image:
"""
Author: <NAME>
    Note: To use this filter, the Cimpl file must be saved in the same
place as the filter.
Returns a vertically flipped version of the image.
    The vertical flip switches the pixels across an invisible,
vertical line through the center of the image.
Takes an Image parameter to represent the image.
>>> image = load_image(choose_file())
>>> new_image = flip_vertical(image)
>>> show(new_image)
    Returns a vertically flipped image and displays it
or
>>> show(flip_vertical(load_image(choose_file())))
    Returns vertically flipped image and displays it
"""
new_image = copy(image) # Copies the image
height = get_height(new_image) # Gets the height
width = get_width(new_image) # Gets the width
# Image Processing
for x in range(width // 2):
for y in range(height):
            # The pixel opposite across the vertical centre line
            opposite_x = width - 1 - x
            # Get the color of both pixels
            pixel_color = get_color(new_image, x, y)
            other_pixel_color = get_color(new_image, opposite_x, y)
            # Swap the color values
            set_color(new_image, x, y,
                      create_color(other_pixel_color[0],
                                   other_pixel_color[1],
                                   other_pixel_color[2]))
            set_color(new_image, opposite_x, y,
                      create_color(pixel_color[0],
                                   pixel_color[1],
                                   pixel_color[2]))
return new_image
# Horizontal Flip
def flip_horizontal(image: Image) -> Image:
"""
Author: <NAME>
    Note: To use this filter, the Cimpl file must be saved in the same place
as the filter.
Returns a horizontally flipped version of the image. The horizontal flip
    switches the pixels across an invisible, horizontal line through
the center of the image.
Takes an Image parameter to represent the image.
>>> image = load_image(choose_file())
>>> new_image = flip_horizontal(image)
>>> show(new_image)
    Returns a horizontally flipped image and displays it
or
>>> show(flip_horizontal(load_image(choose_file())))
    Returns horizontally flipped image and displays it
"""
new_image = copy(image) # Copies the image
height = get_height(new_image) # Gets the height
width = get_width(new_image) # Gets the width
# Image Processing
for y in range(height // 2):
for x in range(width):
            # The pixel opposite across the horizontal centre line
            opposite_y = height - 1 - y
            # Get the color of both pixels
            pixel_color = get_color(new_image, x, y)
            other_pixel_color = get_color(new_image, x, opposite_y)
            # Swap the color values
            set_color(new_image, x, y,
                      create_color(other_pixel_color[0],
                                   other_pixel_color[1],
                                   other_pixel_color[2]))
            set_color(new_image, x, opposite_y,
                      create_color(pixel_color[0],
                                   pixel_color[1], pixel_color[2]))
return new_image
# This is a test script you can use if you would like to try the filter code.
# If you choose to do this, please save the 'p2-original.jpg' image alongside this file.
# This saved-image approach is used so that all the filters run quickly.
# Except for the combine filter: select the red, green and blue images from P2
# to run that one.
# show(red_channel(load_image('p2-original.jpg')))
# show(green_channel(load_image('p2-original.jpg')))
# show(blue_channel(load_image('p2-original.jpg')))
# show(combine(load_image(choose_file()),load_image(choose_file()),load_image(choose_file())))
# show(two_tone(load_image('p2-original.jpg'), 'black', 'white'))
# show(three_tone(load_image('p2-original.jpg'), 'black', 'white', 'red'))
# show(sepia_tinting(load_image('p2-original.jpg')))
# show(posterize(load_image('p2-original.jpg')))
# show(extreme_contrast(load_image('p2-original.jpg')))
# show(detect_edges(load_image('p2-original.jpg'), 10))
# show(detect_edges_better(load_image('p2-original.jpg'), 10))
# show(flip_vertical(load_image('p2-original.jpg')))
# show(flip_horizontal(load_image('p2-original.jpg')))
```
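Because every filter returns a new `Image`, the filters above also compose; a short sketch run in the same module (file names are placeholders):
```python
# Filters compose since each returns a new Image; file names are placeholders.
original = load_image('p2-original.jpg')
posterized_edges = detect_edges_better(posterize(original), 10)
save_as(posterized_edges, 'p2-posterized-edges.png')
show(posterized_edges)
```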
|
{
"source": "jeremytrips/django_base_project",
"score": 2
}
|
#### File: api/views/deleteacountview.py
```python
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT, HTTP_201_CREATED, HTTP_206_PARTIAL_CONTENT
from rest_framework.views import APIView
from django.contrib.auth import get_user_model
from users.permissions import IsEmailVerfied, IsAccountVisible
class DeleteAccount(APIView):
"""
Manage the user deletion.
"""
permission_classes = [IsEmailVerfied, ]
def post(self, request):
user = request.user
user.is_active = False
user.save()
return Response(data=["DELETED"], status=HTTP_200_OK)
```
#### File: api/views/reportuser.py
```python
from rest_framework.views import APIView
from rest_framework.status import HTTP_201_CREATED,HTTP_400_BAD_REQUEST
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from django.contrib.auth import get_user_model
from users.api.serializers.userreportserializer import UserReportSerializer
from users.models import ReportedUser
from users.permissions import IsActive, IsEmailVerfied
User = get_user_model()
class ReportUser(APIView):
"""
View used to report a user.
"""
permission_classes = [IsAuthenticated, IsActive, IsEmailVerfied]
def post(self, request):
ser = UserReportSerializer(data=request.data)
if ser.is_valid():
try:
user_reported = User.objects.get(pk=ser.validated_data["reported_user"])
except User.DoesNotExist:
return Response(data=["REPORTED_USER_DOES_NOT_EXIST"], status=HTTP_400_BAD_REQUEST)
if not user_reported.settings.is_email_verified:
return Response(data=["REPORTED_USER_NOT_EMAIL_VERIFIED"], status=HTTP_400_BAD_REQUEST)
if user_reported == request.user:
return Response(data=["SELF_REPORT_NOT_ALLOWED"], status=HTTP_400_BAD_REQUEST)
report = ReportedUser.objects.create(
user_report_description=ser.validated_data["reason"]
)
report.user_reported.add(user_reported)
report.user_reporting.add(request.user)
return Response(status=HTTP_201_CREATED)
else:
return Response(status=HTTP_400_BAD_REQUEST)
```
#### File: user_base/users/emailconfirmation.py
```python
from django.core.mail import send_mail
from django.conf import settings
from users.models import EmailVerificationToken
from django.contrib.auth import get_user_model
from threading import Thread
def send_confirmation_email(user, token):
t = Thread(target=_send_confirmation_email, args=(user, token.token))
t.start()
def _send_confirmation_email(user, token):
send_mail(
subject=settings.VERIFICATION_EMAIL_SUBJECT,
message=settings.VERIFICATION_EMAIL_CONTENT.format(first_name=user.first_name, token=token),
from_email=None,
recipient_list=[user.email,],
fail_silently=False
)
def verify_email(user, furnished_token):
try:
user_token = EmailVerificationToken.objects.get(user_owner=user)
except EmailVerificationToken.DoesNotExist:
return None, False
if user_token.token == furnished_token:
user_token.delete()
return user, True
else:
return None, False
```
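A sketch of how these helpers might be wired into a verification endpoint. Only `verify_email` and the `settings.is_email_verified` flag come from the project code above; the view itself and its URL wiring are assumptions.
```python
# Hypothetical verification view built on verify_email(); not part of the repository.
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST

from users.emailconfirmation import verify_email

class VerifyEmail(APIView):
    def post(self, request):
        token = request.data.get("token", "")
        user, verified = verify_email(request.user, token)
        if not verified:
            return Response(data=["BAD_TOKEN"], status=HTTP_400_BAD_REQUEST)
        user.settings.is_email_verified = True
        user.settings.save()
        return Response(data=["EMAIL_VERIFIED"], status=HTTP_200_OK)
```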
#### File: user_base/users/models.py
```python
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.db import models
from inclusive_django_range_fields import InclusiveIntegerRangeField
from users.managers import CustomUserManager
class Settings(models.Model):
is_email_verified = models.BooleanField(default=False)
account_is_visible = models.BooleanField(default = True)
push_notification_new_relation = models.BooleanField(default=True)
mail_notification_new_relation = models.BooleanField(default=True)
push_notification_new_message = models.BooleanField(default=True)
mail_notification_new_message = models.BooleanField(default=True)
class CustomUser(AbstractBaseUser, PermissionsMixin):
    email = models.EmailField(unique=True) # Email address used to contact the user and to log in
    noma = models.CharField(max_length=10) # Student registration number
    student_card = models.ImageField(upload_to="image/student_card/") # Student card used to verify email/noma/name and account validity
    first_name = models.CharField(max_length=20,) # Student's first name
    last_name = models.CharField(max_length=20,) # Student's last name
    studies = models.CharField(max_length=30,) # Student's field of study
    home_address = models.CharField(max_length=30,) # Place of residence off campus
    birth_date = models.DateTimeField(auto_now=True, blank=True) # Student's date of birth
    description = models.CharField(max_length=250, blank=True) # Description visible to other users
    age_lower_bound = models.PositiveSmallIntegerField(default=16) # Lower age limit for a partner
    age_upper_bound = models.PositiveSmallIntegerField(default=100) # Upper age limit for a partner
    profile_pic_one = models.ImageField(blank=True, default = "/static/no_img.png", upload_to="image/profile/") # User's first profile picture
    profile_pic_two = models.ImageField(blank=True, upload_to="image/profile/") # User's second profile picture
    profile_pic_three = models.ImageField(blank=True, upload_to="image/profile/") # User's third profile picture
settings = models.OneToOneField(Settings, on_delete=models.CASCADE)
is_admin = models.BooleanField(default=False)
is_staff = models.BooleanField(default=False)
is_active = models.BooleanField(default=True)
objects = CustomUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = []
def get_user_report(self):
        print(ReportedUser.objects.get(user_reported=self))
# todo
def save(self, *args, **kwargs):
return super(CustomUser, self).save(*args, **kwargs)
def __str__(self):
return self.email
class EmailVerificationToken(models.Model):
"""
    Token created when a user registers on the site. The token is then sent to
    the user so they can verify their email address.
"""
user_owner = models.ForeignKey(CustomUser, related_name="user", on_delete=models.CASCADE)
token = models.CharField(max_length=6)
class ReportedUser(models.Model):
"""
Model that will store reported user.
"""
user_reporting = models.ManyToManyField(to=CustomUser, related_name="user_reporting")
user_reported = models.ManyToManyField(to=CustomUser, related_name="user_reported")
user_report_description = models.CharField(max_length = 250)
```
#### File: user_base/users/permissions.py
```python
from rest_framework.permissions import BasePermission
class IsEmailVerfied(BasePermission):
def has_permission(self, request, view):
"""
        Check that the user has verified their email address.
"""
return request.user.settings.is_email_verified
class IsAccountVisible(BasePermission):
def has_permission(self, request, view):
"""
Check if user is in ghost mode.
"""
return request.user.settings.account_is_visible
class IsActive(BasePermission):
def has_permission(self, request, view):
"""
        Check that the user's is_active boolean is True. That boolean is set to False if the user deletes their account.
"""
return request.user.is_active
```
|
{
"source": "JeremyTsaii/LeetHub",
"score": 4
}
|
#### File: JeremyTsaii/LeetHub/add-two-numbers.py
```python
class Solution(object):
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
p1 = l1
p2 = l2
carry = 0
head = ListNode(0)
cur = head
while(p1 or p2):
val = carry
if p1:
val += p1.val
p1 = p1.next
if p2:
val += p2.val
p2 = p2.next
carry = val//10
val = val%10
cur.next = ListNode(val)
cur = cur.next
if carry:
cur.next = ListNode(1)
return head.next
```
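LeetCode injects the `ListNode` class implicitly; for a self-contained check of the solution above, a minimal definition and a hand-rolled test can look like this (the helper name is illustrative only):
```python
class ListNode(object):
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next

def build(digits):
    head = ListNode(0)
    cur = head
    for d in digits:
        cur.next = ListNode(d)
        cur = cur.next
    return head.next

# 342 + 465 = 807, with digits stored in reverse order
node = Solution().addTwoNumbers(build([2, 4, 3]), build([5, 6, 4]))
out = []
while node:
    out.append(node.val)
    node = node.next
assert out == [7, 0, 8]
```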
#### File: LeetHub/merge-k-sorted-lists/merge-k-sorted-lists.py
```python
class Solution(object):
def mergeKLists(self, lists):
"""
:type lists: List[ListNode]
:rtype: ListNode
"""
arr = []
for link in lists:
while(link):
arr.append(link.val)
link = link.next
arr.sort()
head = ListNode(0)
cur = head
for val in arr:
cur.next = ListNode(val)
cur = cur.next
return head.next
```
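The solution above flattens every list and sorts, which is O(N log N); the usual alternative is a min-heap merge in O(N log k). A sketch, assuming the same implicit `ListNode` as above (the index in the heap tuple breaks ties so nodes are never compared directly):
```python
import heapq

class SolutionHeap(object):
    def mergeKLists(self, lists):
        # Seed the heap with the head of each non-empty list.
        heap = [(node.val, i, node) for i, node in enumerate(lists) if node]
        heapq.heapify(heap)
        head = ListNode(0)
        cur = head
        while heap:
            val, i, node = heapq.heappop(heap)
            cur.next = ListNode(val)
            cur = cur.next
            if node.next:
                heapq.heappush(heap, (node.next.val, i, node.next))
        return head.next
```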
|
{
"source": "jeremyvdw/aurora",
"score": 2
}
|
#### File: client/api/health_check.py
```python
from abc import abstractmethod
from twitter.common import log
from twitter.common.lang import Interface
from gen.apache.aurora.api.ttypes import ScheduleStatus
class HealthCheck(Interface):
@abstractmethod
def health(self, task):
"""Checks health of the task and returns a True or False."""
class HealthStatus(object):
@classmethod
def alive(cls):
return cls(True).health()
@classmethod
def dead(cls):
return cls(False).health()
def __init__(self, health):
self._health = health
def health(self):
return self._health
class StatusHealthCheck(HealthCheck):
"""Verifies the health of a task based on the task status. A task is healthy iff,
1. A task is in state RUNNING
2. A task that satisfies (1) and is already known has the same task id.
"""
def __init__(self):
self._task_ids = {}
def health(self, task):
task_id = task.assignedTask.taskId
instance_id = task.assignedTask.instanceId
status = task.status
if status == ScheduleStatus.RUNNING:
if instance_id in self._task_ids:
if task_id == self._task_ids.get(instance_id):
return HealthStatus.alive()
else:
return HealthStatus.dead()
else:
log.info('Detected RUNNING instance %s' % instance_id)
self._task_ids[instance_id] = task_id
return HealthStatus.alive()
else:
return HealthStatus.dead()
```
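A sketch of driving the status-based check above during an update: real `ScheduledTask` objects come from a scheduler query, and the import path is inferred from the file header, so treat both as assumptions.
```python
# Illustrative only; tasks normally come from the scheduler API.
from apache.aurora.client.api.health_check import StatusHealthCheck  # assumed path

checker = StatusHealthCheck()

def instances_healthy(tasks):
    """tasks: iterable of ScheduledTask-like objects with .status and .assignedTask."""
    return all(checker.health(task) for task in tasks)
```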
#### File: client/api/scheduler_client.py
```python
import functools
import threading
import time
import traceback
import requests
from pystachio import Default, Integer, String
from thrift.protocol import TBinaryProtocol
from thrift.transport import TTransport
from twitter.common import log
from twitter.common.concurrent import Timeout, deadline
from twitter.common.quantity import Amount, Time
from twitter.common.zookeeper.kazoo_client import TwitterKazooClient
from twitter.common.zookeeper.serverset import ServerSet
from apache.aurora.common.auth.auth_module_manager import get_auth_handler
from apache.aurora.common.cluster import Cluster
from apache.aurora.common.transport import TRequestsTransport
from gen.apache.aurora.api import AuroraAdmin
from gen.apache.aurora.api.constants import BYPASS_LEADER_REDIRECT_HEADER_NAME
from gen.apache.aurora.api.ttypes import ResponseCode
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin
class SchedulerClientTrait(Cluster.Trait):
zk = String # noqa
zk_port = Default(Integer, 2181) # noqa
scheduler_zk_path = String # noqa
scheduler_uri = String # noqa
proxy_url = String # noqa
auth_mechanism = Default(String, 'UNAUTHENTICATED') # noqa
def _bypass_leader_redirect_session_factory(should_bypass=False):
session = requests.session()
if should_bypass:
session.headers[BYPASS_LEADER_REDIRECT_HEADER_NAME] = 'true'
return session
class SchedulerClient(object):
THRIFT_RETRIES = 5
RETRY_TIMEOUT = Amount(1, Time.SECONDS)
class Error(Exception): pass
class CouldNotConnect(Error): pass
# TODO(wickman) Refactor per MESOS-3005 into two separate classes with separate traits:
# ZookeeperClientTrait
# DirectClientTrait
@classmethod
def get(cls, cluster, auth_factory=get_auth_handler, **kwargs):
if not isinstance(cluster, Cluster):
raise TypeError('"cluster" must be an instance of Cluster, got %s' % type(cluster))
cluster = cluster.with_trait(SchedulerClientTrait)
auth_handler = auth_factory(cluster.auth_mechanism)
if cluster.zk:
return ZookeeperSchedulerClient(cluster, port=cluster.zk_port, auth=auth_handler, **kwargs)
elif cluster.scheduler_uri:
return DirectSchedulerClient(cluster.scheduler_uri, auth=auth_handler, **kwargs)
else:
raise ValueError('"cluster" does not specify zk or scheduler_uri')
def __init__(self, auth, user_agent, verbose=False, bypass_leader_redirect=False):
self._client = None
self._auth_handler = auth
self._user_agent = user_agent
self._verbose = verbose
self._bypass_leader_redirect = bypass_leader_redirect
def get_thrift_client(self):
if self._client is None:
self._client = self._connect()
return self._client
def get_failed_auth_message(self):
return self._auth_handler.failed_auth_message
# per-class implementation -- mostly meant to set up a valid host/port
# pair and then delegate the opening to SchedulerClient._connect_scheduler
def _connect(self):
return None
def _connect_scheduler(self, uri, clock=time):
transport = TRequestsTransport(
uri,
auth=self._auth_handler.auth(),
user_agent=self._user_agent,
session_factory=functools.partial(
_bypass_leader_redirect_session_factory,
should_bypass=self._bypass_leader_redirect))
protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)
schedulerClient = AuroraAdmin.Client(protocol)
for _ in range(self.THRIFT_RETRIES):
try:
transport.open()
return schedulerClient
except TTransport.TTransportException:
clock.sleep(self.RETRY_TIMEOUT.as_(Time.SECONDS))
continue
except Exception as e:
# Monkey-patched proxies, like socks, can generate a proxy error here.
# without adding a dependency, we can't catch those in a more specific way.
raise self.CouldNotConnect('Connection to scheduler failed: %s' % e)
raise self.CouldNotConnect('Could not connect to %s' % uri)
class ZookeeperSchedulerClient(SchedulerClient):
SERVERSET_TIMEOUT = Amount(10, Time.SECONDS)
@classmethod
def get_scheduler_serverset(cls, cluster, port=2181, verbose=False, **kw):
if cluster.zk is None:
raise ValueError('Cluster has no associated zookeeper ensemble!')
if cluster.scheduler_zk_path is None:
raise ValueError('Cluster has no defined scheduler path, must specify scheduler_zk_path '
'in your cluster config!')
hosts = [h + ':{p}' for h in cluster.zk.split(',')]
zk = TwitterKazooClient.make(str(','.join(hosts).format(p=port)), verbose=verbose)
return zk, ServerSet(zk, cluster.scheduler_zk_path, **kw)
def __init__(self, cluster, port=2181, verbose=False, _deadline=deadline, **kwargs):
SchedulerClient.__init__(self, verbose=verbose, **kwargs)
self._cluster = cluster
self._zkport = port
self._endpoint = None
self._uri = None
self._deadline = _deadline
def _resolve(self):
"""Resolve the uri associated with this scheduler from zookeeper."""
joined = threading.Event()
def on_join(elements):
joined.set()
zk, serverset = self.get_scheduler_serverset(self._cluster, verbose=self._verbose,
port=self._zkport, on_join=on_join)
joined.wait(timeout=self.SERVERSET_TIMEOUT.as_(Time.SECONDS))
try:
# Need to perform this operation in a separate thread, because kazoo will wait for the
# result of this serverset evaluation indefinitely, which will prevent people killing
# the client with keyboard interrupts.
serverset_endpoints = self._deadline(lambda: list(serverset),
timeout=self.SERVERSET_TIMEOUT.as_(Time.SECONDS), daemon=True, propagate=True)
except Timeout:
raise self.CouldNotConnect("Failed to connect to Zookeeper within %d seconds." %
self.SERVERSET_TIMEOUT.as_(Time.SECONDS))
if len(serverset_endpoints) == 0:
raise self.CouldNotConnect('No schedulers detected in %s!' % self._cluster.name)
instance = serverset_endpoints[0]
if 'https' in instance.additional_endpoints:
endpoint = instance.additional_endpoints['https']
self._uri = 'https://%s:%s' % (endpoint.host, endpoint.port)
elif 'http' in instance.additional_endpoints:
endpoint = instance.additional_endpoints['http']
self._uri = 'http://%s:%s' % (endpoint.host, endpoint.port)
zk.stop()
def _connect(self):
if self._uri is None:
self._resolve()
if self._uri is not None:
return self._connect_scheduler(urljoin(self._uri, 'api'))
@property
def url(self):
proxy_url = self._cluster.proxy_url
if proxy_url:
return proxy_url
return self.raw_url
@property
def raw_url(self):
if self._uri is None:
self._resolve()
if self._uri:
return self._uri
class DirectSchedulerClient(SchedulerClient):
def __init__(self, uri, verbose=True, **kwargs):
SchedulerClient.__init__(self, verbose=verbose, **kwargs)
self._uri = uri
def _connect(self):
return self._connect_scheduler(urljoin(self._uri, 'api'))
@property
def url(self):
return self._uri
@property
def raw_url(self):
return self._uri
class SchedulerProxy(object):
"""
This class is responsible for creating a reliable thrift client to the
twitter scheduler. Basically all the dirty work needed by the
AuroraClientAPI.
"""
CONNECT_MAXIMUM_WAIT = Amount(1, Time.MINUTES)
RPC_RETRY_INTERVAL = Amount(5, Time.SECONDS)
RPC_MAXIMUM_WAIT = Amount(10, Time.MINUTES)
class Error(Exception): pass
class TimeoutError(Error): pass
class TransientError(Error): pass
class AuthError(Error): pass
class APIVersionError(Error): pass
class ThriftInternalError(Error): pass
class NotRetriableError(Error): pass
def __init__(self, cluster, verbose=False, **kwargs):
self.cluster = cluster
# TODO(Sathya): Make this a part of cluster trait when authentication is pushed to the transport
# layer.
self._client = self._scheduler_client = None
self.verbose = verbose
self._lock = threading.RLock()
self._terminating = threading.Event()
self._kwargs = kwargs
def with_scheduler(method):
"""Decorator magic to make sure a connection is made to the scheduler"""
def _wrapper(self, *args, **kwargs):
if not self._client:
self._construct_scheduler()
return method(self, *args, **kwargs)
return _wrapper
def invalidate(self):
self._client = self._scheduler_client = None
def terminate(self):
"""Requests immediate termination of any retry attempts and invalidates client."""
self._terminating.set()
self.invalidate()
@with_scheduler
def client(self):
return self._client
@with_scheduler
def scheduler_client(self):
return self._scheduler_client
def _construct_scheduler(self):
"""
Populates:
self._scheduler_client
self._client
"""
self._scheduler_client = SchedulerClient.get(self.cluster, verbose=self.verbose, **self._kwargs)
assert self._scheduler_client, "Could not find scheduler (cluster = %s)" % self.cluster.name
start = time.time()
while (time.time() - start) < self.CONNECT_MAXIMUM_WAIT.as_(Time.SECONDS):
try:
# this can wind up generating any kind of error, because it turns into
# a call to a dynamically set authentication module.
self._client = self._scheduler_client.get_thrift_client()
break
except SchedulerClient.CouldNotConnect as e:
log.warning('Could not connect to scheduler: %s' % e)
except Exception as e:
# turn any auth module exception into an auth error.
log.debug('Warning: got an unknown exception during authentication:')
log.debug(traceback.format_exc())
raise self.AuthError('Error connecting to scheduler: %s' % e)
if not self._client:
raise self.TimeoutError('Timed out trying to connect to scheduler at %s' % self.cluster.name)
def __getattr__(self, method_name):
# If the method does not exist, getattr will return AttributeError for us.
method = getattr(AuroraAdmin.Client, method_name)
if not callable(method):
return method
@functools.wraps(method)
def method_wrapper(*args, **kwargs):
retry = kwargs.get('retry', False)
with self._lock:
start = time.time()
while not self._terminating.is_set() and (
time.time() - start) < self.RPC_MAXIMUM_WAIT.as_(Time.SECONDS):
try:
method = getattr(self.client(), method_name)
if not callable(method):
return method
resp = method(*args)
if resp is not None and resp.responseCode == ResponseCode.ERROR_TRANSIENT:
raise self.TransientError(", ".join(
[m.message for m in resp.details] if resp.details else []))
return resp
except TRequestsTransport.AuthError as e:
log.error(self.scheduler_client().get_failed_auth_message())
raise self.AuthError(e)
except TTransport.TTransportException as e:
# Client does not know if the request has been received and processed by
# the scheduler, therefore the call is retried if it is idempotent.
if not self._terminating.is_set():
if retry:
log.warning('Transport error communicating with scheduler: %s, retrying...' % e)
self.invalidate()
self._terminating.wait(self.RPC_RETRY_INTERVAL.as_(Time.SECONDS))
else:
raise self.NotRetriableError('Transport error communicating with scheduler during '
'non-idempotent operation: %s, not retrying' % e)
except (self.TimeoutError, self.TransientError) as e:
# If it is TimeoutError then the connection with scheduler could not
# be established, therefore the call did not go through.
# If it is TransientError then the scheduler could not process the call
# because its storage is not in READY state.
# In both cases, the call can be safely retried.
if not self._terminating.is_set():
log.warning('Connection error with scheduler: %s, reconnecting...' % e)
self.invalidate()
self._terminating.wait(self.RPC_RETRY_INTERVAL.as_(Time.SECONDS))
except Exception as e:
# Take any error that occurs during the RPC call, and transform it
# into something clients can handle.
if not self._terminating.is_set():
raise self.ThriftInternalError("Error during thrift call %s to %s: %s" %
(method_name, self.cluster.name, e))
if not self._terminating.is_set():
raise self.TimeoutError('Timed out attempting to issue %s to %s' % (
method_name, self.cluster.name))
return method_wrapper
```
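A brief, heavily hedged sketch of the proxy in use: the cluster definition and RPC choice are placeholders, and `getJobs` is simply one of the generated thrift methods that `__getattr__` above forwards with retry handling.
```python
# Hypothetical usage; cluster values are placeholders, not a real deployment.
from apache.aurora.common.cluster import Cluster

cluster = Cluster(name='example', scheduler_uri='http://scheduler.example.com:8081')
proxy = SchedulerProxy(cluster, verbose=True, user_agent='example-client/1.0')
resp = proxy.getJobs('www-data', retry=True)  # retried on transport errors because it is idempotent
```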
#### File: client/binding_helpers/docker_helper.py
```python
from pystachio import Ref
from pystachio.matcher import Any, Matcher
from apache.aurora.client.binding_helper import BindingHelper
from apache.aurora.client.docker.docker_client import DockerRegistryClient
from apache.aurora.common.clusters import CLUSTERS
class DockerBindingHelper(BindingHelper):
@property
def name(self):
return 'docker'
@property
def matcher(self):
return Matcher('docker').image[Any][Any]
def bind(self, config, match, env, binding_dict):
cluster = CLUSTERS[config.cluster()]
image = match
ref_str = 'docker.image[%s][%s]' % image[2:4]
ref = Ref.from_address(ref_str)
if ref_str in binding_dict:
(image_data, image_struct) = binding_dict[ref_str]
else:
image_data = '%s:%s' % (image[2], image[3])
image_struct = DockerRegistryClient.resolve(cluster, image[2], image[3])
binding_dict[ref_str] = (image_data, image_struct)
config.bind({ref: image_struct})
```
#### File: aurora/common/transport.py
```python
import logging
from io import BytesIO
import requests
from requests import exceptions as request_exceptions
from thrift.transport.TTransport import TTransportBase, TTransportException
from twitter.common import log
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
DEFAULT_USER_AGENT = 'Python TRequestsTransport v1.0'
def default_requests_session_factory():
session = requests.session()
return session
class TRequestsTransport(TTransportBase):
"""A Thrift HTTP client based upon the requests module."""
class AuthError(Exception):
"""Indicates that a request failed due to an authentication or authorization problem. """
pass
def __init__(
self,
uri,
auth=None,
session_factory=default_requests_session_factory,
user_agent=DEFAULT_USER_AGENT):
"""Construct a TRequestsTransport.
Construct a Thrift transport based upon the requests module. URI is the
HTTP endpoint that the server should be listening on. The 'auth'
keyword is passed directly to the requests client and can be used to
provide different authentication contexts such as Kerberos
authentication via the requests-kerberos module.
:param uri: The endpoint uri
:type uri: str
:keyword auth: The requests authentication context
:type auth: requests.auth.AuthBase
:keyword session_factory: A callable that returns a requests session
:type session_factory: () -> requests.Session
:keyword user_agent: The value to use for the User-Agent header
:type user_agent: str
"""
self._session = None
self.__session_factory = session_factory
if not callable(session_factory):
raise TypeError('session_factory should be a callable that produces a requests.Session!')
self.__user_agent = user_agent
self.__wbuf = BytesIO()
self.__rbuf = BytesIO()
self.__uri = uri
try:
self.__urlparse = urlparse(uri)
except ValueError:
raise TTransportException('Failed to parse uri %r' % (uri,))
self.__timeout = None
if auth is not None and not isinstance(auth, requests.auth.AuthBase):
raise TypeError('Invalid auth type. Expected: %s but got %s'
% (requests.auth.AuthBase.__name__, auth.__class__.__name__))
self.__auth = auth
# Silence requests logs so we don't get messages for every HTTP connection.
logging.getLogger('requests').setLevel(logging.WARNING)
def isOpen(self):
return self._session is not None
def open(self):
session = self.__session_factory()
requests_default_agent = requests.utils.default_user_agent()
if session.headers.get('User-Agent', requests_default_agent) == requests_default_agent:
session.headers['User-Agent'] = self.__user_agent
self._session = session
def close(self):
session, self._session = self._session, None
session.close()
def setTimeout(self, ms):
self.__timeout = ms / 1000.0
def read(self, size):
return self.__rbuf.read(size)
def write(self, buf):
self.__wbuf.write(buf)
def flush(self):
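# flush() posts the buffered Thrift request body to the endpoint and stores the
# HTTP response bytes so subsequent read() calls can consume them.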
if not self.isOpen():
self.open()
data = self.__wbuf.getvalue()
self.__wbuf = BytesIO()
self._session.headers['Accept'] = 'application/vnd.apache.thrift.binary'
self._session.headers['Content-Type'] = 'application/vnd.apache.thrift.binary'
self._session.headers['Content-Length'] = str(len(data))
self._session.headers['Host'] = self.__urlparse.hostname
try:
response = self._session.post(
self.__uri,
data=data,
timeout=self.__timeout,
auth=self.__auth)
response.raise_for_status()
self.__rbuf = BytesIO(response.content)
except request_exceptions.Timeout:
raise TTransportException(
type=TTransportException.TIMED_OUT,
message='Timed out talking to %s' % self.__uri)
except request_exceptions.RequestException as e:
if e.response is not None:
log.debug('Request failed, response headers:')
for field_name, field_value in e.response.headers.items():
log.debug(' %s: %s' % (field_name, field_value))
if e.response.status_code in (401, 403):
raise self.AuthError(e)
raise TTransportException(
type=TTransportException.UNKNOWN,
message='Unknown error talking to %s: %s' % (self.__uri, e))
```
#### File: executor/common/announcer.py
```python
import posixpath
import threading
import time
from abc import abstractmethod
from kazoo.client import KazooClient
from kazoo.retry import KazooRetry
from kazoo.security import make_acl, make_digest_acl_credential
from mesos.interface import mesos_pb2
from twitter.common import app, log
from twitter.common.concurrent.deferred import defer
from twitter.common.exceptions import ExceptionalThread
from twitter.common.metrics import LambdaGauge, Observable
from twitter.common.quantity import Amount, Time
from twitter.common.zookeeper.serverset import Endpoint, ServerSet
from apache.aurora.executor.common.announcer_zkauth_schema import ZkAuth
from apache.aurora.executor.common.status_checker import (
StatusChecker,
StatusCheckerProvider,
StatusResult
)
from apache.aurora.executor.common.task_info import (
mesos_task_instance_from_assigned_task,
resolve_ports
)
def make_endpoints(hostname, portmap, primary_port):
"""
Generate primary, additional endpoints from a portmap and primary_port.
primary_port must be a name in the portmap dictionary.
"""
# Do int check as stop-gap measure against incompatible downstream clients.
additional_endpoints = dict(
(name, Endpoint(hostname, port)) for (name, port) in portmap.items()
if isinstance(port, int))
# It's possible for the primary port to not have been allocated if this task
# is using autoregistration, so register with a port of 0.
return Endpoint(hostname, portmap.get(primary_port, 0)), additional_endpoints
def make_zk_auth(zk_auth_config):
if zk_auth_config is None:
return None
try:
with open(zk_auth_config) as fp:
try:
zk_auth = ZkAuth.json_load(fp, strict=True)
if not zk_auth.check().ok():
app.error('ZK authentication config is invalid %s' % zk_auth.check().message())
return zk_auth
except (TypeError, ValueError, AttributeError) as ex:
app.error('Problem parsing ZK authentication config %s' % ex)
except IOError as ie:
app.error('Failed to open config file %s' % ie)
def to_acl(access):
cred = access.credential().get()
if access.scheme().get() == 'digest':
cred_parts = access.credential().get().split(':')
if len(cred_parts) != 2:
app.error('Digest credential should be of the form <user>:<password>')
cred = make_digest_acl_credential(cred_parts[0], cred_parts[1])
return make_acl(access.scheme().get(),
cred,
read=access.permissions().read().get(),
write=access.permissions().write().get(),
create=access.permissions().create().get(),
delete=access.permissions().delete().get(),
admin=access.permissions().admin().get())
class AnnouncerCheckerProvider(StatusCheckerProvider):
def __init__(self, allow_custom_serverset_path=False, hostname=None, name=None):
self.name = name
self._allow_custom_serverset_path = allow_custom_serverset_path
self._override_hostname = hostname
super(AnnouncerCheckerProvider, self).__init__()
@abstractmethod
def make_zk_client(self):
"""Create a ZooKeeper client which can be asyncronously started"""
@abstractmethod
def make_zk_path(self, assigned_task):
"""Given an assigned task return the path into where we should announce the task."""
def from_assigned_task(self, assigned_task, _):
mesos_task = mesos_task_instance_from_assigned_task(assigned_task)
if not mesos_task.has_announce():
return None
portmap = resolve_ports(mesos_task, assigned_task.assignedPorts)
# Overriding hostname can be done either by explicitly specifying a value or
# by changing the value of assigned_task.slaveHost.
# assigned_task.slaveHost is the --hostname argument passed into the mesos slave.
# If no argument was passed to the mesos-slave, the slave falls back to gethostname()
if self._override_hostname:
hostname = self._override_hostname
else:
hostname = assigned_task.slaveHost
endpoint, additional = make_endpoints(
hostname,
portmap,
mesos_task.announce().primary_port().get())
client = self.make_zk_client()
if mesos_task.announce().has_zk_path():
if self._allow_custom_serverset_path:
path = mesos_task.announce().zk_path().get()
else:
app.error('Executor must be started with --announcer-allow-custom-serverset-path in order '
'to use zk_path in the Announcer config')
else:
path = self.make_zk_path(assigned_task)
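# The announce timeout below mirrors the task's health-check budget: the initial
# interval plus time for the maximum allowed consecutive failures.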
initial_interval = mesos_task.health_check_config().initial_interval_secs().get()
interval = mesos_task.health_check_config().interval_secs().get()
consecutive_failures = mesos_task.health_check_config().max_consecutive_failures().get()
timeout_secs = initial_interval + (consecutive_failures * interval)
return AnnouncerChecker(
client, path, timeout_secs, endpoint, additional=additional, shard=assigned_task.instanceId,
name=self.name)
class DefaultAnnouncerCheckerProvider(AnnouncerCheckerProvider):
DEFAULT_RETRY_MAX_DELAY = Amount(5, Time.MINUTES)
DEFAULT_RETRY_POLICY = KazooRetry(
max_tries=None,
ignore_expire=True,
max_delay=DEFAULT_RETRY_MAX_DELAY.as_(Time.SECONDS),
)
def __init__(self, ensemble, root='/aurora', allow_custom_serverset_path=False,
hostname=None, zk_auth=None):
self._ensemble = ensemble
self._root = root
self._zk_auth = zk_auth
super(DefaultAnnouncerCheckerProvider, self).__init__(allow_custom_serverset_path, hostname)
def make_zk_client(self):
if self._zk_auth is None:
auth_data = None
default_acl = None
else:
auth_data = [(a.scheme().get(), a.credential().get()) for a in self._zk_auth.auth()]
default_acl = [to_acl(a) for a in self._zk_auth.acl()]
return KazooClient(self._ensemble,
connection_retry=self.DEFAULT_RETRY_POLICY,
default_acl=default_acl or None,
auth_data=auth_data or None)
def make_zk_path(self, assigned_task):
config = assigned_task.task
role, environment, name = (config.job.role, config.job.environment, config.job.name)
return posixpath.join(self._root, role, environment, name)
class ServerSetJoinThread(ExceptionalThread):
"""Background thread to reconnect to Serverset on session expiration."""
LOOP_WAIT = Amount(1, Time.SECONDS)
def __init__(self, event, joiner, loop_wait=LOOP_WAIT):
self._event = event
self._joiner = joiner
self._stopped = threading.Event()
self._loop_wait = loop_wait
super(ServerSetJoinThread, self).__init__()
self.daemon = True
def run(self):
while True:
if self._stopped.is_set():
break
self._event.wait(timeout=self._loop_wait.as_(Time.SECONDS))
if not self._event.is_set():
continue
log.debug('Join event triggered, joining serverset.')
self._event.clear()
self._joiner()
def stop(self):
self._stopped.set()
class Announcer(Observable):
class Error(Exception): pass
EXCEPTION_WAIT = Amount(15, Time.SECONDS)
def __init__(self,
serverset,
endpoint,
additional=None,
shard=None,
clock=time,
exception_wait=None):
self._membership = None
self._membership_termination = clock.time()
self._endpoint = endpoint
self._additional = additional or {}
self._shard = shard
self._serverset = serverset
self._rejoin_event = threading.Event()
self._clock = clock
self._thread = None
self._exception_wait = exception_wait or self.EXCEPTION_WAIT
def disconnected_time(self):
# Lockless membership length check
membership_termination = self._membership_termination
if membership_termination is None:
return 0
return self._clock.time() - membership_termination
def _join_inner(self):
return self._serverset.join(
endpoint=self._endpoint,
additional=self._additional,
shard=self._shard,
expire_callback=self.on_expiration)
def _join(self):
if self._membership is not None:
raise self.Error("join called, but already have membership!")
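# Keep retrying the serverset join, sleeping exception_wait between failures;
# only a successful join clears the termination timestamp (so disconnected_time() reports 0).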
while True:
try:
self._membership = self._join_inner()
self._membership_termination = None
except Exception as e:
log.error('Failed to join ServerSet: %s' % e)
self._clock.sleep(self._exception_wait.as_(Time.SECONDS))
else:
break
def start(self):
self._thread = ServerSetJoinThread(self._rejoin_event, self._join)
self._thread.start()
self.rejoin()
def rejoin(self):
self._rejoin_event.set()
def stop(self):
thread, self._thread = self._thread, None
thread.stop()
if self._membership:
self._serverset.cancel(self._membership)
def on_expiration(self):
self._membership = None
if not self._thread:
return
self._membership_termination = self._clock.time()
log.info('Zookeeper session expired.')
self.rejoin()
class AnnouncerChecker(StatusChecker):
DEFAULT_NAME = 'announcer'
def __init__(self, client, path, timeout_secs, endpoint, additional=None, shard=None, name=None):
self._client = client
self._connect_event = client.start_async()
self._timeout_secs = timeout_secs
self._announcer = Announcer(ServerSet(client, path), endpoint, additional=additional,
shard=shard)
self._name = name or self.DEFAULT_NAME
self._status = None
self.start_event = threading.Event()
self.metrics.register(LambdaGauge('disconnected_time', self._announcer.disconnected_time))
@property
def status(self):
return self._status
def name(self):
return self._name
def _start(self):
self._connect_event.wait(timeout=self._timeout_secs)
if not self._connect_event.is_set():
self._status = StatusResult("Creating Announcer Serverset timed out.", mesos_pb2.TASK_FAILED)
else:
self._announcer.start()
self.start_event.set()
def start(self):
defer(self._start)
def stop(self):
defer(self._announcer.stop)
```
#### File: executor/common/resource_manager.py
```python
import threading
from mesos.interface import mesos_pb2
from twitter.common.metrics import LambdaGauge
from apache.aurora.executor.common.status_checker import (
StatusChecker,
StatusCheckerProvider,
StatusResult
)
from apache.aurora.executor.common.task_info import mesos_task_instance_from_assigned_task
from apache.thermos.monitoring.monitor import TaskMonitor
from apache.thermos.monitoring.resource import TaskResourceMonitor
class ResourceManager(StatusChecker):
""" Manage resources consumed by a Task """
def __init__(self, resources, resource_monitor):
"""
resources: Resources object specifying cpu, ram, disk limits for the task
resource_monitor: The ResourceMonitor to monitor resources
"""
self._resource_monitor = resource_monitor
# TODO(wickman) Remove cpu/ram reporting if MESOS-1458 is resolved.
self._max_cpu = resources.cpu().get()
self._max_ram = resources.ram().get()
self._max_disk = resources.disk().get()
self._kill_reason = None
self._kill_event = threading.Event()
@property
def _num_procs(self):
""" Total number of processes the task consists of (including child processes) """
return self._resource_monitor.sample()[1].num_procs
@property
def _ps_sample(self):
""" ProcessSample representing the aggregate resource consumption of the Task's processes """
return self._resource_monitor.sample()[1].process_sample
@property
def _disk_sample(self):
""" Integer in bytes representing the disk consumption in the Task's sandbox """
return self._resource_monitor.sample()[1].disk_usage
@property
def status(self):
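# Only the disk limit triggers a kill here; cpu/ram usage is surfaced through the
# gauges registered in register_metrics() rather than enforced.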
sample = self._disk_sample
if sample > self._max_disk:
self._kill_event.set()
return StatusResult('Disk limit exceeded. Reserved %s bytes vs used %s bytes.' % (
self._max_disk, sample), mesos_pb2.TASK_FAILED)
def name(self):
return 'resource_manager'
def register_metrics(self):
self.metrics.register(LambdaGauge('disk_used', lambda: self._disk_sample))
self.metrics.register(LambdaGauge('disk_reserved', lambda: self._max_disk))
self.metrics.register(LambdaGauge('disk_percent',
lambda: 1.0 * self._disk_sample / self._max_disk))
self.metrics.register(LambdaGauge('cpu_used', lambda: self._ps_sample.rate))
self.metrics.register(LambdaGauge('cpu_reserved', lambda: self._max_cpu))
self.metrics.register(LambdaGauge('cpu_percent',
lambda: 1.0 * self._ps_sample.rate / self._max_cpu))
self.metrics.register(LambdaGauge('ram_used', lambda: self._ps_sample.rss))
self.metrics.register(LambdaGauge('ram_reserved', lambda: self._max_ram))
self.metrics.register(LambdaGauge('ram_percent',
lambda: 1.0 * self._ps_sample.rss / self._max_ram))
def start(self):
super(ResourceManager, self).start()
self.register_metrics()
self._resource_monitor.start()
class ResourceManagerProvider(StatusCheckerProvider):
def __init__(self, checkpoint_root, **resource_monitor_options):
self._checkpoint_root = checkpoint_root
self._resource_monitor_options = resource_monitor_options
def from_assigned_task(self, assigned_task, sandbox):
task_id = assigned_task.taskId
resources = mesos_task_instance_from_assigned_task(assigned_task).task().resources()
task_monitor = TaskMonitor(self._checkpoint_root, task_id)
resource_monitor = TaskResourceMonitor(
task_id,
task_monitor,
**self._resource_monitor_options)
return ResourceManager(resources, resource_monitor)
```
#### File: aurora/executor/http_lifecycle.py
```python
import time
from twitter.common import log
from twitter.common.quantity import Amount, Time
from apache.aurora.common.health_check.http_signaler import HttpSignaler
from .common.task_runner import TaskError, TaskRunner
class HttpLifecycleManager(TaskRunner):
"""A wrapper around a TaskRunner that performs HTTP lifecycle management."""
DEFAULT_ESCALATION_WAIT = Amount(5, Time.SECONDS)
WAIT_POLL_INTERVAL = Amount(1, Time.SECONDS)
@classmethod
def wrap(cls, runner, task_instance, portmap):
"""Return a task runner that manages the http lifecycle if lifecycle is present."""
if not task_instance.has_lifecycle() or not task_instance.lifecycle().has_http():
return runner
http_lifecycle = task_instance.lifecycle().http()
http_lifecycle_port = http_lifecycle.port().get()
graceful_shutdown_wait_secs = (
Amount(http_lifecycle.graceful_shutdown_wait_secs().get(), Time.SECONDS)
if http_lifecycle.has_graceful_shutdown_wait_secs()
else cls.DEFAULT_ESCALATION_WAIT)
shutdown_wait_secs = (
Amount(http_lifecycle.shutdown_wait_secs().get(), Time.SECONDS)
if http_lifecycle.has_shutdown_wait_secs()
else cls.DEFAULT_ESCALATION_WAIT)
if not portmap or http_lifecycle_port not in portmap:
# If DefaultLifecycle is ever to disable task lifecycle by default, we should
# raise a TaskError here, since the user has requested http lifecycle without
# binding a port to send lifecycle commands.
return runner
escalation_endpoints = [
(http_lifecycle.graceful_shutdown_endpoint().get(), graceful_shutdown_wait_secs),
(http_lifecycle.shutdown_endpoint().get(), shutdown_wait_secs)
]
return cls(runner, portmap[http_lifecycle_port], escalation_endpoints)
def __init__(self,
runner,
lifecycle_port,
escalation_endpoints,
clock=time):
self._runner = runner
self._lifecycle_port = lifecycle_port
self._escalation_endpoints = escalation_endpoints
self._clock = clock
self.__started = False
def _terminate_http(self):
http_signaler = HttpSignaler(self._lifecycle_port)
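# Walk the escalation endpoints in order (graceful shutdown first, then shutdown),
# POSTing each and polling the runner status for up to wait_time before escalating.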
for endpoint, wait_time in self._escalation_endpoints:
handled, _ = http_signaler(endpoint, use_post_method=True)
log.info('Killing task, calling %s and waiting %s, handled is %s' % (
endpoint, str(wait_time), str(handled)))
waited = Amount(0, Time.SECONDS)
while handled:
if self._runner.status is not None:
return True
if waited >= wait_time:
break
self._clock.sleep(self.WAIT_POLL_INTERVAL.as_(Time.SECONDS))
waited += self.WAIT_POLL_INTERVAL
# --- public interface
def start(self, timeout=None):
self.__started = True
return self._runner.start(timeout=timeout if timeout is not None else self._runner.MAX_WAIT)
def stop(self, timeout=None):
"""Stop the runner. If it's already completed, no-op. If it's still running, issue a kill."""
if not self.__started:
raise TaskError('Failed to call TaskRunner.start.')
log.info('Invoking runner HTTP teardown.')
self._terminate_http()
return self._runner.stop(timeout=timeout if timeout is not None else self._runner.MAX_WAIT)
@property
def status(self):
"""Return the StatusResult of this task runner. This returns None as
long as no terminal state is reached."""
return self._runner.status
```
#### File: cli/commands/inspect.py
```python
import pprint
from pystachio.naming import frozendict
from twitter.common import app
from apache.thermos.cli.common import get_task_from_options
from apache.thermos.common.options import add_binding_to, add_port_to
def inspect_unwrap(obj):
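# Recursively convert pystachio frozendicts and containers into plain dicts/tuples
# so pprint can render the interpolated task cleanly.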
if isinstance(obj, frozendict):
return dict((key, inspect_unwrap(val)) for (key, val) in obj.items())
if isinstance(obj, (list, tuple, set)):
return tuple(inspect_unwrap(val) for val in obj)
return obj
@app.command
@app.command_option("--task", metavar="TASKNAME", default=None, dest='task',
help="The thermos task within the config that should be inspected. Only "
"required if there are multiple tasks exported from the thermos "
"configuration.")
@app.command_option("--json", default=False, action='store_true', dest='json',
help="Read the source file in json format instead of pystachio.")
@app.command_option("-P", "--port", type="string", nargs=1, action="callback",
callback=add_port_to('prebound_ports'), dest="prebound_ports", default=[],
metavar="NAME:PORT", help="bind named PORT to NAME.")
@app.command_option("-E", "--environment", type="string", nargs=1, action="callback",
callback=add_binding_to('bindings'), default=[], dest="bindings",
metavar="NAME=VALUE",
help="bind the configuration environment variable NAME to VALUE.")
def inspect(args, options):
"""Inspect a thermos config and display the evaluated task
Usage: thermos inspect [options] config
"""
thermos_task = get_task_from_options(args, options)
ti, _ = thermos_task.task().interpolate()
pprint.pprint(inspect_unwrap(ti.get()), indent=4)
```
#### File: cli/commands/run.py
```python
from __future__ import print_function
import getpass
from twitter.common import app
from apache.thermos.cli.common import get_task_from_options, really_run
from apache.thermos.common.options import add_binding_to, add_port_to
@app.command
@app.command_option("--user", metavar="USER", default=getpass.getuser(), dest='user',
help="run as this user. if not $USER, must have setuid privilege.")
@app.command_option("--enable_chroot", dest="chroot", default=False, action='store_true',
help="chroot tasks to the sandbox before executing them, requires "
"root privileges.")
@app.command_option("--task", metavar="TASKNAME", default=None, dest='task',
help="The thermos task within the config that should be run. Only required if "
"there are multiple tasks exported from the thermos configuration.")
@app.command_option("--task_id", metavar="STRING", default=None, dest='task_id',
help="The id to which this task should be bound, synthesized from the task "
"name if none provided.")
@app.command_option("--json", default=False, action='store_true', dest='json',
help="Read the source file in json format.")
@app.command_option("--sandbox", metavar="PATH", default="/var/lib/thermos/sandbox", dest='sandbox',
help="The sandbox in which to run the task.")
@app.command_option("-P", "--port", type="string", nargs=1, action="callback",
callback=add_port_to('prebound_ports'), dest="prebound_ports", default=[],
metavar="NAME:PORT", help="bind named PORT to NAME.")
@app.command_option("-E", "--environment", type="string", nargs=1, action="callback",
callback=add_binding_to('bindings'), default=[], dest="bindings",
metavar="NAME=VALUE",
help="bind the configuration environment variable NAME to VALUE.")
@app.command_option("--daemon", default=False, action='store_true', dest='daemon',
help="fork and daemonize the thermos runner.")
def run(args, options):
"""Run a thermos task.
Usage: thermos run [options] config
"""
thermos_task = get_task_from_options(args, options)
really_run(thermos_task,
options.root,
options.sandbox,
task_id=options.task_id,
user=options.user,
prebound_ports=options.prebound_ports,
chroot=options.chroot,
daemon=options.daemon)
```
#### File: cli/commands/simplerun.py
```python
from __future__ import print_function
import getpass
import tempfile
from twitter.common import app
from apache.thermos.cli.common import really_run
from apache.thermos.common.options import add_binding_to, add_port_to
from apache.thermos.config.loader import ThermosTaskWrapper
from apache.thermos.config.schema import Process, Resources, Task
@app.command
@app.command_option("--user", metavar="USER", default=getpass.getuser(), dest='user',
help="run as this user. if not $USER, must have setuid privilege.")
@app.command_option("--name", metavar="STRING", default='simple', dest='name',
help="The name to give this task.")
@app.command_option("--task_id", metavar="STRING", default=None, dest='task_id',
help="The id to which this task should be bound, synthesized from the task "
"name if none provided.")
@app.command_option("-P", "--port", type="string", nargs=1, action="callback",
callback=add_port_to('prebound_ports'), dest="prebound_ports", default=[],
metavar="NAME:PORT", help="bind named PORT to NAME.")
@app.command_option("-E", "--environment", type="string", nargs=1, action="callback",
callback=add_binding_to('bindings'), default=[], dest="bindings",
metavar="NAME=VALUE",
help="bind the configuration environment variable NAME to VALUE.")
@app.command_option("--daemon", default=False, action='store_true', dest='daemon',
help="fork and daemonize the thermos runner.")
def simplerun(args, options):
"""Run a simple command line as a thermos task.
Usage: thermos simplerun [options] [--] commandline
"""
try:
cutoff = args.index('--')
cmdline = ' '.join(args[cutoff + 1:])
except ValueError:
cmdline = ' '.join(args)
print("Running command: '%s'" % cmdline)
thermos_task = ThermosTaskWrapper(Task(
name=options.name,
resources=Resources(cpu=1.0, ram=256 * 1024 * 1024, disk=0),
processes=[Process(name=options.name, cmdline=cmdline)]))
really_run(thermos_task,
options.root,
tempfile.mkdtemp(),
task_id=options.task_id,
user=options.user,
prebound_ports=options.prebound_ports,
chroot=False,
daemon=options.daemon)
```
#### File: cli/commands/status.py
```python
from __future__ import print_function
import os
import pwd
import re
import sys
import time
from twitter.common import app
from apache.thermos.cli.common import get_path_detector
from apache.thermos.common.ckpt import CheckpointDispatcher
from apache.thermos.monitoring.detector import TaskDetector
from gen.apache.thermos.ttypes import ProcessState, TaskState
@app.command
@app.command_option("--verbosity", default=0, dest='verbose', type='int',
help="Display more verbosity")
@app.command_option("--only", default=None, dest='only', type='choice',
choices=('active', 'finished'), help="Display only tasks of this type.")
def status(args, options):
"""Get the status of task(s).
Usage: thermos status [options] [task_name(s) or task_regexp(s)]
"""
path_detector = get_path_detector()
def format_task(detector, task_id):
checkpoint_filename = detector.get_checkpoint(task_id)
checkpoint_stat = os.stat(checkpoint_filename)
try:
checkpoint_owner = pwd.getpwuid(checkpoint_stat.st_uid).pw_name
except KeyError:
checkpoint_owner = 'uid:%s' % checkpoint_stat.st_uid
print(' %-20s [owner: %8s]' % (task_id, checkpoint_owner), end='')
if options.verbose == 0:
print()
if options.verbose > 0:
state = CheckpointDispatcher.from_file(checkpoint_filename)
if state is None or state.header is None:
print(' - checkpoint stream CORRUPT or outdated format')
return
print(' state: %8s' % TaskState._VALUES_TO_NAMES.get(state.statuses[-1].state, 'Unknown'),
end='')
print(' start: %25s' % time.asctime(time.localtime(state.header.launch_time_ms / 1000.0)))
if options.verbose > 1:
print(' user: %s' % state.header.user, end='')
if state.header.ports:
print(' ports: %s' % ' '.join('%s -> %s' % (key, val)
for key, val in state.header.ports.items()))
else:
print(' ports: None')
print(' sandbox: %s' % state.header.sandbox)
if options.verbose > 2:
print(' process table:')
for process, process_history in state.processes.items():
print(' - %s runs: %s' % (process, len(process_history)), end='')
last_run = process_history[-1]
print(' last: pid=%s, rc=%s, finish:%s, state:%s' % (
last_run.pid or 'None',
last_run.return_code if last_run.return_code is not None else '',
time.asctime(time.localtime(last_run.stop_time)) if last_run.stop_time else 'None',
ProcessState._VALUES_TO_NAMES.get(last_run.state, 'Unknown')))
print()
matchers = map(re.compile, args or ['.*'])
active = []
finished = []
for root in path_detector.get_paths():
detector = TaskDetector(root)
active.extend((detector, t_id) for _, t_id in detector.get_task_ids(state='active')
if any(pattern.match(t_id) for pattern in matchers))
finished.extend((detector, t_id) for _, t_id in detector.get_task_ids(state='finished')
if any(pattern.match(t_id) for pattern in matchers))
found = False
if options.only is None or options.only == 'active':
if active:
print('Active tasks:')
found = True
for detector, task_id in active:
format_task(detector, task_id)
print()
if options.only is None or options.only == 'finished':
if finished:
print('Finished tasks:')
found = True
for detector, task_id in finished:
format_task(detector, task_id)
print()
if not found:
print('No tasks found.')
sys.exit(1)
```
#### File: client/cli/test_api_from_cli.py
```python
from mock import call, create_autospec, patch
from apache.aurora.client.api.scheduler_client import SchedulerClient
from apache.aurora.client.cli import EXIT_UNKNOWN_ERROR
from apache.aurora.client.cli.client import AuroraCommandLine
from .util import AuroraClientCommandTest
from gen.apache.aurora.api import AuroraAdmin
from gen.apache.aurora.api.ttypes import (
JobKey,
ResponseCode,
Result,
ScheduleStatusResult,
TaskQuery
)
class TestApiFromCLI(AuroraClientCommandTest):
"""A container for tests that are probing at API functionality,
to see if the CLI can handle API-level errors.
"""
@classmethod
def create_mock_scheduled_task_no_metadata(cls):
result = cls.create_mock_scheduled_tasks()
for task in result:
task.assignedTask.task.metadata = None
return result
@classmethod
def create_status_response(cls):
resp = cls.create_simple_success_response()
resp.result = Result(
scheduleStatusResult=ScheduleStatusResult(tasks=set(cls.create_scheduled_tasks())))
return resp
@classmethod
def create_status_response_null_metadata(cls):
resp = cls.create_simple_success_response()
resp.result = Result(
scheduleStatusResult=ScheduleStatusResult(
tasks=set(cls.create_mock_scheduled_task_no_metadata())))
return resp
@classmethod
def create_failed_status_response(cls):
return cls.create_blank_response(ResponseCode.INVALID_REQUEST, 'No tasks found for query')
def test_successful_status_deep(self):
"""Test the status command more deeply: in a request with a fully specified
job, it should end up doing a query using getTasksWithoutConfigs."""
mock_scheduler_client = create_autospec(spec=SchedulerClient, instance=True)
mock_thrift_client = create_autospec(spec=AuroraAdmin.Client, instance=True)
mock_scheduler_client.get_thrift_client.return_value = mock_thrift_client
mock_thrift_client.getTasksWithoutConfigs.return_value = self.create_status_response()
with patch('apache.aurora.client.api.scheduler_client.SchedulerClient.get',
return_value=mock_scheduler_client):
cmd = AuroraCommandLine()
cmd.execute(['job', 'status', 'west/bozo/test/hello'])
assert mock_thrift_client.getTasksWithoutConfigs.mock_calls == [
call(TaskQuery(jobKeys=[JobKey(role='bozo', environment='test', name='hello')]))]
def test_status_api_failure(self):
mock_scheduler_client = create_autospec(spec=SchedulerClient, instance=True)
mock_thrift_client = create_autospec(spec=AuroraAdmin.Client, instance=True)
mock_scheduler_client.get_thrift_client.return_value = mock_thrift_client
mock_thrift_client.getTasksWithoutConfigs.side_effect = IOError("Uh-Oh")
with patch('apache.aurora.client.api.scheduler_client.SchedulerClient.get',
return_value=mock_scheduler_client):
cmd = AuroraCommandLine()
# This should create a scheduler client, set everything up, and then issue a
# getTasksWithoutConfigs call against the mock_scheduler_client. That should raise an
# exception, which results in the command failing with an error code.
result = cmd.execute(['job', 'status', 'west/bozo/test/hello'])
assert result == EXIT_UNKNOWN_ERROR
assert mock_thrift_client.getTasksWithoutConfigs.mock_calls == [
call(TaskQuery(jobKeys=[JobKey(role='bozo', environment='test', name='hello')]))]
```
#### File: aurora/common/test_cluster.py
```python
import pytest
from pystachio import Default, Integer, Required, String
from apache.aurora.common.cluster import Cluster
def test_simple():
class AudubonTrait(Cluster.Trait):
master_role = String # noqa
slave_role = Default(String, 'slave') # noqa
version = Required(Integer) # noqa
west = Cluster(name='west',
master_role='west.master',
slave_role='west.slave',
version=10)
east = Cluster(name='east', version=11)
assert east.name == 'east'
with pytest.raises(AttributeError):
east.slave_role
assert east.with_traits(AudubonTrait).slave_role == 'slave'
assert west.with_traits(AudubonTrait).slave_role == 'west.slave'
assert east.with_traits(AudubonTrait).master_role is None
with pytest.raises(TypeError):
# requires version at least
Cluster(name='east').with_traits(AudubonTrait)
```
#### File: aurora/executor/test_executor_base.py
```python
from unittest import TestCase
import mock
from mesos.interface import ExecutorDriver, mesos_pb2
from twitter.common import log
from apache.aurora.executor.executor_base import ExecutorBase
class TestExecutorBase(TestCase):
def setUp(self):
self.executor_base = ExecutorBase()
def test_status_is_terminal(self):
for terminal_status in ExecutorBase.TERMINAL_STATES:
assert ExecutorBase.status_is_terminal(terminal_status)
assert not ExecutorBase.status_is_terminal('RUNNING')
assert not ExecutorBase.status_is_terminal('BASSCANNON')
@mock.patch('twitter.common.log.info', spec=log.info)
def test_log(self, mock_info):
test_message = 'testing'
self.executor_base.log(test_message)
mock_info.assert_called_once_with('Executor [None]: %s' % test_message)
def test_registered(self):
driver = ExecutorDriver()
executor_info = mesos_pb2.ExecutorInfo()
framework_info = mesos_pb2.FrameworkInfo()
slave_info = mesos_pb2.SlaveInfo()
self.executor_base.registered(driver, executor_info, framework_info, slave_info)
assert self.executor_base._driver == driver
assert self.executor_base._executor_info == executor_info
assert self.executor_base._framework_info == framework_info
assert self.executor_base._slave_info == slave_info
def test_reregistered(self):
driver = ExecutorDriver()
slave_info = mesos_pb2.SlaveInfo()
self.executor_base.reregistered(driver, slave_info)
def test_disconnected(self):
driver = ExecutorDriver()
self.executor_base.disconnected(driver)
@mock.patch('mesos.interface.mesos_pb2.TaskStatus', spec=mesos_pb2.TaskStatus)
def test_send_update(self, MockTaskStatus):
driver = mock.Mock(ExecutorDriver)
task_id = 'task_id'
state = mesos_pb2.TASK_RUNNING
message = 'test_message'
self.executor_base.send_update(driver, task_id, state, message)
driver.sendStatusUpdate.assert_called_once_with(MockTaskStatus.return_value)
assert MockTaskStatus.return_value.state == state
assert MockTaskStatus.return_value.task_id.value == task_id
assert MockTaskStatus.return_value.message == message
def test_frameworkMessage(self):
driver = ExecutorDriver()
self.executor_base.frameworkMessage(driver, 'test_message')
def test_error(self):
driver = ExecutorDriver()
self.executor_base.error(driver, 'message')
```
#### File: cli/commands/test_simplerun.py
```python
import getpass
import mock
from apache.thermos.cli.commands.simplerun import simplerun
@mock.patch('apache.thermos.cli.commands.simplerun.really_run')
def test_simplerun(really_run_mock):
options_mock = mock.Mock(
spec_set=('root', 'user', 'name', 'task_id', 'prebound_ports', 'bindings', 'daemon'))
options_mock.root = '/tmp/root'
options_mock.user = getpass.getuser()
options_mock.name = 'simple'
options_mock.task_id = None
options_mock.prebound_ports = []
options_mock.bindings = {}
options_mock.daemon = False
simplerun(['--', 'echo', 'hello', 'world'], options_mock)
args, kw = really_run_mock.call_args
thermos_task, root, sandbox = args
assert str(thermos_task.task.name()) == options_mock.name
assert str(thermos_task.task.processes()[0].cmdline()) == 'echo hello world'
assert root == '/tmp/root'
assert sandbox is not None
```
#### File: thermos/core/test_angry.py
```python
from apache.thermos.config.schema import Process, Task
from apache.thermos.testing.runner import Runner
from gen.apache.thermos.ttypes import ProcessState, TaskState
def flaky_task():
task = Task(
name="failing_task",
max_failures=2,
processes=[
Process(name="a", max_failures=1, min_duration=1, cmdline="echo hello world"),
Process(name="b", max_failures=2, min_duration=1, cmdline="exit 1"),
Process(name="c", max_failures=1, min_duration=1, final=True, cmdline="echo hello world")
],
constraints=[{'order': ['a', 'b']}]
)
return task.interpolate()[0]
def test_flaky_runner():
runner = Runner(flaky_task(), success_rate=90, random_seed=31337)
count = 0
while True:
count += 1
print('Run #%d' % count)
rc = runner.run()
if rc == 0:
break
print('Completed in %d runs' % count)
assert runner.state.statuses[-1].state == TaskState.SUCCESS
assert runner.state.processes['a'][-1].state == ProcessState.SUCCESS
assert runner.state.processes['b'][-1].state == ProcessState.FAILED
assert runner.state.processes['c'][-1].state == ProcessState.SUCCESS
```
#### File: thermos/core/test_failing_runner.py
```python
from apache.thermos.config.schema import Process, Resources, Task
from apache.thermos.testing.runner import RunnerTestBase
from gen.apache.thermos.ttypes import ProcessState, TaskState
class TestFailingRunner(RunnerTestBase):
@classmethod
def task(cls):
ping_template = Process(
name="{{process_name}}",
min_duration=1,
max_failures=5,
cmdline="echo {{process_name}} pinging; "
"echo ping >> {{process_name}}; "
"echo current count $(cat {{process_name}} | wc -l); "
"if [ $(cat {{process_name}} | wc -l) -eq {{num_runs}} ]; then "
" exit 0; "
"else "
" exit 1; "
"fi ")
tsk = Task(
name="pingping",
resources=Resources(cpu=1.0, ram=16 * 1024 * 1024, disk=16 * 1024),
processes=[
ping_template.bind(process_name="p1", num_runs=1),
ping_template.bind(process_name="p2", num_runs=2),
ping_template.bind(process_name="p3", num_runs=3),
]
)
return tsk.interpolate()[0]
def test_runner_state_success(self):
assert self.state.statuses[-1].state == TaskState.SUCCESS
def test_runner_processes_have_expected_runs(self):
processes = self.state.processes
for k in range(1, 4):
process_name = 'p%d' % k
assert process_name in processes
assert len(processes[process_name]) == k
for j in range(k - 1):
assert processes[process_name][j].state == ProcessState.FAILED
assert processes[process_name][k - 1].state == ProcessState.SUCCESS
```
#### File: aurora/e2e/validate_serverset.py
```python
import os
import posixpath
import sys
import time
from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError
OK = 1
DID_NOT_REGISTER = 2
DID_NOT_RECOVER_FROM_EXPIRY = 3
serverset = os.getenv('SERVERSET')
client = KazooClient('localhost:2181')
client.start()
def wait_until_znodes(count, timeout=30):
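# Poll the serverset path until it has exactly `count` children or `timeout` seconds
# pass; returns the full znode paths, or an empty list on timeout.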
now = time.time()
timeout += now
while now < timeout:
try:
children = client.get_children(serverset)
except NoNodeError:
children = []
print('Announced members: %s' % children)
if len(children) == count:
return [posixpath.join(serverset, child) for child in children]
time.sleep(1)
now += 1
return []
# job is created with 3 znodes.
znodes = wait_until_znodes(3, timeout=10)
if not znodes:
sys.exit(DID_NOT_REGISTER)
client.delete(znodes[0])
znodes = wait_until_znodes(3, timeout=10)
if not znodes:
sys.exit(DID_NOT_RECOVER_FROM_EXPIRY)
sys.exit(OK)
```
|
{
"source": "jeremywgleeson/Corec-AutoSchedule-Avail",
"score": 3
}
|
#### File: jeremywgleeson/Corec-AutoSchedule-Avail/generateKeys.py
```python
import random
import string
import json
import os
KEYS_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project", "keys_file.json")
def get_random_alphanumeric_string(length):
letters_and_digits = string.ascii_letters + string.digits
result_str = ''.join((random.choice(letters_and_digits) for i in range(length)))
return result_str
def write_data(keys_data):
""" write keys_data to file """
with open(KEYS_FILE, "w") as f:
json.dump(keys_data, f, indent=2)
def create_keys(num, length):
""" generate num of unique keys and return them in a list """
existing_keys = None
# get list of all keys previously generated
try:
with open(KEYS_FILE, "r") as f:
keys_data = json.load(f)
existing_keys = keys_data["valid_keys"]
for val in keys_data["used_keys"]:
existing_keys.append(val["key"])
except IOError:
existing_keys = []
# generate unique keys
out_keys_list = []
for i in range(0, num):
new_key = get_random_alphanumeric_string(length)
while new_key in existing_keys:
new_key = get_random_alphanumeric_string(length)
out_keys_list.append(new_key)
return out_keys_list
def add_keys(num):
""" generate num of unique keys and add them to valid_keys """
try:
with open(KEYS_FILE, "r") as f:
keys_data = json.load(f)
except IOError:
# if not exists, init empty file
keys_data = {"valid_keys": []}
for new_key in create_keys(num, 13):
keys_data["valid_keys"].append(new_key)
print(new_key)
write_data(keys_data)
def main():
print("Used to generate user keys for Corec AutoScheduler")
num = input("How many keys would you like to add?\n: ")
print()
while not num.isdigit():
print("Please enter a valid number!")
num = input("How many keys would you like to add?\n: ")
add_keys(int(num))
if __name__ == "__main__":
main()
```
#### File: Corec-AutoSchedule-Avail/project/boiler_key.py
```python
import requests
import pyotp
import base64
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from selenium.common.exceptions import NoSuchElementException
def get_hotp_secret(link):
# link will be of valid type, but code may not be valid (already used etc.)
code = link.split("/")[-1]
HEADERS = {"User-Agent": "okhttp/3.11.0"}
PARAMS = {
"app_id": "com.duosecurity.duomobile.app.DMApplication",
"app_version": "2.3.3",
"app_build_number": "323206",
"full_disk_encryption": False,
"manufacturer": "Google",
"model": "Pixel",
"platform": "Android",
"jailbroken": False,
"version": "6.0",
"language": "EN",
"customer_protocol": 1,
}
ENDPOINT = "https://api-1b9bef70.duosecurity.com/push/v2/activation/{}"
res = requests.post(ENDPOINT.format(code), headers=HEADERS, params=PARAMS)
# print("URL: ", res.request.url)
if res.json().get("code") == 40403:
return None
if not res.json()["response"]:
return None
return res.json()["response"]["hotp_secret"]
def generate_password(hotp_secret, counter, pin):
# you must add 1 to counter after every invocation of this function
hotp = pyotp.HOTP(base64.b32encode(hotp_secret.encode()))
hotpPassword = hotp.at(counter)
password = "{},{}".format(pin, hotpPassword)
return password
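# Hypothetical usage sketch: generate_password(secret, 0, "1234") yields something
# like "1234,123456" (assuming pyotp's default 6-digit HOTP); the caller must
# persist counter + 1 before the next login attempt.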
def check_credentials(username, password):
opts = Options()
opts.headless = True
browser = webdriver.Firefox(options=opts)
browser.get("https://www.purdue.edu/apps/account/cas/login")
try:
if browser.current_url.startswith("https://www.purdue.edu/apps/account/cas/login"):
# time to log in
# login = BoilerKey.generateLogin()
username_field = browser.find_element_by_id("username")
password_field = browser.find_element_by_id("password")
submit_button = browser.find_element_by_name("submit")
username_field.send_keys(username)
password_field.send_keys(password)
submit_button.click()
try:
registration_fail = browser.find_element_by_class_name('error')
# an error banner on the login page means the credentials were rejected
browser.quit()
return False
except NoSuchElementException:
# no error banner found, so the login succeeded
browser.quit()
return True
except Exception as e:
# error using browser
browser.quit()
return False
```
|
{
"source": "jeremywgleeson/corec-utils",
"score": 3
}
|
#### File: jeremywgleeson/corec-utils/corec_utils.py
```python
import requests
import json
import datetime
from bs4 import BeautifulSoup
class CorecAppointment():
"""An object to hold corec appointment info
:attribute participantId: :class:`str` instance recwell participantId
hash string (can be NONE) this is used to cancel said
appointment
:attribute spots: :class:`int` instance recwell spots available
number (can be NONE)
:attribute appointmentId: :class:`str` instance recwell appointmentId
hash string
:attribute timeSlotId: :class:`str` instance recwell timeSlotId
hash string
:attribute timeSlotInstanceId: :class:`str` instance recwell
timeSlotInstanceId hash string
:attribute timeStr: :class:`str` instance recwell time string
ex: '9:00 - 10:00 am'
:attribute date: :class:`datetime.date` instance appt date
"""
def __init__(self, participantId, spots, appointmentId, timeSlotId, timeSlotInstanceId, timeStr, date):
self.participantId = participantId
self.spots = spots
self.appointmentId = appointmentId
self.timeSlotId = timeSlotId
self.timeSlotInstanceId = timeSlotInstanceId
self.timeStr = timeStr
self.date = date
def canCancel(self):
"""Helper to determine whether Appointment can be canceled
returns True if user currently has appointment, else False
:rtype: bool
"""
if self.participantId:
return True
return False
def canReserve(self):
"""Helper to determine whether Appointment can be acquired
returns True if user can get appointment, else False
:rtype: bool
"""
if self.appointmentId and self.hasSpots():
return True
return False
def hasSpots(self):
"""Helper to determine whether Appointment has spots available
returns True if spots available, else False
:rtype: bool
"""
if self.spots:
return self.spots > 0
return False
def __str__(self):
"""toString equivalen will return string representation of object
:rtype: str
"""
return f"date={self.date}, timeStr={self.timeStr}, participantId={self.participantId}, spots={self.spots}, appointmentId={self.appointmentId}, timeSlotId={self.timeSlotId}, timeSlotInstanceId={self.timeSlotInstanceId}"
class CorecSession(requests.Session):
"""A Requests session with included helpers for corec site automation
Provides cookie persistence, connection-pooling, and configuration.
Basic Usage:
>>> import requests
>>> s = requests.Session()
>>> s.get('https://httpbin.org/get')
<Response [200]>
Or as a context manager::
>>> with requests.Session() as s:
... s.get('https://httpbin.org/get')
<Response [200]>
:attribute bookingId: :class:`str` instance recwell bookingId hash string
:attribute selectedFacilityId: :class:`str` instance recwell
selectedFacilityId hash string
"""
def __init__(self):
"""Override Session constructor to set default attributes for the gym
Still call super constructor to initialize Session
"""
# id for the corec gym (opposed to pool etc)
self.facilityId = "3b2e5aa2-1715-4852-bea4-34f472771330"
# id for bookings for corec gym (opposed to pool etc)
self.bookingId = "83456ef4-1d99-4e58-8e66-eb049941f7c1"
# flag to tell if session is authenticated
self.authed = False
# call default Session constructor
super().__init__()
def authWithRecwell(self, username, password):
"""Helper function to authenticate session with recwell site
After execution, session will be authenticated with recwell and able to
send requests for making/canceling appointments
:param username: :class:`str` instance purdue username
:param password: :class:`str` instance one-time-password correlating
with username (****,******)
:rtype: Boolean: indicating whether login was successful
"""
if self.authed:
return True
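# Rough shape of the login flow, inferred from the scraped pages (not an official API):
# recwell hands off to Purdue CAS via Shibboleth, we post the credentials, then replay
# the SAML response form that a browser would normally auto-submit with JavaScript.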
# get form data
formGet = self.get("https://recwell.purdue.edu/Account/GetLoginOptions?returnURL=%2F&isAdmin=false")
# find __RequestVerificationToken
text = formGet.text
subToFind = 'name="__RequestVerificationToken" type="hidden" value="'
startInd = text.find(subToFind, text.find("frmExternalLogin")) + len(subToFind)
RequestVerificationToken = text[startInd:text.find('"', startInd)]
# create data dict to send as request body
frmExternalLoginData = {
'__RequestVerificationToken': RequestVerificationToken,
'provider': 'Shibboleth'
}
formPost = self.post("https://recwell.purdue.edu/Account/ExternalLogin?ReturnUrl=%2Fbooking", data=frmExternalLoginData)
# now at main purdue login form
# find lt
text = formPost.text
subToFind = '<input type="hidden" name="lt" value="'
startInd = text.find(subToFind) + len(subToFind)
lt = text[startInd:text.find('"', startInd)]
# find post url
subToFind = '<form id="fm1" action="'
startInd = text.find(subToFind) + len(subToFind)
loginFormUrl = "https://www.purdue.edu" + text[startInd:text.find('"', startInd)]
# create data dict to send as request body
loginData = {
'username': username,
'password': password,
'lt': lt,
'execution': 'e1s1',
'_eventId': 'submit',
'submit': 'Login'
}
loginFormPost = self.post(loginFormUrl, data=loginData)
# check for auth failure
if "https://www.recwell.purdue.edu" not in loginFormPost.url and "https://www.purdue.edu/apps/account/cas/login" in loginFormPost.url:
# possibly invalid credentials
# raise ValueError("Invalid credentials")
return False
# now at continue site (only accessable with js off)
# find post url
text = loginFormPost.text
subToFind = '<form action="'
startInd = text.find(subToFind) + len(subToFind)
continuePressUrl = text[startInd:text.find('"', startInd)]
continuePressUrl = continuePressUrl.replace(":", ":").replace("/", "/")
# find RelayState
subToFind = 'name="RelayState" value="'
startInd = text.find(subToFind) + len(subToFind)
RelayState = text[startInd:text.find('"', startInd)]
RelayState = RelayState.replace(":", ":").replace("/", "/")
# find SAMLResponse
subToFind = 'name="SAMLResponse" value="'
startInd = text.find(subToFind) + len(subToFind)
SAMLResponse = text[startInd:text.find('"', startInd)]
# create data dict to send as request body
continuePressPayload = {
"RelayState": RelayState,
"SAMLResponse": SAMLResponse,
"submit": "Continue",
}
continuePressPayload = self.post(continuePressUrl, data=continuePressPayload)
self.authed = True
return True
def getAppointment(self, appt):
"""Helper function to send booking request
Send booking request to corec
:param appt: class: 'CorecAppointment' instance
:rtype: bool indicating success of appointment acquisition
"""
if not appt.canReserve():
return False
if not self.authed:
return False
bookingId = self.bookingId
selectedFacilityId = self.facilityId
reqData = {
"bookingId": bookingId,
"facilityId": selectedFacilityId,
"appointmentId": appt.appointmentId,
"timeSlotId": appt.timeSlotId,
"timeSlotInstanceId": appt.timeSlotInstanceId,
"year": appt.date.year,
"month": appt.date.month,
"day": appt.date.day
}
attempt = self.post("https://recwell.purdue.edu/booking/reserve", data=reqData)
# parse json out of response
try:
response = json.loads(attempt.text)
except json.JSONDecodeError:
# not authenticated
# raise Exception("Not logged in")
return False
return response["Success"]
def cancelAppointment(self, appt):
"""Helper function to send booking cancelation request
Send booking cancelation request to corec
:param appt: class: 'CorecAppointment' instance
:rtype: bool indicating success of appointment cancelation
"""
if not appt.canCancel():
return False
if not self.authed:
return False
bookingId = self.bookingId
selectedFacilityId = self.facilityId
delUrl = "https://recwell.purdue.edu/booking/delete/" + appt.participantId
attempt = self.post(delUrl)
# parse json out of response
try:
response = json.loads(attempt.text)
except json.JSONDecodeError:
# not authenticated
# raise Exception("Not logged in")
return False
return response
def getAppointmentsData(self, targetDay):
"""Helper function to get appointment data
Send get request for target day. Returns dict of
{time_str: `CorecAppointment` instance}
where time_str is string of type "8 - 9:20 AM"
:param targetDay: :class:`datetime.date` instance target day to scrape
appointments for
:rtype: dict
"""
# sub_to_find = ''
# start_ind = text.find(sub_to_find) + len(sub_to_find)
# selectedFacilityId = text[start_ind:text.find('"', start_ind)]
# print(selectedFacilityId)
appDataUrl = "https://recwell.purdue.edu/booking/{}/slots/{}/{}/{}/{}".format(self.bookingId, self.facilityId, targetDay.year, targetDay.month, targetDay.day)
# check if this has odd status
appData = self.get(appDataUrl)
# print(app_data.text)
if appData.text.startswith("<!DOCTYPE html>"):
# raise Exception("Not logged in")
return None
soup = BeautifulSoup(appData.text, 'html.parser')
bookingDivs = soup.findAll("div", class_="booking-slot-item")
retData = {}
for timecard in bookingDivs:
participantId = timecard['data-participant-id']
if participantId == "00000000-0000-0000-0000-000000000000":
participantId = None
timeRange = timecard.p.strong.text.strip()
spots = timecard.span.text.strip().split(" ")[0]
try:
spots = int(spots)
except Exception:
# non-numeric text such as "Booked" means no open spots
spots = 0
if timecard.div.button.has_attr('onclick'):
resStr = timecard.div.button['onclick']
resLis = resStr[8:-1].split(', ')
appointmentId = resLis[0][1:-1]
timeSlotId = resLis[1][1:-1]
timeSlotInstanceId = resLis[2][1:-1]
if spots > 0:
canRequest = True
else:
appointmentId = None
timeSlotId = None
timeSlotInstanceId = None
canRequest = False
retData[timeRange] = CorecAppointment(participantId, spots, appointmentId, timeSlotId, timeSlotInstanceId, timeRange, targetDay)
return retData
```
#### File: jeremywgleeson/corec-utils/getWhenAvailable.py
```python
import corec_utils
import boilerkey
import time
import datetime
def getWhenAvailable(username, password, timeStr, targetDate, interval):
# initialize CorecSession
with corec_utils.CorecSession() as sesh:
# ensure we log in by allowing 3 attempts
for i in range(0,3):
# log in to recwell
if not sesh.authWithRecwell(username, password):
print("Error authenticating!!!")
time.sleep(10)
else:
break
if not sesh.authed:
print("Could not authenticate! Check username/password")
return False
apptBooked = False
while not apptBooked:
appData = None
for i in range(0,3):
# This will store dictionary of availble CorecAppointment instances
# in appData
appData = sesh.getAppointmentsData(targetDate)
if not appData:
print("Error getting data! Did you enter an invalid date?")
time.sleep(10)
else:
break
if appData and timeStr in appData:
# check if appointment already reserved
if appData[timeStr].canCancel():
print(f"Appointment already reserved at {datetime.datetime.now()}")
return False
# check if another appointment already reserved for said day
if appData[timeStr].hasSpots() and not appData[timeStr].canReserve():
print(f"You already have an appointment on this day")
return False
# only move forward if spots available
if appData[timeStr].canReserve():
for i in range(0,3):
# Getting appointment requires a CorecAppointent instance as an argument
if not sesh.getAppointment(appData[timeStr]):
print("Error getting appointment!")
time.sleep(10)
else:
return True
else:
print(f"Tried to get appointment at {datetime.datetime.now()}")
else:
print("Invalid data for attempting to get appointment!")
time.sleep(interval)
def main():
# ensure credentials are set up
boilerkey.checkSetup()
USERNAME = boilerkey.getUsername()
if not USERNAME:
USERNAME = input("Enter Purdue Username: ")
targetMonth = input("Enter target month (1-12): ")
targetDay = input("Enter target day (1-31): ")
targetYear = input("Enter target year: ")
targetMonth = int(targetMonth)
targetDay = int(targetDay)
targetYear = int(targetYear)
TARGET_DAY = datetime.date(targetYear, targetMonth, targetDay)
TARGET_TIME = input("Enter time interval EXACTLY as shown on corec website\nExample: '9:20 - 10:40 AM'\n:")
INTERVAL = 30
# get appointment
if getWhenAvailable(USERNAME, boilerkey.generatePassword(), TARGET_TIME, TARGET_DAY, INTERVAL):
print(f"Successfully got appointment at {datetime.datetime.now()}")
if __name__ == "__main__":
main()
```
|
{
"source": "jeremywiebe/xi-editor",
"score": 2
}
|
#### File: xi-editor/python/bracket_example.py
```python
from xi_plugin import start_plugin, Plugin, edit
MATCHES = {"{": "}", "[": "]", "(": ")"}
class BracketCloser(Plugin):
"""Naively closes opened brackets, parens, & braces."""
def update(self, view, author, rev, start, end,
new_len, edit_type, text=None):
resp = 0
close_char = MATCHES.get(text)
if close_char:
# compute a delta from params:
new_cursor = end + new_len
# we set 'after_cursor' because we want the edit to appear to the right
# of the active cursor. we set priority=HIGH because we want this edit
# applied after concurrent edits.
resp = self.new_edit(rev, (new_cursor, new_cursor), close_char,
after_cursor=True, priority=edit.EDIT_PRIORITY_HIGH)
return resp
def main():
start_plugin(BracketCloser())
if __name__ == "__main__":
main()
```
#### File: python/xi_plugin/view.py
```python
import sys
from collections import namedtuple
Selection = namedtuple('Selection', ['start', 'end', 'is_caret'])
class View(object):
"""Represents a view into a buffer."""
def __init__(self, view_id, lines):
self.view_id = view_id
self.lines = lines
@property
def path(self):
return self.lines.path
@property
def syntax(self):
return self.lines.syntax
def get_selections(self):
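# Ask the xi-core peer for the current selections and wrap each (start, end) pair
# in a Selection, marking zero-width regions as carets.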
selections = self.lines.peer.get_selections(self.view_id)
selections = selections['selections']
return [Selection(s, e, (s == e)) for (s, e) in selections]
def update_spans(self, *args, **kwargs):
self.lines.peer.update_spans(self.view_id, *args, **kwargs)
def add_scopes(self, *args, **kwargs):
self.lines.peer.add_scopes(self.view_id, *args, **kwargs)
def edit(self, *args, **kwargs):
self.lines.peer.edit(self.view_id, *args, **kwargs)
```
|
{
"source": "JeremyWildsmith/x86devirt",
"score": 2
}
|
#### File: JeremyWildsmith/x86devirt/x86devirt_jmp.py
```python
import angr
possibleJmps = [
{
"name": "jz",
"must": [0x40],
"not": [0x1, 0],
"priority": 1
},
{
"name": "jo",
"must": [0x800],
"not": [0],
"priority": 1
},
{
"name": "jno",
"must": [0x40],
"not": [0x800],
"priority": 1
},
{
"name": "jp",
"must": [0x4],
"not": [0],
"priority": 1
},
{
"name": "jnp",
"must": [0],
"not": [0x4],
"priority": 1
},
{
"name": "jb",
"must": [0x1],
"not": [0],
"priority": 1
},
{
"name": "jnb",
"must": [0],
"not": [0x1],
"priority": 1
},
{
"name": "ja",
"must": [0],
"not": [0x40, 0x1, 0x41],
"priority": 2
},
{
"name": "jl",
"must": [0x800, 0x80],
"not": [0x880, 0],
"priority": 2
},
{
"name": "jge",
"must": [0x880, 0],
"not": [0x800, 0x80],
"priority": 2
},
{
"name": "jg",
"must": [0x880, 0],
"not": [0x8C0, 0x800, 0x80],
"priority": 3
},
{
"name": "jnz",
"must": [0x1, 0],
"not": [0x40],
"priority": 1
},
{
"name": "jbe",
"must": [0x41, 0x40, 0x1],
"not": [0],
"priority": 2
},
{
"name": "jle",
"must": [0x40, 0xC0, 0x840, 0x80, 0x800],
"not": [0x880, 0],
"priority": 3
},
{
"name": "js",
"must": [0x80],
"not": [0],
"priority": 1
},
{
"name": "jns",
"must": [0],
"not": [0x80],
"priority": 1
},
]
controlFlowBits = 0x8C5
def getJmpStatesMap(proj):
statesMap = {}
state = proj.factory.blank_state(addr=0x0)
state.add_constraints(state.regs.edx >= 0)
state.add_constraints(state.regs.edx <= 15)
simgr = proj.factory.simulation_manager(state)
r = simgr.explore(find=0xDA, avoid=0xDE, num_find=100)
for state in r.found:
val = state.solver.eval(state.regs.edx)
val = val - 0xD
        val = val // 2
        if val not in statesMap:
statesMap[val] = {"must": [], "not": []}
statesMap[val]["must"].append(state)
state = proj.factory.blank_state(addr=0x0)
state.add_constraints(state.regs.edx >= 0)
state.add_constraints(state.regs.edx <= 15)
simgr = proj.factory.simulation_manager(state)
r = simgr.explore(find=0xDE, avoid=0xDA, num_find=100)
for state in r.found:
val = state.solver.eval(state.regs.edx)
val = val - 0xD
        val = val // 2
statesMap[val]["not"].append(state)
return statesMap
def decodeJumps(inputFile):
proj = angr.Project(inputFile, main_opts={'backend': 'blob', 'custom_arch': 'i386'}, auto_load_libs=False)
stateMap = getJmpStatesMap(proj)
jumpMappings = {}
    for key, val in stateMap.items():
for jmp in possibleJmps:
satisfiedMustsRemaining = len(jmp["must"])
satisfiedNotsRemaining = len(jmp["not"])
for state in val["must"]:
for con in jmp["must"]:
if (state.solver.satisfiable(
extra_constraints=[state.regs.eax & controlFlowBits == con & controlFlowBits])):
satisfiedMustsRemaining -= 1;
for state in val["not"]:
for con in jmp["not"]:
if (state.solver.satisfiable(
extra_constraints=[state.regs.eax & controlFlowBits == con & controlFlowBits])):
satisfiedNotsRemaining -= 1;
if(satisfiedMustsRemaining <= 0 and satisfiedNotsRemaining <= 0):
                if key not in jumpMappings:
jumpMappings[key] = []
jumpMappings[key].append(jmp)
finalMap = {}
    for key, val in jumpMappings.items():
        maxPriority = 0
        jmpName = "NOT FOUND"
for j in val:
if(j["priority"] > maxPriority):
maxPriority = j["priority"]
jmpName = j["name"]
finalMap[jmpName] = key
print("Mapped " + str(key) + " to " + jmpName)
proj.terminate_execution()
return finalMap
```
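The `must`/`not` masks in `possibleJmps` appear to encode x86 EFLAGS bits (CF = 0x1, PF = 0x4, ZF = 0x40, SF = 0x80, OF = 0x800), and `controlFlowBits` (0x8C5) is their union; that interpretation is inferred from the values rather than stated in the source. A small angr-free sketch for decoding the masks:
```python
# Sketch: name the x86 EFLAGS bits that the masks above appear to use.
EFLAGS = {"CF": 0x1, "PF": 0x4, "ZF": 0x40, "SF": 0x80, "OF": 0x800}

def describe_mask(mask):
    """Return the flag names set in a mask, e.g. 0x41 -> ['CF', 'ZF']."""
    return [name for name, bit in EFLAGS.items() if mask & bit]

if __name__ == "__main__":
    assert sum(EFLAGS.values()) == 0x8C5   # equals controlFlowBits above
    print(describe_mask(0x41))    # ['CF', 'ZF'] - appears in the jbe entry
    print(describe_mask(0x880))   # ['SF', 'OF'] - appears in the jl/jge entries
```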
|
{
"source": "jeremy-wischusen/python-database-wrappers",
"score": 3
}
|
#### File: databases/mysql/queryresult.py
```python
import datetime
class QueryResult:
def __init__(self, columns, rows):
self.columns = columns
self.rows = rows
self.number_of_rows = len(rows)
def to_json(self, add_props=None):
rows = []
for row in self.rows:
r = {}
for col, val in enumerate(row):
if isinstance(val, datetime.date):
val = self.__date_to_milliseconds(val)
if add_props and isinstance(add_props, dict):
r.update(add_props)
r[self.columns[col]] = val
rows.append(r)
return rows
def __date_to_milliseconds(self, date: datetime.date):
return int((datetime.datetime(date.year, date.month, date.day, 0, 0).timestamp() * 1000))
```
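A short usage sketch for the `QueryResult` class above; the column names and row tuples are made-up stand-ins for what a DB-API cursor would return via `cursor.description` and `cursor.fetchall()`:
```python
import datetime
import json

# Hypothetical query output for illustration only.
columns = ["id", "name", "created"]
rows = [
    (1, "alpha", datetime.date(2020, 1, 1)),
    (2, "beta", datetime.date(2020, 6, 15)),
]

result = QueryResult(columns, rows)
# Dates become epoch milliseconds; add_props is merged into every row dict.
print(json.dumps(result.to_json(add_props={"source": "demo"}), indent=2))
```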
|
{
"source": "jeremy-w/punic",
"score": 2
}
|
#### File: punic/punic/config_init.py
```python
from __future__ import division, absolute_import, print_function
__all__ = ['config_init']
import yaml
from pathlib2 import Path
from prompt_toolkit.contrib.completers import WordCompleter
from prompt_toolkit import prompt
from prompt_toolkit.auto_suggest import (AutoSuggest, Suggestion)
import six
import sys
from .xcode import *
from .platform import *
class ListAutoSuggest(AutoSuggest):
def __init__(self, items):
self.items = items
def get_suggestion(self, cli, buffer, document):
# Consider only the last line for the suggestion.
text = document.text.rsplit('\n', 1)[-1]
# Only create a suggestion when this is not an empty line.
if text.strip():
# Find first matching line in history.
for string in self.items:
for line in reversed(string.splitlines()):
if line.startswith(text):
return Suggestion(line[len(text):])
def platform_nicknames():
return sorted([p.nickname for p in Platform.all])
def _xcode_versions():
Xcode.find_all()
return sorted([six.text_type(version) for version in Xcode._all_xcodes.keys()])
def _prompt(s, items, default=None):
items = [six.text_type(item) for item in items]
completer = WordCompleter(items, ignore_case=True)
kwargs = {'completer': completer, 'complete_while_typing': True,}
if default:
kwargs['default'] = six.text_type(default)
text = prompt(u'{} ({}): '.format(s, u', '.join(items)), **kwargs)
if not text:
return None
return text
def config_init(**kwargs):
"""Generate punic configuration file."""
kwargs['xcode_version'] = None
d = {'defaults': {},}
configuration = _prompt("Configuration", ['Debug', 'Release'])
if configuration:
d['defaults']['configuration'] = configuration
platform = _prompt("Platform", platform_nicknames())
if platform:
d['defaults']['platform'] = platform
xcode_version = _prompt("Xcode Version", _xcode_versions())
if xcode_version:
d['defaults']['xcode-version'] = xcode_version
stream = six.StringIO()
yaml.safe_dump(d, stream, default_flow_style=False)
sys.stdout.write(stream.getvalue())
if _prompt('Write config to `punic.yaml`', ['yes', 'no'], default='no') == 'yes':
        Path('punic.yaml').open('w').write(stream.getvalue())
```
#### File: punic/punic/config.py
```python
from __future__ import division, absolute_import, print_function
__all__ = ['Config', 'config']
from pathlib2 import Path
import yaml
import logging
import os
import six
from .runner import *
from .xcode import *
from .platform import *
# TODO: This all needs to be cleaned up and made more generic. More configs will be added over time and this will only get worse
# TODO: Allow config file to be relocated and specified on command line
# TODO: Allow subcommands to easily override configs
class Config(object):
def __init__(self):
self.xcode = None
self.repo_overrides = dict()
self.root_path = Path.cwd() # type: Path
self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
if not self.library_directory.exists():
self.library_directory.mkdir(parents=True)
self.repo_cache_directory = self.library_directory / 'repo_cache'
if not self.repo_cache_directory.exists():
self.repo_cache_directory.mkdir(parents=True)
self.punic_path = self.root_path / 'Carthage'
self.build_path = self.punic_path / 'Build'
self.checkouts_path = self.punic_path / 'Checkouts'
self.derived_data_path = self.library_directory / "DerivedData"
self.platforms = Platform.all
self.configuration = None
self.fetch = False
self.xcode = Xcode.default()
self.toolchain = None
self.dry_run = False
self.use_submodules = False
self.use_ssh = False
self.skips = []
self.verbose = False
self.echo = False
# Read in defaults from punic.yaml (or punic.yml if that exists)
punic_configuration_path = Path('punic.yaml')
if not punic_configuration_path.exists():
punic_configuration_path = Path('punic.yml')
if punic_configuration_path.exists():
self.read(punic_configuration_path)
runner.cache_path = self.library_directory / "cache.shelf"
def update(self, **kwargs):
for key, value in sorted(kwargs.items()):
if value:
if hasattr(self, key):
setattr(self, key, value)
# Special case for platforms
platform = kwargs['platform'] if 'platform' in kwargs else None
if platform:
self.platforms = parse_platforms(platform)
if self.verbose and os.environ.get('DUMP_CONFIG', False):
self.dump()
def dump(self):
logging.info('# Environment ##' + '#' * 64)
logging.info('CWD: {}'.format(os.getcwd()))
        key_width = max([len(k) for k in os.environ.keys()] + [len(k) for k in self.__dict__.keys()])
for key, value in sorted(os.environ.items()):
logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))
logging.info('# Configuration ' + '#' * 64)
for key, value in sorted(self.__dict__.items()):
logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))
logging.info('#' * 80)
@property
def xcode_version(self):
return self.xcode.version if self.xcode else None
@xcode_version.setter
def xcode_version(self, value):
xcode = Xcode.with_version(value)
if value and not xcode:
raise Exception('Could not find xcode version: {}'.format(value))
if not xcode:
xcode = Xcode.default()
self.xcode = xcode
def read(self, path):
# type: (Path)
d = yaml.safe_load(path.open())
if not d:
return
if 'defaults' in d:
defaults = d['defaults']
if 'configuration' in defaults:
self.configuration = defaults['configuration']
if 'platforms' in defaults:
self.platforms = parse_platforms(defaults['platforms'])
elif 'platform' in defaults:
self.platforms = parse_platforms(defaults['platform'])
if 'xcode-version' in defaults:
self.xcode_version = six.text_type(defaults['xcode-version'])
if 'use-ssh' in defaults:
self.use_ssh = defaults['use-ssh']
if 'repo-overrides' in d:
self.repo_overrides = d['repo-overrides']
if 'skips' in d:
self.skips = d['skips'] or []
config = Config()
```
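For reference, `Config.read` above looks for a `punic.yaml` whose layout mirrors the `defaults`, `repo-overrides` and `skips` keys it reads. A minimal sketch that writes such a file; the key names come from the code above, while the values are purely illustrative:
```python
import yaml

example = {
    "defaults": {
        "configuration": "Debug",
        "platform": "iOS",
        "xcode-version": "8.0",
        "use-ssh": False,
    },
    "repo-overrides": {"SwiftLogging": "git@github.com:schwa/SwiftLogging.git"},
    "skips": ["SomeDependency"],
}

with open("punic.yaml", "w") as f:
    yaml.safe_dump(example, f, default_flow_style=False)
```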
#### File: punic/punic/errors.py
```python
from __future__ import division, absolute_import, print_function
import contextlib
import logging
class RepositoryNotClonedError(Exception):
pass
class CartfileNotFound(Exception):
def __init__(self, path):
self.path = path
class PunicRepresentableError(Exception):
pass
class NoSuchRevision(Exception):
def __init__(self, repository, revision):
self.repository = repository
self.revision = revision
self.message = "No such revision <rev>'{}'</rev> in repository <ref>'{}'</ref>.".format(self.revision, self.repository)
class XcodeVersionError(Exception):
pass
@contextlib.contextmanager
def error_handling():
try:
yield
except RepositoryNotClonedError:
logging.error('<err>Error</err>: No locally cloned repository found. Did you neglect to run `punic fetch` first?')
exit(-1)
except CartfileNotFound as e:
logging.error('<err>Error</err>: No Cartfile found at path: <ref>{}</ref>'.format(e.path))
exit(-1)
except NoSuchRevision as e:
logging.error('<err>Error</err>: No such revision {} found in repository {}'.format(e.revision, e.repository))
logging.error('Are you sure you are using the latest bits? Try an explicit `punic fetch` or use `punic bootstrap` instead of `punic build`')
exit(-1)
except PunicRepresentableError as e:
logging.error(e.message)
exit(-1)
except:
raise
```
#### File: punic/punic/platform.py
```python
from __future__ import division, absolute_import, print_function
__all__ = ['Platform', 'parse_platforms']
class Platform(object):
all = []
def __init__(self, name, nickname, sdks, output_directory_name):
self.name = name
# TODO: Change to "display name"?
self.nickname = nickname
self.sdks = sdks
self.output_directory_name = output_directory_name
@classmethod
def platform_for_nickname(cls, nickname):
# type: (str) -> Platform
for platform in cls.all:
if platform.nickname.lower() == nickname.lower():
return platform
return None
@property
def device_sdk(self):
return self.sdks[0]
def __repr__(self):
return self.nickname
Platform.all = [
Platform(name='iOS', nickname='iOS', sdks=['iphoneos', 'iphonesimulator'], output_directory_name='iOS'),
Platform(name='macOS', nickname='Mac', sdks=['macosx'], output_directory_name='Mac'), # TODO: Change path to macOS? Will SDK name change?
Platform(name='watchOS', nickname='watchOS', sdks=['watchos', 'watchsimulator'], output_directory_name='watchOS'),
Platform(name='tvOS', nickname='tvOS', sdks=['appletvos', 'appletvsimulator'], output_directory_name='tvOS')
]
def parse_platforms(s):
# type: (str) -> [Platform]
if not s:
return Platform.all
else:
return [Platform.platform_for_nickname(platform.strip()) for platform in s.split(',')]
```
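A quick check of `parse_platforms` against the nicknames registered above; matching is case-insensitive and an empty argument falls back to every platform:
```python
# Usage sketch for the helpers defined above.
print(parse_platforms("iOS,Mac"))              # [iOS, Mac]
print(parse_platforms(None))                   # [iOS, Mac, watchOS, tvOS]
print(Platform.platform_for_nickname("TVOS"))  # tvOS
```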
#### File: punic/punic/search.py
```python
__all__ = ['github_search']
import logging
from pathlib2 import Path
from punic.styling import styled
from punic.github import *
from punic.cartfile import *
from punic.specification import *
import six
def menu(prompt, items, formatter=None, default=None):
formatter = formatter or str
for index, item in enumerate(items):
print('{}: {}'.format(index + 1, formatter(item)))
while True:
s = raw_input('>')
if not s and default is not None:
return default
else:
try:
index = int(s)
return items[index - 1]
except:
continue
def github_search(punic, name, cartfile_append = True, language='swift'):
repositories = GitHub().search(name, language = language)
logging.info('Found {} repositories matching \'{}\'. Filtering...'.format(len(repositories), name))
# Get rid of forks.
repositories = [repo for repo in repositories if not repo.json['fork']]
# Get rid of zero stars.
repositories = [repo for repo in repositories if repo.json['stargazers_count']]
# Limit to 10 items
repositories = repositories[:10]
def formatter(repo):
s = '<ref>{repo.full_name}</ref>, stars: {repo.stargazers_count}, license: {repo.license}'.format(repo=repo)
return styled(s)
if not cartfile_append:
for repository in repositories:
logging.info(formatter(repository))
else:
# Get rid of no license.
repositories = [repo for repo in repositories if repo.license]
repository = menu('?', repositories, formatter)
repository = repositories[0]
append_to_cartfile(punic, repository)
def append_to_cartfile(punic, repository):
cartfile_path = punic.config.root_path / 'Cartfile'
if cartfile_path.exists():
cartfile = Cartfile()
specifications = cartfile.read(cartfile_path)
else:
specifications = []
project_identifier = ProjectIdentifier(source='github', team_name=repository.owner, project_name=repository.name)
for specification in specifications:
if specification.identifier == project_identifier:
logging.warning('Project \'<ref>{}</ref>\' already in cartfile'.format(project_identifier))
return
logging.info('Adding \'<ref>{}</ref>\' to Cartfile'.format(project_identifier))
new_specification = Specification(identifier=project_identifier, predicate=None)
cartfile = Cartfile(specifications + [new_specification])
cartfile.write(cartfile_path.open('w'))
```
#### File: punic/punic/semantic_version.py
```python
from __future__ import division, absolute_import, print_function
__all__ = ['SemanticVersion']
import re
from functools import total_ordering
@total_ordering
class SemanticVersion(object):
expression = re.compile(r'^(?P<prefix>[a-z,A-Z,_]+)?(?P<major>\d+)(?:\.(?P<minor>\d+)(?:\.(?P<patch>\d+))?)?(?:-(?P<identifiers>.+))?$', re.I)
@classmethod
def is_semantic(cls, s):
"""
>>> SemanticVersion.is_semantic("1.0")
True
>>> SemanticVersion.is_semantic("x.0")
False
"""
# type: (str) -> bool
match = SemanticVersion.expression.match(s)
return True if match else False
def __init__(self, major, minor = 0, patch=0, identifiers=None):
"""
>>> SemanticVersion(1, 0)
1.0
>>> SemanticVersion(1, 0, 0)
1.0
>>> SemanticVersion(1, identifiers = ['0'])
1.0-0
"""
assert isinstance(major, int)
assert isinstance(minor, int)
assert isinstance(patch, int)
self.major = major if major else 0
self.minor = minor if minor else 0
self.patch = patch if patch else 0
self.identifiers = [Identifier(identifier) for identifier in identifiers] if identifiers else []
@property
def _components(self):
"""
>>> SemanticVersion(1, 2, 3)._components
([1, 2, 3], [])
"""
# TODO: using a tuple breaks code
# return (self.major, self.minor, self.patch)
return ([self.major, self.minor, self.patch], self.identifiers)
def __repr__(self):
components = [self.major, self.minor] + ([self.patch] if self.patch else [])
components = [str(component) for component in components]
repr = '.'.join(components)
if len(self.identifiers) >= 1:
repr += '-' + '.'.join([str(identifier) for identifier in self.identifiers])
return repr
def __eq__(self, other):
"""
>>> SemanticVersion.string('1') == SemanticVersion.string('1')
True
>>> SemanticVersion.string('1') == SemanticVersion.string('1.0')
True
>>> SemanticVersion.string('1') == SemanticVersion.string('1.0.0')
True
>>> SemanticVersion.string('1') != SemanticVersion.string('1')
False
"""
return self._components[0] == other._components[0] and self._components[1] == other._components[1]
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
"""
>>> SemanticVersion.string('1') < SemanticVersion.string('2')
True
>>> SemanticVersion.string('1') <= SemanticVersion.string('2')
True
>>> SemanticVersion.string('1.1') > SemanticVersion.string('1.0')
True
>>> SemanticVersion.string('v5.0.0-beta6') > SemanticVersion.string('v5.0.0-beta1')
True
>>> SemanticVersion.string('v5.0.0-beta1') > SemanticVersion.string('v5.0.0-beta6')
False
>>> SemanticVersion.string('v5.0.0-10') > SemanticVersion.string('v5.0.0-2')
True
>>> SemanticVersion.string('v5.0.0-dummy10') > SemanticVersion.string('v5.0.0-dummy2')
False
>>> SemanticVersion.string('v5.0.0') > SemanticVersion.string('v5.0.0-dummy2')
True
"""
        # The same version with no identifiers is rated higher than one with identifiers
if self._components[0] < other._components[0]:
return True
elif self._components[0] == other._components[0]:
if len(other._components[1]) == 0 and len(self._components[1]) > 0:
return True
elif len(self._components[1]) == 0 and len(other._components[1]) > 0:
return False
else:
return self._components[1] < other._components[1]
return False
def __hash__(self):
"""
>>> hash(SemanticVersion(1, 0)) == hash(SemanticVersion(1, 0))
True
>>> hash(SemanticVersion(1, 0)) != hash(SemanticVersion(0, 1))
True
"""
return hash(self.major * 1000000) ^ hash(self.minor * 10000) ^ hash(self.patch * 100)
@classmethod
def from_dict(cls, d):
if set(d.keys()).issubset({'major', 'minor', 'micro', 'releaselevel', 'serial'}):
return SemanticVersion(major=d.get('major'), minor=d.get('minor'), patch=d.get('micro'))
else:
raise Exception('Invalid dict')
@classmethod
def string(cls, s):
# type: (str) -> SemanticVersion
"""
>>> SemanticVersion.string('1')
1.0
>>> SemanticVersion.string('1.2')
1.2
>>> SemanticVersion.string('1.2.3')
1.2.3
>>> SemanticVersion.string('v5.0.0-beta6')
5.0-beta6
>>> SemanticVersion.string('test5.0.0-beta6')
5.0-beta6
>>> SemanticVersion.string('test5')
5.0
>>> SemanticVersion.string('garbage')
Traceback (most recent call last):
Exception: "garbage" not a semantic version.
>>> SemanticVersion.string('v')
Traceback (most recent call last):
Exception: "v" not a semantic version.
>>> SemanticVersion.string('')
Traceback (most recent call last):
Exception: "" not a semantic version.
>>> SemanticVersion.string('sync-hackathon-2015-11-09')
Traceback (most recent call last):
Exception: "sync-hackathon-2015-11-09" not a semantic version.
"""
match = SemanticVersion.expression.match(s)
if not match:
raise Exception('"{}" not a semantic version.'.format(s))
d = match.groupdict()
major = int(d['major']) if d['major'] else 0
minor = int(d['minor']) if d['minor'] else 0
patch = int(d['patch']) if d['patch'] else 0
identifiers = [Identifier(identifier) for identifier in (d['identifiers'].split('.') if d['identifiers'] else [])]
return SemanticVersion(major=major, minor=minor, patch=patch, identifiers=identifiers)
@property
def next_major(self):
"""
>>> SemanticVersion.string('1.2').next_major
2.0
"""
# type: () -> SemanticVersion
return SemanticVersion(major=self.major + 1, minor=0, patch=0)
@property
def next_minor(self):
"""
        >>> SemanticVersion.string('1.2').next_minor
        1.3
"""
# type: () -> SemanticVersion
return SemanticVersion(major=self.major, minor=self.minor + 1, patch=0)
class Identifier(object):
def __init__(self, value):
if isinstance(value, Identifier):
value = value.string_value
self.string_value = value
self.int_value = int(value) if RepresentsInt(value) else None
def __repr__(self):
return self.string_value
def __eq__(self, other):
return self.string_value == other.string_value
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
if self.int_value is not None and other.int_value is not None:
return self.int_value < other.int_value
else:
return self.string_value < other.string_value
def __hash__(self):
return hash(self.string_value)
INT_RE = re.compile(r"^[-]?\d+$")
def RepresentsInt(s):
return INT_RE.match(str(s)) is not None
```
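Because `SemanticVersion` is wrapped in `functools.total_ordering`, instances sort exactly as the `__lt__` doctests describe; a short sketch:
```python
# Identifiers (pre-release tags) sort below the same version without them.
tags = ["v5.0.0-beta1", "1.2", "v5.0.0", "1.2.3"]
print(sorted(SemanticVersion.string(t) for t in tags))
# [1.2, 1.2.3, 5.0-beta1, 5.0]
```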
#### File: punic/punic/styling.py
```python
from __future__ import division, absolute_import, print_function
__all__ = ['styled']
# noinspection PyUnresolvedReferences
from six.moves.html_parser import HTMLParser
from blessings import Terminal
term = Terminal()
default_styles = {
'err': term.red,
'ref': term.yellow,
'rev': term.bold,
'cmd': term.cyan + term.underline, # 'sub': term.cyan,
'echo': term.yellow,
}
class MyHTMLParser(HTMLParser):
def __init__(self, styled, styles = None):
HTMLParser.__init__(self)
self.s = ''
self.styled = styled
self.styles = styles if styles else default_styles
self.style_stack = []
# noinspection PyUnusedLocal
def handle_starttag(self, tag, attrs):
if tag in self.styles:
self.style_stack.append(self.styles[tag])
def handle_endtag(self, tag):
if tag in self.styles:
self.style_stack.pop()
def handle_data(self, data):
if self.styled:
self.apply()
self.s += data
def apply(self):
self.s += term.normal
for style in set(self.style_stack):
self.s += style
def styled(s, styled = True, styles = None):
parser = MyHTMLParser(styled=styled, styles = styles)
parser.feed(s)
return parser.s + (term.normal if styled else '')
# '<head>***</head> Checkout out <title>SwiftLogging</title> at "<version>v1.0.1</version>"')
#
# # instantiate the parser and fed it some HTML
```
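A usage sketch for `styled`: the pseudo-HTML tags map onto the `blessings` attributes in `default_styles`, and passing `styled=False` strips the tags without emitting any terminal codes:
```python
# Tags that are not in the styles dict are silently ignored by the parser.
message = '<err>Error</err>: no such revision <rev>v1.0.1</rev> in <ref>SwiftLogging</ref>'
print(styled(message))                # colourised for the current terminal
print(styled(message, styled=False))  # plain text with the tags removed
```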
|
{
"source": "jeremywrnr/life-of-the-party",
"score": 2
}
|
#### File: software/gui/colorTest.py
```python
MODES = ['CK','Simu','Saiko']
CURRENT_MODE = 1 # 2= saiko liblo
MODE_NAME = MODES[CURRENT_MODE]
IP_ADDRESSES = ["192.168.1.200","192.168.1.201","192.168.1.122"]
class LightController:
def SendLightsSimu(self,r,g,b):
print r,g,b
def SendLightsCK(self,r,g,b):
# struct.pack(fmt, magic, ver, type, seq, port, flags, timerVal V, uni, 0, 0, 0, 0, data)
levels = [r,g,b]*10
arr = array.array('B', levels)
out = struct.pack("LHHLBxHLB255s", 0x4adc0104, 0x0001, 0x0101, 0, 0, 0, -1, 0, arr.tostring())
socket(AF_INET, SOCK_DGRAM).sendto(out, (IP_ADDRESS, port))
# print r,g,b
def SendLightsSaiko(self,r,g,b):
fRed = r/255.0
fGreen = g/255.0
fBlue = b/255.0
for address in addresses:
liblo.send(address,'/light/color/set',('f',fRed),('f',fGreen),('f',fBlue))
def UpdateControlSet(self,listOfLights):
pass
if CURRENT_MODE == 2:
import liblo
addresses = [liblo.Address(IP_ADDRESS,"2222") for IP_ADDRESS in IP_ADDRESSES]
LightController.SendLights = LightController.SendLightsSaiko
elif CURRENT_MODE == 0:
import struct
import array
from socket import socket, AF_INET, SOCK_DGRAM
port = 6038
LightController.SendLights = LightController.SendLightsCK
else:
LightController.SendLights = LightController.SendLightsSimu
import wx
import os
import sys
try:
dirName = os.path.dirname(os.path.abspath(__file__))
except:
dirName = os.path.dirname(os.path.abspath(sys.argv[0]))
sys.path.append(os.path.split(dirName)[0])
try:
from agw import cubecolourdialog as ccdSource
# from agw.cubecolourdialog import *
except ImportError: # if it's not there locally, try the wxPython lib.
# from wx.lib.agw.cubecolourdialog import *
import wx.lib.agw.cubecolourdialog as ccdSource
#import cubecolourdialog as ccdSource
from wx.lib.agw.cubecolourdialog import Colour
#Colour = ccdSource.Colour
from colourWidgets import RGBCube,HSVWheel,BrightCtrl
from myWidget import PowerCtrl,XYPanel#RGBCube = ccdSource.RGBCube
#HSVWheel = ccdSource.HSVWheel
#BrightCtrl = ccdSource.BrightCtrl
CustomPanel = ccdSource.CustomPanel
ColourPanel = ccdSource.ColourPanel
#colourAttributes = ccdSource.colourAttributes
#colourMaxValues = ccdSource.colourMaxValues
colourAttributes = ["r", "g", "b", "h", "s", "v","t","c","p"]
colourMaxValues = [255, 255, 255, 359, 255, 255, 359, 255, 255]
Distance = ccdSource.Distance
Vertex = ccdSource.Vertex
Top = ccdSource.Top
Left = ccdSource.Left
Right = ccdSource.Right
RED=ccdSource.RED
GREEN=ccdSource.GREEN
BLUE=ccdSource.BLUE
LineDescription = ccdSource.LineDescription
Slope = ccdSource.Slope
FindC = ccdSource.FindC
PointOnLine = ccdSource.PointOnLine
Intersection = ccdSource.Intersection
PtFromAngle = ccdSource.PtFromAngle
RECT_WIDTH = ccdSource.RECT_WIDTH
class NewColour(Colour):
constrainPower = False
def __init__(self,colour):
# super.__init__(colour)
# Colour.__init__(self,colour)
super(NewColour,self).__init__(colour)
self.ToXYZ()
def ToHSL(self):
self.H = self.h
self.L = (510.0-(self.s)) * (self.v/255.0)
self.S = self.s * self.v
if self.L <= 255.0:
lfactor = self.L
else:
lfactor = 510.0 - self.L
self.S /= lfactor
self.L /= 2.0
def ToXYZ(self):
self.c = self.s #2*(max(self.b,max(self.r,self.g)) - min(self.b,min(self.r,self.g)))
self.p = min(255,self.r+self.g+self.b)
self.t = self.h
if self.constrainPower:
# do stuff for ToHSV and ToRGB
pass
else:
pass
def ToHSV(self):
Colour.ToHSV(self)
self.ToXYZ()
def ToRGB(self):
Colour.ToRGB(self)
self.ToXYZ()
def HSL_ToRGB_HSV(self):
self.h = self.H
ell = self.L/255.0 * 2
ess = self.S/255.0
if ell <= 1:
ess *= ell
else:
ess *= (2 - ell)
self.v = int(255.0*((ell + ess) / 2))
self.s = int(255.0*(2*ess /(ell+ess)))
Colour.ToRGB(self)
def XYZ_ToRGB_HSV(self):
maxVal = self.p
delta = maxVal * self.c / 255.0
minVal = maxVal - delta
hue = float(self.t)
if self.t > 300 or self.t <=60:
#red max
r=int(maxVal)
if self.t > 300:
g = int(minVal)
hue = (hue - 360.0)/60.0
b = int(-(hue*delta - minVal))
else:
b=int(minVal)
hue = hue/60.0
g = int(hue*delta+minVal)
elif self.t > 60 and self.t < 180:
#green max
g = int(maxVal)
hue = (hue/60.0 - 2.0)*delta
if self.t < 120:
b = int(minVal)
r = int(minVal - hue)
else:
r = int(minVal)
b = int(minVal + hue)
else:
b = int(maxVal)
hue = (hue/60.0 - 4.0)*delta
if self.t < 240:
r = int(minVal)
g = int(minVal - hue)
else:
g = int(minVal)
r = int(minVal + hue)
power = self.p
        sumpower = (r + g + b) / 1.0
if sumpower:
self.r=int(r*power/sumpower)
self.g=int(g*power/sumpower)
self.b=int(b*power/sumpower)
#
# self.h = self.t
# self.s = self.c
# power = self.p
# self.v = self.p
# Colour.ToRGB(self)
# colorpower = (self.r + self.g + self.b) / 1
# if colorpower:
# self.r=int(self.r*power/colorpower)
# self.g=int(self.g*power/colorpower)
# self.b=int(self.b*power/colorpower)
#
Colour.ToHSV(self)
class NewCustomPanel(CustomPanel):
def __init__(self,parent,cd):
# super(NewCustomPanel,self).__init__(parent,cd)
CustomPanel.__init__(self,parent,cd)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{CustomPanel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
x, y = event.GetX(), event.GetY()
selX = (x - self._customColourRect.x)/(self._smallRectangleSize.x + self._gridSpacing)
selY = (y - self._customColourRect.y)/(self._smallRectangleSize.y + self._gridSpacing)
ptr = selX + selY*8
# dc = wx.ClientDC(self)
# self.PaintHighlight(dc, False)
self._colourSelection = ptr
self._mainDialog._colour = NewColour(self._customColours[self._colourSelection])
# self.PaintCustomColour(dc, selX, selY)
# self.PaintHighlight(dc, True)
self._mainDialog.DrawAll()
self._mainDialog.SendLightsIfManual()
class CubeColourFrame(wx.Frame):
"""
This is the CubeColourFrame main class implementation.
"""
manualSend = False
def __init__(self, parent, title, lc = None, colourData=None, agwStyle=ccdSource.CCD_SHOW_ALPHA):
"""
Default class constructor.
:param `colourData`: a standard `wx.ColourData` (as used in `wx.ColourFrame`;
:param `agwStyle`: can be either ``None`` or ``ccdSource.CCD_SHOW_ALPHA``, depending if you want
to hide the alpha channel control or not.
"""
if lc == None:
self.lc = LightController()
else:
self.lc = lc
# wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title=_("Optimus Shine"),
# pos=wx.DefaultPosition, size=(900, 900), style=wx.DEFAULT_DIALOG_STYLE)
wx.Frame.__init__(self, parent, -1, title, pos=wx.DefaultPosition, size=(900, 900))
if colourData:
self._colourData = colourData
else:
self._colourData = wx.ColourData()
self._colourData.SetColour(wx.Colour(128, 128, 128))
# self._oldColour = Colour(self._colourData.GetColour())
self._colour = NewColour(self._colourData.GetColour())
self._inMouse = False
self._initOver = False
self._inDrawAll = False
self._agwStyle = agwStyle
self.mainPanel = wx.Panel(self, -1)
self.xyzSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "XYZ")
self.hsvSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB")
self.rgbValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB Values")
self.hsvValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB Values")
self.xyzValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "XYZ Values")
self.rgbSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB")
self.curcolSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Current Color")
# self.alphaSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
# self.alphaValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
self.rgbBitmap = RGBCube(self.mainPanel)
self.hsvBitmap = HSVWheel(self.mainPanel)
self.brightCtrl = BrightCtrl(self.mainPanel)
# self.alphaCtrl = AlphaCtrl(self.mainPanel)
self.powerCtrl = PowerCtrl(self.mainPanel)
self.xyPanel = XYPanel(self.mainPanel)
# self.showAlpha = wx.CheckBox(self.mainPanel, -1, "Show Alpha Control")
self.autoSend = wx.CheckBox(self.mainPanel, -1, "AutoSend on\nColorChange")
self.customColours = NewCustomPanel(self.mainPanel, self._colourData)
self.addCustom = wx.Button(self.mainPanel, -1, "Add to custom colours")
# self.okButton = wx.Button(self.mainPanel, -1, "Ok")
self.cancelButton = wx.Button(self.mainPanel, -1, "Cancel")
self.sendButton = wx.Button(self.mainPanel, -1, "Send")
# self.oldColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.newColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.redSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.greenSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.blueSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.hueSpin = wx.SpinCtrl(self.mainPanel, -1, "0", min=-1, max=360,
style=wx.SP_ARROW_KEYS)
self.saturationSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.brightnessSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.tintSpin = wx.SpinCtrl(self.mainPanel, -1, "0", min=-1, max=360,
style=wx.SP_ARROW_KEYS)
self.chromaSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.powerSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
# self.alphaSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
# style=wx.SP_ARROW_KEYS)
# self.accessCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
# self.htmlCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
# self.webSafe = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
# self.htmlName = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.SetProperties()
self.DoLayout()
self.spinCtrls = [self.redSpin, self.greenSpin, self.blueSpin,
self.hueSpin, self.saturationSpin, self.brightnessSpin,
self.tintSpin, self.chromaSpin, self.powerSpin]
for spin in self.spinCtrls:
spin.Bind(wx.EVT_SPINCTRL, self.OnSpinCtrl)
# self.Bind(wx.EVT_SPINCTRL, self.OnAlphaSpin, self.alphaSpin)
# self.Bind(wx.EVT_BUTTON, self.OnOk, self.okButton)
# self.Bind(wx.EVT_BUTTON, self.OnCancel, self.cancelButton)
self.Bind(wx.EVT_BUTTON, self.OnSend, self.sendButton)
self.Bind(wx.EVT_BUTTON, self.OnAddCustom, self.addCustom)
self.Bind(wx.EVT_CHECKBOX, self.OnAutoSend)
# self.Bind(wx.EVT_CHECKBOX, self.OnShowAlpha)
# self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Bind(wx.EVT_CHAR_HOOK, self.OnKeyUp)
self.Centre(wx.BOTH)
wx.CallAfter(self.InitDialog)
def SetProperties(self):
""" Sets some initial properties for L{CubeColourDialog} (sizes, values). """
# self.okButton.SetDefault()
# self.oldColourPanel.SetMinSize((-1, 50))
self.newColourPanel.SetMinSize((-1, 50))
self.redSpin.SetMinSize((60, -1))
self.greenSpin.SetMinSize((60, -1))
self.blueSpin.SetMinSize((60, -1))
self.hueSpin.SetMinSize((60, -1))
self.saturationSpin.SetMinSize((60, -1))
self.brightnessSpin.SetMinSize((60, -1))
self.tintSpin.SetMinSize((60, -1))
self.chromaSpin.SetMinSize((60, -1))
self.powerSpin.SetMinSize((60, -1))
# self.alphaSpin.SetMinSize((60, -1))
# self.showAlpha.SetValue(1)
self.autoSend.SetValue(1)
# self.accessCode.SetInitialSize((80, -1))
# self.webSafe.SetInitialSize((80, -1))
# self.htmlCode.SetInitialSize((80, -1))
def DoLayout(self):
""" Layouts all the controls in the L{CubeColourDialog}. """
windowSizer = wx.BoxSizer(wx.VERTICAL)
mainSizer = wx.GridBagSizer(10, 5)
hsvValueSizer = wx.StaticBoxSizer(self.hsvValueSizer_staticbox, wx.VERTICAL)
hsvGridSizer = wx.GridSizer(2, 3, 2, 10)
rgbValueSizer = wx.StaticBoxSizer(self.rgbValueSizer_staticbox, wx.HORIZONTAL)
rgbGridSizer = wx.GridSizer(2, 3, 2, 10)
xyzValueSizer = wx.StaticBoxSizer(self.xyzValueSizer_staticbox, wx.HORIZONTAL)
xyzGridSizer = wx.GridSizer(2, 3, 2, 10)
# alphaValueSizer = wx.StaticBoxSizer(self.alphaValueSizer_staticbox, wx.VERTICAL)
# alphaGridSizer = wx.BoxSizer(wx.VERTICAL)
customSizer = wx.BoxSizer(wx.VERTICAL)
buttonSizer = wx.BoxSizer(wx.VERTICAL)
sendbuttonSizer = wx.BoxSizer(wx.VERTICAL)
sendSizer = wx.BoxSizer(wx.HORIZONTAL)
curcolSizer = wx.StaticBoxSizer(self.curcolSizer_staticbox, wx.VERTICAL)
panelSizer = wx.BoxSizer(wx.VERTICAL)
# htmlSizer1 = wx.BoxSizer(wx.HORIZONTAL)
# htmlSizer2 = wx.BoxSizer(wx.VERTICAL)
# htmlSizer_a = wx.BoxSizer(wx.VERTICAL)
# htmlSizer_b = wx.BoxSizer(wx.VERTICAL)
xyzSizer = wx.StaticBoxSizer(self.xyzSizer_staticbox, wx.HORIZONTAL)
hsvSizer = wx.StaticBoxSizer(self.hsvSizer_staticbox, wx.HORIZONTAL)
rgbSizer = wx.StaticBoxSizer(self.rgbSizer_staticbox, wx.VERTICAL)
# autosendSizer = wx.StaticBoxSizer(self.autosendSizer_staticbox, wx.VERTICAL)
# mainSizer.Add(self.showAlpha, (0, 0), (1, 1), wx.LEFT|wx.TOP, 10)
# htmlLabel1 = wx.StaticText(self.mainPanel, -1, "HTML Code")
# htmlLabel2 = wx.StaticText(self.mainPanel, -1, "Web Safe")
# htmlSizer_a.Add(htmlLabel1, 0, wx.TOP, 3)
# htmlSizer_b.Add(htmlLabel2, 0, wx.TOP, 3)
# htmlSizer_a.Add(self.htmlCode, 0, wx.TOP, 3)
# htmlSizer_b.Add(self.webSafe, 0, wx.TOP, 3)
#
# htmlSizer1.Add(htmlSizer_a, 0)
# htmlSizer1.Add(htmlSizer_b, 0, wx.LEFT, 10)
# mainSizer.Add(htmlSizer1, (1, 0), (1, 1), wx.LEFT|wx.RIGHT, 10)
# htmlLabel3 = wx.StaticText(self.mainPanel, -1, "HTML Name")
# htmlSizer2.Add(htmlLabel3, 0, wx.TOP|wx.BOTTOM, 3)
# htmlSizer2.Add(self.htmlName, 0)
# mainSizer.Add(htmlSizer2, (1, 1), (1, 1), wx.LEFT|wx.RIGHT, 10)
customLabel = wx.StaticText(self.mainPanel, -1, "Custom Colours")
customSizer.Add(customLabel, 0, wx.BOTTOM, 3)
customSizer.Add(self.customColours, 0)
customSizer.Add(self.addCustom, 0, wx.TOP|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL, 5)
mainSizer.Add(customSizer, (1, 1), (1, 1),wx.LEFT|wx.RIGHT, 5)
# panelSizer.Add(accessSizer, 0, wx.TOP, 5)
xyzSizer.Add(self.xyPanel, 0, wx.ALL, 15)
xyzSizer.Add(self.powerCtrl, 0, wx.RIGHT|wx.TOP|wx.BOTTOM, 15)
mainSizer.Add(xyzSizer, (2, 2), (1, 1), wx.ALL|wx.EXPAND, 10)
rgbSizer.Add(self.rgbBitmap, 0, wx.ALL, 15)
mainSizer.Add(rgbSizer, (2, 0), (1, 1), wx.ALL|wx.EXPAND, 10)
hsvSizer.Add(self.hsvBitmap, 0, wx.ALL, 15)
hsvSizer.Add(self.brightCtrl, 0, wx.RIGHT|wx.TOP|wx.BOTTOM, 15)
mainSizer.Add(hsvSizer, (2, 1), (1, 1), wx.ALL|wx.EXPAND, 10)
# alphaSizer.Add(self.alphaCtrl, 0, wx.TOP|wx.ALIGN_CENTER, 15)
# mainSizer.Add(alphaSizer, (2, 2), (1, 1), wx.ALL|wx.EXPAND, 10)
# oldLabel = wx.StaticText(self.mainPanel, -1, "Old Colour")
# panelSizer.Add(oldLabel, 0, wx.BOTTOM, 3)
# panelSizer.Add(self.oldColourPanel, 0, wx.BOTTOM|wx.EXPAND, 20)
# newLabel = wx.StaticText(self.mainPanel, -1, "New Colour")
# accessLabel = wx.StaticText(self.mainPanel, -1, "MS Access Code")
# accessSizer.Add(accessLabel, 0, wx.BOTTOM, 3)
# accessSizer.Add(self.accessCode, 0)
sendbuttonSizer.Add(self.sendButton, 0,wx.TOP,10)
curcolSizer.Add(self.newColourPanel, 0, wx.EXPAND)
sendSizer.Add(self.autoSend)
sendSizer.Add(sendbuttonSizer,0,wx.LEFT,20)
curcolSizer.Add(sendSizer)
# panelSizer.Add(newLabel, 0, wx.TOP, 3)
# panelSizer.Add(autosendSizer, 0, wx.TOP)
# panelSizer.Add((0, 0), 1, wx.EXPAND)
# panelSizer.Add((1,0), 1, wx.BOTTOM)
# panelSizer.Add(sendbuttonSizer, 0, wx.TOP, 5)
# panelSizer.Add(autosendSizer, 0, wx.BOTTOM, 10)
mainSizer.Add(curcolSizer, (1, 0), (1, 1), wx.ALL|wx.EXPAND, 10)
redLabel = wx.StaticText(self.mainPanel, -1, "Red")
rgbGridSizer.Add(redLabel, 0)
greenLabel = wx.StaticText(self.mainPanel, -1, "Green")
rgbGridSizer.Add(greenLabel, 0)
blueLabel = wx.StaticText(self.mainPanel, -1, "Blue")
rgbGridSizer.Add(blueLabel, 0)
rgbGridSizer.Add(self.redSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.greenSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.blueSpin, 0, wx.EXPAND)
rgbValueSizer.Add(rgbGridSizer, 1, 0, 0)
mainSizer.Add(rgbValueSizer, (3, 0), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
hueLabel = wx.StaticText(self.mainPanel, -1, "Hue")
hsvGridSizer.Add(hueLabel, 0)
saturationLabel = wx.StaticText(self.mainPanel, -1, "Saturation")
hsvGridSizer.Add(saturationLabel, 0)
brightnessLabel = wx.StaticText(self.mainPanel, -1, "Brightness")
hsvGridSizer.Add(brightnessLabel, 0)
hsvGridSizer.Add(self.hueSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.saturationSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.brightnessSpin, 0, wx.EXPAND)
hsvValueSizer.Add(hsvGridSizer, 1, wx.EXPAND)
mainSizer.Add(hsvValueSizer, (3, 1), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
xyzValueSizer.Add(xyzGridSizer, 1, 0, 0)
mainSizer.Add(xyzValueSizer, (3, 2), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
tintLabel = wx.StaticText(self.mainPanel, -1, "Tint")
xyzGridSizer.Add(tintLabel, 0)
chromaLabel = wx.StaticText(self.mainPanel, -1, "Chroma")
xyzGridSizer.Add(chromaLabel, 0)
powerLabel = wx.StaticText(self.mainPanel, -1, "Power")
xyzGridSizer.Add(powerLabel, 0)
xyzGridSizer.Add(self.tintSpin, 0, wx.EXPAND)
xyzGridSizer.Add(self.chromaSpin, 0, wx.EXPAND)
xyzGridSizer.Add(self.powerSpin, 0, wx.EXPAND)
# alphaLabel = wx.StaticText(self.mainPanel, -1, "Alpha")
# alphaGridSizer.Add(alphaLabel, 0)
# alphaGridSizer.Add(self.alphaSpin, 0, wx.EXPAND|wx.TOP, 10)
# alphaValueSizer.Add(alphaGridSizer, 1, wx.EXPAND)
# mainSizer.Add(alphaValueSizer, (3, 2), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
# buttonSizer.Add(self.okButton, 0, wx.BOTTOM, 3)
buttonSizer.Add(self.cancelButton, 0)
mainSizer.Add(buttonSizer, (3, 3), (1, 1), wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 5)
mainSizer.Hide(buttonSizer)
self.mainPanel.SetAutoLayout(True)
self.mainPanel.SetSizer(mainSizer)
mainSizer.Fit(self.mainPanel)
mainSizer.SetSizeHints(self.mainPanel)
# if self.GetAGWWindowStyleFlag() & ccdSource.CCD_SHOW_ALPHA == 0:
# mainSizer.Hide(self.showAlpha)
# mainSizer.Hide(alphaSizer)
# mainSizer.Hide(alphaValueSizer)
windowSizer.Add(self.mainPanel, 1, wx.EXPAND)
self.SetAutoLayout(True)
self.SetSizer(windowSizer)
windowSizer.Fit(self)
windowSizer.SetSizeHints(self)
self.Layout()
self.mainSizer = mainSizer
self.windowSizer = windowSizer
# self.alphaSizers = [alphaSizer, alphaValueSizer]
def InitDialog(self):
""" Initialize the L{CubeColourDialog}. """
hsvRect = self.hsvBitmap.GetClientRect()
self._centre = wx.Point(hsvRect.x + hsvRect.width/2, hsvRect.y + hsvRect.height/2)
xyRect = self.xyPanel.GetClientRect()
self._centre2 = wx.Point(xyRect.x + xyRect.width/2, xyRect.y + xyRect.height/2)
self._redLen = Distance(Vertex, Top)
self._greenLen = Distance(Vertex, Left)
self._blueLen = Distance(Vertex, Right)
self.CalcSlopes()
self.CalcCuboid()
self.CalcRects()
self.CalcRects2()
self.SetSpinVals()
self._initOver = True
wx.CallAfter(self.Refresh)
def CalcSlopes(self):
""" Calculates the line slopes in the RGB colour cube. """
        self._lines = {RED: LineDescription(), GREEN: LineDescription(), BLUE: LineDescription()}
self._lines[RED].slope = Slope(Top, Vertex)
self._lines[GREEN].slope = Slope(Left, Vertex)
self._lines[BLUE].slope = Slope(Right, Vertex)
for i in xrange(3):
self._lines[i].x = Vertex.x
self._lines[i].y = Vertex.y
self._lines[i].c = FindC(self._lines[i])
def CalcCuboid(self):
""" Calculates the RGB colour cube vertices. """
rLen = (self._colour.r*self._redLen)/255.0
gLen = (self._colour.g*self._greenLen)/255.0
bLen = (self._colour.b*self._blueLen)/255.0
lines = [LineDescription() for i in xrange(12)]
self._cuboid = [None]*8
self._cuboid[0] = Vertex
self._cuboid[1] = PointOnLine(Vertex, Top, int(rLen), self._redLen)
self._cuboid[3] = PointOnLine(Vertex, Left, int(gLen), self._greenLen)
self._cuboid[7] = PointOnLine(Vertex, Right, int(bLen), self._blueLen)
lines[0] = self._lines[RED]
lines[1] = self._lines[GREEN]
lines[2] = self._lines[BLUE]
lines[3].slope = self._lines[GREEN].slope
lines[3].x = self._cuboid[1].x
lines[3].y = self._cuboid[1].y
lines[3].c = FindC(lines[3])
lines[4].slope = self._lines[RED].slope
lines[4].x = self._cuboid[3].x
lines[4].y = self._cuboid[3].y
lines[4].c = FindC(lines[4])
lines[5].slope = self._lines[BLUE].slope
lines[5].x = self._cuboid[3].x
lines[5].y = self._cuboid[3].y
lines[5].c = FindC(lines[5])
lines[6].slope = self._lines[GREEN].slope
lines[6].x = self._cuboid[7].x
lines[6].y = self._cuboid[7].y
lines[6].c = FindC(lines[6])
lines[10].slope = self._lines[BLUE].slope
lines[10].x = self._cuboid[1].x
lines[10].y = self._cuboid[1].y
lines[10].c = FindC(lines[10])
lines[11].slope = self._lines[RED].slope
lines[11].x = self._cuboid[7].x
lines[11].y = self._cuboid[7].y
lines[11].c = FindC(lines[11])
self._cuboid[2] = Intersection(lines[3], lines[4])
self._cuboid[4] = Intersection(lines[5], lines[6])
self._cuboid[6] = Intersection(lines[10], lines[11])
lines[7].slope = self._lines[RED].slope
lines[7].x = self._cuboid[4].x
lines[7].y = self._cuboid[4].y
lines[7].c = FindC(lines[7])
lines[8].slope = self._lines[BLUE].slope
lines[8].x = self._cuboid[2].x
lines[8].y = self._cuboid[2].y
lines[8].c = FindC(lines[8])
self._cuboid[5] = Intersection(lines[7], lines[8])
def CalcRects(self):
""" Calculates the brightness control user-selected rect. """
pt = PtFromAngle(self._colour.h, self._colour.s, self._centre)
self._currentRect = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
def CalcRects2(self):
""" Calculates the brightness control user-selected rect. """
pt = PtFromAngle(self._colour.t, self._colour.c, self._centre2)
self._currentRect2 = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
def DrawMarkers(self, dc=None):
"""
Draws the markers for all the controls.
:param `dc`: an instance of `wx.DC`. If `dc` is ``None``, a `wx.ClientDC` is
created on the fly.
"""
if dc is None:
dc = wx.ClientDC(self)
self.hsvBitmap.DrawMarkers()
self.rgbBitmap.DrawMarkers()
self.xyPanel.DrawMarkers()
self.brightCtrl.DrawMarkers()
self.powerCtrl.DrawMarkers()
def DrawRGB(self):
""" Refreshes the RGB colour cube. """
self.rgbBitmap.Refresh()
def DrawHSB(self):
""" Refreshes the HSB colour wheel. """
self.hsvBitmap.Refresh()
def DrawXYZ(self):
""" Refreshes the XYZ colour wheel. """
self.xyPanel.Refresh()
def DrawBright(self):
""" Refreshes the brightness control. """
self.brightCtrl.Refresh()
def DrawPower(self):
""" Refreshes the powerness control. """
self.powerCtrl.Refresh()
def DrawAlpha(self):
""" Refreshes the alpha channel control. """
pass
# self.alphaCtrl.Refresh()
def SendLights(self):
self.lc.SendLights(*self.GetRGBAColour()[:-1])
def SendLightsIfManual(self):
if self.manualSend:
self.lc.SendLights(*self.GetRGBAColour()[:-1])
def SetSpinVals(self):
""" Sets the values for all the spin controls. """
self.redSpin.SetValue(self._colour.r)
self.greenSpin.SetValue(self._colour.g)
self.blueSpin.SetValue(self._colour.b)
self.hueSpin.SetValue(self._colour.h)
self.saturationSpin.SetValue(self._colour.s)
self.brightnessSpin.SetValue(self._colour.v)
self.powerSpin.SetValue(self._colour.p)
self.chromaSpin.SetValue(self._colour.c)
self.tintSpin.SetValue(self._colour.t)
# self.alphaSpin.SetValue(self._colour._alpha)
self.SetPanelColours()
if self.manualSend:
pass
else:
self.SendLights()
# self.SetCodes()
def SetPanelColours(self):
""" Assigns colours to the colour panels. """
# self.oldColourPanel.RefreshColour(self._oldColour)
self.newColourPanel.RefreshColour(self._colour)
# def SetCodes(self):
# """ Sets the HTML/MS Access codes (if any) in the text controls. """
# colour = rgb2html(self._colour)
# self.htmlCode.SetValue(colour)
# self.htmlCode.Refresh()
# if colour in HTMLCodes:
# colourName, access, webSafe = HTMLCodes[colour]
# self.webSafe.SetValue(webSafe)
# self.accessCode.SetValue(access)
# self.htmlName.SetValue(colourName)
# else:
# self.webSafe.SetValue("")
# self.accessCode.SetValue("")
# self.htmlName.SetValue("")
def OnCloseWindow(self, event):
"""
Handles the ``wx.EVT_CLOSE`` event for L{CubeColourDialog}.
:param `event`: a `wx.CloseEvent` event to be processed.
"""
# self.EndModal(wx.ID_CANCEL)
def OnKeyUp(self, event):
"""
Handles the ``wx.EVT_CHAR_HOOK`` event for L{CubeColourDialog}.
:param `event`: a `wx.KeyEvent` event to be processed.
"""
# if event.GetKeyCode() == wx.WXK_ESCAPE:
# self.EndModal(wx.ID_CANCEL)
#
event.Skip()
# def ShowModal(self):
# """
# Shows L{CubeColourDialog} as a modal dialog. Program flow does
# not return until the dialog has been dismissed with `EndModal`.
#
# :note: Overridden from `wx.Dialog`.
# """
#
# return wx.Dialog.ShowModal(self)
def SetAGWWindowStyleFlag(self, agwStyle):
"""
Sets the L{CubeColourDialog} window style flags.
:param `agwStyle`: can only be ``ccdSource.CCD_SHOW_ALPHA``.
"""
# show = self.GetAGWWindowStyleFlag() & ccdSource.CCD_SHOW_ALPHA
self._agwStyle = agwStyle
# self.mainSizer.Show(self.alphaSizers[0], show)
# self.mainSizer.Show(self.alphaSizers[1], show)
self.mainSizer.Fit(self.mainPanel)
self.mainSizer.SetSizeHints(self.mainPanel)
self.mainSizer.Layout()
self.windowSizer.Fit(self)
self.windowSizer.SetSizeHints(self)
self.Layout()
self.Refresh()
self.Update()
def GetAGWWindowStyleFlag(self):
"""
Returns the L{CubeColourDialog} window style flags.
"""
return self._agwStyle
def OnOk(self, event):
"""
Handles the Ok ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
# self.EndModal(wx.ID_OK)
def OnSend(self, event):
"""
Handles the Send ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.SendLights()
#elf.OnCloseWindow(event)
def OnCancel(self, event):
"""
Handles the Cancel ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.OnCloseWindow(event)
def OnAddCustom(self, event):
"""
Handles the Add Custom ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.customColours.AddCustom(self._colour)
def OnAutoSend(self, event):
"""
Enables/disables automatic sending from controls in L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
# agwStyle = self.GetAGWWindowStyleFlag()
automode = event.IsChecked()
if automode:
self.manualSend = False
# agwStyle |= ccdSource.CCD_SHOW_ALPHA
else:
self.manualSend = True
# agwStyle &= ~ccdSource.CCD_SHOW_ALPHA
# self.SetAGWWindowStyleFlag(agwStyle)
# def OnShowAlpha(self, event):
# """
# Shows/hides the alpha channel control in L{CubeColourDialog}.
#
# :param `event`: a `wx.CommandEvent` event to be processed.
# """
# agwStyle = self.GetAGWWindowStyleFlag()
# show = event.IsChecked()
# if show:
# agwStyle |= ccdSource.CCD_SHOW_ALPHA
# else:
# agwStyle &= ~ccdSource.CCD_SHOW_ALPHA
# self.SetAGWWindowStyleFlag(agwStyle)
def MyAssignColourValue(self, attribute, colourVal, maxVal, position):
if attribute == 'h' or attribute == 't':
if colourVal>maxVal:
colourVal = colourVal-maxVal-1
elif colourVal < 0:
colourVal = maxVal+1-colourVal
self.AssignColourValue(attribute, colourVal, maxVal, position)
def OnSpinCtrl(self, event):
"""
Handles the ``wx.EVT_SPINCTRL`` event for RGB and HSB colours.
:param `event`: a `wx.SpinEvent` event to be processed.
"""
obj = event.GetEventObject()
position = self.spinCtrls.index(obj)
colourVal = event.GetInt()
attribute, maxVal = colourAttributes[position], colourMaxValues[position]
self.MyAssignColourValue(attribute, colourVal, maxVal, position)
# if self.manualSend:
# pass
# else:
# self.SendLights()
# def OnAlphaSpin(self, event):
# """
# Handles the ``wx.EVT_SPINCTRL`` event for the alpha channel.
#
# :param `event`: a `wx.SpinEvent` event to be processed.
# """
#
# colourVal = event.GetInt()
# originalVal = self._colour._alpha
# if colourVal != originalVal and self._initOver:
# if colourVal < 0:
# colourVal = 0
# if colourVal > 255:
# colourVal = 255
#
# self._colour._alpha = colourVal
# self.DrawAlpha()
def AssignColourValue(self, attribute, colourVal, maxVal, position):
""" Common code to handle spin control changes. """
originalVal = getattr(self._colour, attribute)
if colourVal != originalVal and self._initOver:
if colourVal < 0:
colourVal = 0
if colourVal > maxVal:
colourVal = maxVal
setattr(self._colour, attribute, colourVal)
if position < 3:
self._colour.ToHSV()
# self._colour.ToXYZ()
elif position < 6:
self._colour.ToRGB()
# self._colour.ToXYZ()
else:
self._colour.XYZ_ToRGB_HSV()
self.DrawAll()
def DrawAll(self):
""" Draws all the custom controls after a colour change. """
if self._initOver and not self._inDrawAll:
self._inDrawAll = True
dc1=dc2=dc3=None
#dc1 = wx.ClientDC(self.hsvBitmap)
self.hsvBitmap.DrawMarkers(dc1)
#dc2 = wx.ClientDC(self.rgbBitmap)
self.rgbBitmap.DrawMarkers(dc2)
#self.rgbBitmap.DrawLines(dc2)
#dc3 = wx.ClientDC(self.brightCtrl)
self.brightCtrl.DrawMarkers(dc3)
# dc4 = wx.ClientDC(self.alphaCtrl)
self.xyPanel.DrawMarkers(dc1)
# self.alphaCtrl.DrawMarkers(dc4)
self.CalcCuboid()
self.CalcRects()
self.CalcRects2()
self.DrawRGB()
self.DrawHSB()
self.DrawXYZ()
self.DrawBright()
self.DrawPower()
# self.DrawAlpha()
self.SetSpinVals()
self._inDrawAll = False
def GetColourData(self):
""" Returns a wxPython compatible `wx.ColourData`. """
self._colourData.SetColour(self._colour.GetPyColour())
return self._colourData
def GetRGBAColour(self):
""" Returns a 4-elements tuple of red, green, blue, alpha components. """
return (self._colour.r, self._colour.g, self._colour.b, self._colour._alpha)
def GetHSVAColour(self):
""" Returns a 4-elements tuple of hue, saturation, brightness, alpha components. """
return (self._colour.h, self._colour.s, self._colour.v, self._colour._alpha)
class MyApp(wx.App):
def OnInit(self):
frame = CubeColourFrame(None,title="Optimus Shine")#None, "Simple wxPython App")
self.SetTopWindow(frame)
import time
print "Print statements go to this stdout window by default."
frame.Show(True)
return True
app = MyApp(redirect=True)
app.MainLoop()
```
|
{
"source": "jeremy-w/VersionOne.SDK.Python",
"score": 3
}
|
#### File: VersionOne.SDK.Python/v1pysdk/base_asset.py
```python
from pprint import pformat as pf
from query import V1Query
class BaseAsset(object):
"""Provides common methods for the dynamically derived asset type classes
built by V1Meta.asset_class"""
@classmethod
def query(Class, where=None, sel=None):
'Takes a V1 Data query string and returns an iterable of all matching items'
return V1Query(Class, sel, where)
@classmethod
def select(Class, *selectlist):
return V1Query(Class).select(*selectlist)
@classmethod
def where(Class, **wherekw):
return V1Query(Class).where(**wherekw)
@classmethod
def filter(Class, filterexpr):
return V1Query(Class).filter(filterexpr)
@classmethod
def asof(Class, *asofs):
return V1Query(Class).asof(*asofs)
@classmethod
def from_query_select(Class, xml, asof=None):
"Find or instantiate an object and fill it with data that just came back from query"
idref = xml.get('id')
data = Class._v1_v1meta.unpack_asset(xml)
data['AsOf'] = asof
instance = Class._v1_v1meta.asset_from_oid(idref)
return instance.with_data(data)
@classmethod
def create(Class, **newdata):
"create new asset on server and return created asset proxy instance"
return Class._v1_v1meta.create_asset(Class._v1_asset_type_name, newdata)
class IterableType(type):
def __iter__(Class):
for instance in Class.query():
instance.needs_refresh = True
yield instance
"The type that's instantiated to make THIS class must have an __iter__, "
"so we provide a metaclass (a thing that provides a class when instantiated) "
"that knows how to be iterated over, so we can say list(v1.Story)"
__metaclass__ = IterableType
def __new__(Class, oid):
"Tries to get an instance out of the cache first, otherwise creates one"
cache_key = (Class._v1_asset_type_name, int(oid))
cache = Class._v1_v1meta.global_cache
        if cache_key in cache:
self = cache[cache_key]
else:
self = object.__new__(Class)
self._v1_oid = oid
self._v1_new_data = {}
self._v1_current_data = {}
self._v1_needs_refresh = True
cache[cache_key] = self
return self
@property
def intid(self):
return self._v1_oid
@property
def data(self):
return self._v1_current_data
def __getitem__(self, key):
return self._v1_current_data[key]
@property
def idref(self):
return self._v1_asset_type_name + ':' + str(self._v1_oid)
@property
def reprref(self):
return "{0}({1})".format(self._v1_asset_type_name, self._v1_oid)
@property
def url(self):
return self._v1_v1meta.server.build_url('/assetdetail.v1', query={'oid':self.idref})
class ReprDummy:
def __init__(self, value):
self.value = value
def __repr__(self):
return self.value.reprref
def repr_dummy(self, v):
if isinstance(v, list):
return [self.ReprDummy(item) if isinstance(item, BaseAsset) else item
for item in v]
elif isinstance(v, BaseAsset):
return self.ReprDummy(v)
else:
return v
def repr_shallow(self, d):
# patch up the dict that pformat sees to avoid repr loops
return pf( dict(
(k, self.repr_dummy(v))
for (k,v)
in d.items()
if v
)
)
def __repr__(self):
out = self.reprref
if self._v1_current_data:
out += '.with_data({0})'.format(self.repr_shallow(self._v1_current_data))
if self._v1_new_data:
out += '.pending({0})'.format(self.repr_shallow(self._v1_new_data))
return out
def _v1_getattr(self, attr):
"Intercept access to missing attribute names. "
"first return uncommitted data, then refresh if needed, then get single attr, else fail"
        if attr in self._v1_new_data:
value = self._v1_new_data[attr]
else:
if self._v1_needs_refresh:
self._v1_refresh()
if attr not in self._v1_current_data.keys():
self._v1_current_data[attr] = self._v1_get_single_attr(attr)
value = self._v1_current_data[attr]
return value
def _v1_setattr(self, attr, value):
'Stores a new value for later commit'
if attr.startswith('_v1_'):
object.__setattr__(self, attr, value)
else:
self._v1_new_data[attr] = value
self._v1_v1meta.add_to_dirty_list(self)
self._v1_needs_commit = True
def set(self, **kw):
self.pending(kw)
return self
def with_data(self, newdata):
"bulk-set instance data"
self._v1_current_data.update(dict(newdata))
self._v1_needs_refresh = False
return self
def pending(self, newdata):
"bulk-set data to commit"
self._v1_new_data.update(dict(newdata))
self._v1_v1meta.add_to_dirty_list(self)
self._v1_needs_commit = True
def _v1_commit(self):
'Commits the object to the server and invalidates its sync state'
if self._v1_needs_commit:
self._v1_v1meta.update_asset(self._v1_asset_type_name, self._v1_oid, self._v1_new_data)
self._v1_needs_commit = False
self._v1_new_data = {}
self._v1_current_data = {}
self._v1_needs_refresh = True
def _v1_refresh(self):
'Syncs the objects from current server data'
self._v1_current_data = self._v1_v1meta.read_asset(self._v1_asset_type_name, self._v1_oid)
self._v1_needs_refresh = False
def _v1_get_single_attr(self, attr):
return self._v1_v1meta.get_attr(self._v1_asset_type_name, self._v1_oid, attr)
def _v1_execute_operation(self, opname):
result = self._v1_v1meta.execute_operation(self._v1_asset_type_name, self._v1_oid, opname)
self._v1_needs_refresh = True
return result
```
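A hedged usage sketch: the concrete asset classes (e.g. `Story`) are generated at runtime by `V1Meta.asset_class`, which is not part of this excerpt, so the `V1Meta` constructor arguments and the field names below are assumptions for illustration rather than verbatim API:
```python
# Assumed surrounding SDK: V1Meta exposes generated asset classes such as v1.Story,
# whose query/where/select class methods come from BaseAsset above.
from v1pysdk import V1Meta  # assumption: the package exposes V1Meta at top level

v1 = V1Meta(address="www1.v1host.com", instance="MyInstance",
            username="user", password="pass")
for story in v1.Story.where(Name="Add login page").select("Name", "Estimate"):
    print(story.url, story["Name"])  # __getitem__ reads the fetched data
    story.set(Estimate=3)            # queued via pending(); written on commit
```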
|
{
"source": "Jeremy-Xin/Crawl_It",
"score": 2
}
|
#### File: spider/core/engine.py
```python
import asyncio
from .scheduler import Scheduler
from .downloader import Downloader
from ..base.item import Item
from .event_manager import event_manager, Events
from ..http.request import Request
from ..http.response import Response
from ..utils.log_decorator import log
import traceback
from ..pipeline.singlefile_pipeline import SingleFilePipeline
from ..utils import config_loader
class Engine(object):
'''
this is the engine of all components
'''
def __init__(self, crawler, loop=None):
self.crawler = crawler
self.scheduler = Scheduler(crawler)
self.downloader = Downloader(crawler, loop)
self.running_tasks = []
self.parallel_num = 16
self._loop = loop or asyncio.get_event_loop()
self.started = False
self.pipeline = SingleFilePipeline()
self.max_depth = 5
config_loader.from_object({'log_level':'DEBUG'})
def start_engine(self):
'''
starting point of whole system
'''
try:
# asyncio.Task(self.start())
# self._loop.run_forever()
self._loop.run_until_complete(self.start())
except Exception as ex:
print(ex)
traceback.print_tb(ex.__traceback__)
finally:
self._loop.stop()
self._loop.close()
event_manager.SendEvent(Events.ENGINE_CLOSE)
print(len(self.crawler.crawled))
return
async def start(self):
'''
start several tasks to crawl
'''
event_manager.SendEvent(Events.ENGINE_START)
self.init_crawler_request()
self.running_tasks = [asyncio.Task(self.do_works(i + 1)) for i in range(self.parallel_num)]
await asyncio.wait(self.running_tasks)
def init_crawler_request(self):
'''
initialize the requests list from crawler
'''
for req in self.crawler.start_requests():
self.scheduler.schedule_nowait(req)
async def do_works(self, idx):
'''
deal with requests one by one
'''
idle_round = 0
while True:
if self.scheduler.have_next():
idle_round = 0
await self.do_one_work(idx)
else:
await asyncio.sleep(1)
# print('sleep')
idle_round += 1
if idle_round > 10:
break
print('Worker{} Done!'.format(idx))
# async def do_works(self, idx):
# while True:
# await self.do_one_work(idx)
async def do_one_work(self, idx):
'''
process one request in a coroutine
'''
request = await self._next_request_from_scheduler()
if not request:
return
content = await self._download(request)
if not content:
return
response = Response(content, request)
for item in request.callback(response):
assert isinstance(item, (Request, Item)), "unexpected type"
if isinstance(item, Item):
await self.pipeline.process_item(item)
else:
item.depth = request.depth + 1
if item.depth > self.max_depth:
continue
await self.scheduler.schedule(item)
async def _next_request_from_scheduler(self):
return await self.scheduler.next_request()
async def _download(self, request):
return await self.downloader.do_download(request)
```
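`Engine.start` fans the crawl out to `parallel_num` coroutines, each of which drains the scheduler and shuts itself down after roughly ten idle polls. The snippet below is a minimal, self-contained sketch of that worker-pool-with-idle-shutdown pattern using a plain `asyncio.Queue`; it does not import the project's `Scheduler` or `Downloader`, and the URLs are placeholders.
```python
import asyncio

async def worker(idx, queue, results):
    idle_round = 0
    while True:
        if not queue.empty():
            idle_round = 0
            url = queue.get_nowait()
            await asyncio.sleep(0.01)   # stands in for Downloader.do_download(request)
            results.append((idx, url))
        else:
            await asyncio.sleep(0.1)    # nothing queued: poll again, give up after 10 tries
            idle_round += 1
            if idle_round > 10:
                break
    print('Worker{} Done!'.format(idx))

async def main():
    queue, results = asyncio.Queue(), []
    for i in range(20):
        queue.put_nowait('http://example.com/page/{}'.format(i))
    workers = [asyncio.ensure_future(worker(i + 1, queue, results)) for i in range(4)]
    await asyncio.wait(workers)
    print(len(results), 'pages processed')

if __name__ == '__main__':
    asyncio.run(main())
```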
#### File: spider/http/response.py
```python
from .request import Request
class Response(object):
def __init__(self, content, request):
self.content = content
self.request = request
```
#### File: spider/parser/html_parser.py
```python
import re
import bs4
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
class HtmlParser(object):
'''
helper class to parse HTML
'''
def extract_link(self, text, base_url):
'''
extract links from a HTML
'''
b_url = urlparse(base_url)
host = b_url.scheme + '://' + b_url.hostname
soup = BeautifulSoup(text, 'lxml')
links = []
for link in soup.find_all('a'):
url = str(link.get('href'))
if url.startswith('/'):
links.append(urljoin(host, url))
elif url.startswith('http://') or url.startswith('https://'):
links.append(url)
return links
```
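`HtmlParser.extract_link` keeps two kinds of hrefs: root-relative paths, which it joins onto the page's scheme and host, and already-absolute http(s) URLs; everything else (mailto:, fragments, plain relative paths) is dropped. A small usage sketch of the same filtering follows, assuming beautifulsoup4 is installed; it uses the stdlib `'html.parser'` backend so lxml is not required.
```python
from urllib.parse import urljoin, urlparse
from bs4 import BeautifulSoup

html = """
<a href="/docs/intro">root-relative</a>
<a href="https://example.org/page">absolute</a>
<a href="mailto:someone@example.com">dropped</a>
"""

base_url = 'http://example.com/start'
parts = urlparse(base_url)
host = parts.scheme + '://' + parts.hostname

links = []
for a in BeautifulSoup(html, 'html.parser').find_all('a'):
    url = str(a.get('href'))
    if url.startswith('/'):
        links.append(urljoin(host, url))           # -> http://example.com/docs/intro
    elif url.startswith(('http://', 'https://')):
        links.append(url)                          # absolute links kept as-is

print(links)
```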
|
{
"source": "JeremyXSC/MCN-MT",
"score": 2
}
|
#### File: reid/loss/var_loss.py
```python
import torch
import torch.nn as nn
from collections import defaultdict
class VarLoss(nn.Module):
def __init__(self, feat_dim=768):
super(VarLoss, self).__init__()
self.feat_dim = feat_dim
self.simiFunc = nn.Softmax(dim=0)
def __calDis(self, x, y): # 246s
# x, y = F.normalize(qFeature), F.normalize(gFeature)
# x, y = qFeature, gFeature
m, n = x.shape[0], y.shape[0]
disMat = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + \
torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n, m).t()
disMat.addmm_(1, -2, x, y.t())
return disMat
def forward(self, x, labels):
labelMap = defaultdict(list)
# per-ID features
labVal = [int(val) for val in labels.cpu()]
for pid in set(labVal):
labelMap[pid].append(x[labels == pid, :])
# cal loss
loss = 0
for keyNum in labelMap.keys():
meanVec = labelMap[keyNum][0].mean(dim=0, keepdim=True)
dist = self.__calDis(meanVec, labelMap[keyNum][0])
loss += dist.mean()
return loss
```
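`VarLoss.__calDis` builds the pairwise squared-distance matrix through the identity ||x − y||² = ||x||² + ||y||² − 2·x·yᵀ, with the −2·x·yᵀ term added in place via `addmm_` (the positional beta/alpha form used above is deprecated in recent PyTorch). Below is a minimal sketch of the same computation using the keyword form, cross-checked against `torch.cdist`; the shapes and data are arbitrary placeholders.
```python
import torch

def pairwise_sq_dist(x, y):
    m, n = x.shape[0], y.shape[0]
    # ||x||^2 broadcast down the rows plus ||y||^2 broadcast across the columns
    d = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + \
        torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n, m).t()
    # in place: d = 1*d + (-2) * (x @ y.t())
    return d.addmm_(x, y.t(), beta=1, alpha=-2)

x, y = torch.randn(4, 8), torch.randn(5, 8)
print(torch.allclose(pairwise_sq_dist(x, y), torch.cdist(x, y).pow(2), atol=1e-5))
```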
#### File: reid/loss/virtual_ce.py
```python
from __future__ import absolute_import
import torch
from torch import nn
from torch.autograd import Variable
from torch.nn import functional as F
from scipy.stats import norm
import numpy as np
class VirtualCE(nn.Module):
def __init__(self, beta=0.1):
super(VirtualCE, self).__init__()
self.beta = beta
def forward(self, inputs, targets):
# norm first
n = inputs.shape[0]
inputs = F.normalize(inputs, p=2)
allPids = targets.cpu().numpy().tolist()
# All Centers
centerHash = {
pid: F.normalize(inputs[targets == pid, :].mean(dim=0, keepdim=True), p=2).detach() for pid in set(allPids)
}
allCenters = torch.autograd.Variable(torch.cat(list(centerHash.values()))).cuda()
centerPID = torch.from_numpy(np.asarray(list(centerHash.keys())))
# sampler vs center
samplerCenter = torch.autograd.Variable(torch.cat([allCenters[centerPID == pid, :] for pid in allPids])).cuda()
# inputs--(128*1024), allCenters--(32*1024)
vce = torch.diag(torch.exp(samplerCenter.mm(inputs.t()) / self.beta)) # 1*128
centerScore = torch.exp(allCenters.mm(inputs.t()) / self.beta).sum(dim=0) # 32(center number)*128->1*128
return -torch.log(vce.div(centerScore)).mean()
class VirtualKCE(nn.Module):
def __init__(self, beta=0.1):
super(VirtualKCE, self).__init__()
self.beta = beta
def forward(self, inputs, targets):
# norm first
n = inputs.shape[0]
inputs = F.normalize(inputs, p=2)
allPids = targets.cpu().numpy().tolist()
# All Centers
centerHash = {
pid: F.normalize(inputs[targets == pid, :].mean(dim=0, keepdim=True), p=2).detach() for pid in set(allPids)
}
allCenters = torch.autograd.Variable(torch.cat(list(centerHash.values()))).cuda()
centerPID = torch.from_numpy(np.asarray(list(centerHash.keys())))
samplerCenter = torch.autograd.Variable(torch.cat([allCenters[centerPID == pid, :] for pid in allPids])).cuda()
# inputs--(128*1024), allCenters--(32*1024)
vce = torch.diag(torch.exp(samplerCenter.mm(inputs.t()) / self.beta)) # 1*128
centerScore = torch.exp(allCenters.mm(inputs.t()) / self.beta).sum(dim=0) # 32*128->1*128
kNegScore = torch.diag(inputs.mm(inputs.t()))
return -torch.log(vce.div(kNegScore + centerScore)).mean()
```
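`VirtualCE` L2-normalizes the batch, builds one normalized center per identity, and scores every sample against every center with a temperature `beta`; the loss is the negative log-probability of each sample's own center under a softmax over centers (`VirtualKCE` additionally adds the sample's self-similarity to the denominator). The following is a CPU-only restatement of that computation with the `.cuda()` / `Variable` plumbing removed; the tensors are random placeholders.
```python
import torch
import torch.nn.functional as F

def virtual_ce(features, pids, beta=0.1):
    feats = F.normalize(features, p=2, dim=1)
    ids = sorted(set(pids.tolist()))
    # one detached, normalized center per identity present in the batch
    centers = torch.stack(
        [F.normalize(feats[pids == p].mean(dim=0), p=2, dim=0) for p in ids]
    ).detach()
    logits = centers @ feats.t() / beta                        # (num_ids, batch)
    own = torch.tensor([ids.index(int(p)) for p in pids])      # each sample's own center
    log_prob = F.log_softmax(logits, dim=0)                    # softmax over centers
    return -log_prob[own, torch.arange(feats.shape[0])].mean()

feats = torch.randn(8, 16)
pids = torch.tensor([0, 0, 1, 1, 2, 2, 3, 3])
print(virtual_ce(feats, pids))
```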
#### File: MCN-MT/reid/trainers_meannet.py
```python
from __future__ import print_function, absolute_import
import time
import torch
from torch.autograd import Variable
from .utils.meters import AverageMeter
import numpy as np
class BaseTrainer(object):
def __init__(self, model, criterions, print_freq=1):
super(BaseTrainer, self).__init__()
self.model = model
self.criterions = criterions
self.print_freq = print_freq
def train(self, epoch, data_loader, optimizer):
self.model.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(data_loader):
data_time.update(time.time() - end)
inputs, targets = self._parse_data(inputs) # image and pid
loss, prec1 = self._forward(inputs, targets, epoch)
losses.update(loss.item(), targets.size(0))
precisions.update(prec1, targets.size(0))
optimizer.zero_grad()
loss.backward()
            # add gradient clip for lstm (in-place; skip params that have no gradient)
            for param in self.model.parameters():
                try:
                    param.grad.data.clamp_(-1., 1.)
                except AttributeError:
                    continue
optimizer.step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(data_loader),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
raise NotImplementedError
def _forward(self, inputs, targets):
raise NotImplementedError
class CoTeaching(object):
def __init__(self, model, coModel, newDataSet, criterions, optimizers, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# noise sample mining
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
inputsCNNB, tarCNNB = inputs[0][lossIdx], targets[lossIdx]
inputsCNNB, tarCNNB = [inputsCNNB[:int(remRate * lossIdx.shape[0]), ...]], tarCNNB[:int(
remRate * lossIdx.shape[0])]
# loss for cnn B
lossCNNB, precCNNB = self._forward(inputsCNNB, tarCNNB, epoch, self.modelB)
lossCNNB = lossCNNB.mean()
losses.update(lossCNNB.item(), tarCNNB.size(0))
precisions.update(precCNNB, tarCNNB.size(0))
self.optimizers[1].zero_grad()
lossCNNB.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
else:
# update CNNA
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
inputsCNNA, tarCNNA = inputs[0][lossIdx], targets[lossIdx]
inputsCNNA, tarCNNA = [inputsCNNA[:int(remRate * lossIdx.shape[0]), ...]], tarCNNA[:int(
remRate * lossIdx.shape[0])]
# pure noise loss
lossCNNA, precCNNA = self._forward(inputsCNNA, tarCNNA, epoch, self.modelA)
lossCNNA = lossCNNA.mean()
# update
losses.update(lossCNNA.item(), tarCNNA.size(0))
precisions.update(precCNNA, tarCNNA.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
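# ---------------------------------------------------------------------------
# Illustrative helper only (not referenced by the trainers in this file): the
# "small-loss" selection CoTeaching performs each step, i.e. sort the per-sample
# losses and keep the easiest remRate fraction for the peer network. Written with
# torch.argsort so the indices stay on the same device; the trainers above and
# below obtain the same indices via np.argsort(loss.data.cpu()).cuda().
def select_small_loss(per_sample_loss, images, targets, rem_rate=0.2):
    keep = torch.argsort(per_sample_loss)[:int(rem_rate * per_sample_loss.shape[0])]
    return images[keep], targets[keep]
# ---------------------------------------------------------------------------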
class Trainer(BaseTrainer):
def _parse_data(self, inputs):
imgs, _, pids, _ = inputs
inputs = [Variable(imgs)]
targets = Variable(pids.cuda())
return inputs, targets # image and pid
def _forward(self, inputs, targets, epoch):
outputs = self.model(*inputs) # outputs=[x1,x2,x3]
# new added by wc
# x1 triplet loss
loss_tri, prec_tri = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch) # fc
# loss_center = self.criterions[2](outputs[0], targets)
return loss_tri + loss_global, prec_global
class RCoTeaching(object):
"""
    RCT implementation
"""
def __init__(self, model, coModel, newDataSet, noiseDataSet, criterions, optimizers, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets = self._parse_data(inputs) # image and pid
# noise data
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab = self._parse_data(noiseInput)
if i % 2 != 0:
# update CNNA
lossNoise, _ = self._forward(noiseInput, noiseLab, epoch, self.modelB) # assigned samples
lossPure, _ = self._forward(inputs, targets, epoch, self.modelB)
# # assigned's easy samples
                lossIdx, lossPureIdx = np.argsort(lossNoise.data.cpu()).cuda(), np.argsort(lossPure.data.cpu()).cuda()
smallNoise = noiseInput[0][lossIdx[:int(remRate * lossNoise.shape[0])], ...]
smallPure = inputs[0][lossPureIdx[:int(remRate * lossPure.shape[0])], ...]
smallNoiseLab = noiseLab[lossIdx[:int(remRate * lossNoise.shape[0])]]
smallPureLab = targets[lossPureIdx[:int(remRate * lossPure.shape[0])]]
newLab = torch.cat([smallNoiseLab, smallPureLab])
lossCNNA, precCNNA = self._forward([torch.cat([smallNoise, smallPure])], newLab, epoch, self.modelA)
lossCNNA = lossCNNA.mean()
losses.update(lossCNNA.item(), newLab.size(0))
precisions.update(precCNNA, newLab.size(0))
self.optimizers[0].zero_grad()
lossCNNA.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[0].step()
else:
# update CNNB
lossNoise, _ = self._forward(noiseInput, noiseLab, epoch, self.modelA) # assigned samples
lossPure, _ = self._forward(inputs, targets, epoch, self.modelA)
# # assigned's easy samples
lossIdx, lossPureIdx = np.argsort(lossNoise.data.cpu()).cuda(), np.argsort(lossPure.data.cpu()).cuda()
smallNoise = noiseInput[0][lossIdx[:int(remRate * lossNoise.shape[0])], ...]
smallPure = inputs[0][lossPureIdx[:int(remRate * lossPure.shape[0])], ...]
smallNoiseLab = noiseLab[lossIdx[:int(remRate * lossNoise.shape[0])]]
smallPureLab = targets[lossPureIdx[:int(remRate * lossPure.shape[0])]]
newLab = torch.cat([smallNoiseLab, smallPureLab])
lossCNNB, precCNNB = self._forward([torch.cat([smallNoise, smallPure])], newLab, epoch, self.modelB)
lossCNNB = lossCNNB.mean()
losses.update(lossCNNB.item(), newLab.size(0))
precisions.update(precCNNB, newLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
class CoTrainerAsy(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, newDataSet, noiseDataSet, criterions, optimizers, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB = lossCNNB.mean()
losses.update(lossCNNB.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
else:
# update CNNA
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossCNNA = lossMix.mean()
# update
losses.update(lossCNNA.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
# trainer = CoTrainerAsyMean(
# model, coModel, model_ema, coModel_ema, train_loader, unLoader, criterion, optims, alpha=args.alpha
# )
class CoTrainerAsyMean(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, model_ema, coModel_ema, newDataSet, noiseDataSet, criterions, optimizers, alpha=0.9, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelA_ema.train()
self.modelB_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
#precisions = [AverageMeter(),AverageMeter()]
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
#lossPure_ema, prec1_ema = self._forward(inputs, targets, epoch, self.modelA_ema)
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
#lossIdx_ema = np.argsort(lossPure_ema.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
#pureInput_ema = [inputs[0][lossIdx_ema[:int(remRate * lossPure_ema.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
#pureLab_ema = targets[lossIdx_ema[:int(remRate * lossPure_ema.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
                for param in self.modelB.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.newDataSet)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
#lossNoise_ema, prec1_ema = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.newDataSet)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
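# ---------------------------------------------------------------------------
# Illustrative helper only (not called anywhere in this file): the mean-teacher
# EMA update that _update_ema_variables applies, restated with the keyword form
# of Tensor.add_. alpha is ramped as min(1 - 1/(step + 1), alpha), so the teacher
# tracks the student closely during the first steps and averages slowly later on.
def ema_update(student, teacher, alpha, global_step):
    alpha = min(1 - 1 / (global_step + 1), alpha)
    for t_param, s_param in zip(teacher.parameters(), student.parameters()):
        # teacher <- alpha * teacher + (1 - alpha) * student
        t_param.data.mul_(alpha).add_(s_param.data, alpha=1 - alpha)
# ---------------------------------------------------------------------------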
class CoTrainerAsyMean1(object):  # use the EMA models to assign samples
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, model_ema, coModel_ema, newDataSet, noiseDataSet, criterions, optimizers, alpha=0.9, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelA_ema.train()
self.modelB_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
#precisions = [AverageMeter(),AverageMeter()]
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA_ema) # assigned samples
#lossPure_ema, prec1_ema = self._forward(inputs, targets, epoch, self.modelA_ema)
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
#lossIdx_ema = np.argsort(lossPure_ema.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
#pureInput_ema = [inputs[0][lossIdx_ema[:int(remRate * lossPure_ema.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
#pureLab_ema = targets[lossIdx_ema[:int(remRate * lossPure_ema.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
                for param in self.modelB.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.newDataSet)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
#lossNoise_ema, prec1_ema = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.newDataSet)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
#CoTrainerAsyMean_3model
# trainer = CoTrainerAsyMean_3model(
# model, coModel, coModel_outliers, model_ema, coModel_ema, coModel_outliers_ema, train_inliers_loader, train_outliers_loader, unLoader, criterion, optims
# )
class CoTrainerAsyMean_3model(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel,coModel_outliers, model_ema, coModel_ema, coModel_outliers_ema, newDataSet, inNoiseDataSet, outNoiseDataSet, criterions, optimizers, alpha=0.999, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelC = coModel_outliers
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.modelC_ema = coModel_outliers_ema
self.newDataSet = newDataSet
self.inNoiseData = inNoiseDataSet
self.outNoiseData = outNoiseDataSet
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelC.train()
self.modelA_ema.train()
self.modelB_ema.train()
self.modelC_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
#precisions = [AverageMeter(),AverageMeter()]
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNC
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
#lossPure_ema, prec1_ema = self._forward(inputs, targets, epoch, self.modelA_ema)
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
#lossIdx_ema = np.argsort(lossPure_ema.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
#pureInput_ema = [inputs[0][lossIdx_ema[:int(remRate * lossPure_ema.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
#pureLab_ema = targets[lossIdx_ema[:int(remRate * lossPure_ema.shape[0])]].long()
# loss for cnn C
lossCNNC, precCNNC = self._forward(pureInput, pureLab, epoch, self.modelC)
lossCNNC_ema, precCNNC_ema = self._forward(pureInput, pureLab, epoch, self.modelC_ema)
lossCNNC = lossCNNC.mean()
lossCNNC_ema = lossCNNC_ema.mean()
lossCNNC_mean = (lossCNNC + lossCNNC_ema)*0.5
losses.update(lossCNNC_mean.item(), pureLab.size(0))
precisions.update(precCNNC, pureLab.size(0))
self.optimizers[2].zero_grad()
lossCNNC_mean.backward()
                for param in self.modelC.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
self.optimizers[2].step()
self._update_ema_variables(self.modelC, self.modelC_ema, self.alpha, epoch*len(self.newDataSet)+i)
                for param in self.modelC_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.outNoiseData)
except:
noiseLoader = iter(self.outNoiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelC)
#lossNoise_ema, prec1_ema = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.newDataSet)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AC]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
#lossPure_ema, prec1_ema = self._forward(inputs, targets, epoch, self.modelA_ema)
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
#lossIdx_ema = np.argsort(lossPure_ema.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
#pureInput_ema = [inputs[0][lossIdx_ema[:int(remRate * lossPure_ema.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
#pureLab_ema = targets[lossIdx_ema[:int(remRate * lossPure_ema.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
                for param in self.modelB.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.newDataSet)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.inNoiseData)
except:
noiseLoader = iter(self.inNoiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
#lossNoise_ema, prec1_ema = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.newDataSet)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AB]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
# trainer = CoTrainerAsyMean_4model(
# model, coModel, co2Model, co3Model, model_ema, coModel_ema, co2Model_ema, co3Model_ema, train_in3_loader, train_out3_loader, train_out2_loader, unLoader, criterion, optims
# )
class CoTrainerAsyMean_4model(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, co2Model, co3Model, model_ema, coModel_ema, co2Model_ema, co3Model_ema, train_in3_loader, train_out3_loader, train_out2_loader, unLoader, criterions, optimizers, alpha=0.999, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelC = co2Model
self.modelD = co3Model
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.modelC_ema = co2Model_ema
self.modelD_ema = co3Model_ema
#self.noiseData = noiseDataSet
self.train_in3_data = train_in3_loader
self.train_out3_data = train_out3_loader
self.train_out2_data = train_out2_loader
self.train_out_data = unLoader
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelC.train()
self.modelD.train()
self.modelA_ema.train()
self.modelB_ema.train()
self.modelC_ema.train()
self.modelD_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.train_in3_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNND
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn D
lossCNND, precCNND = self._forward(pureInput, pureLab, epoch, self.modelD)
lossCNND_ema, precCNND_ema = self._forward(pureInput, pureLab, epoch, self.modelD_ema)
lossCNND = lossCNND.mean()
lossCNND_ema = lossCNND_ema.mean()
lossCNND_mean = (lossCNND + lossCNND_ema)*0.5
losses.update(lossCNND_mean.item(), pureLab.size(0))
precisions.update(precCNND, pureLab.size(0))
self.optimizers[3].zero_grad()
lossCNND_mean.backward()
for param in self.modelD.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[3].step()
self._update_ema_variables(self.modelD, self.modelD_ema, self.alpha, epoch*len(self.train_in3_data)+i)
                for param in self.modelD_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out_data)
except:
noiseLoader = iter(self.train_out_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelD)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in3_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AD]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in3_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in3_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNC
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn C
lossCNNC, precCNNC = self._forward(pureInput, pureLab, epoch, self.modelC)
lossCNNC_ema, precCNNC_ema = self._forward(pureInput, pureLab, epoch, self.modelC_ema)
lossCNNC = lossCNNC.mean()
lossCNNC_ema = lossCNNC_ema.mean()
lossCNNC_mean = (lossCNNC + lossCNNC_ema)*0.5
losses.update(lossCNNC_mean.item(), pureLab.size(0))
precisions.update(precCNNC, pureLab.size(0))
self.optimizers[2].zero_grad()
lossCNNC_mean.backward()
for param in self.modelC.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[2].step()
self._update_ema_variables(self.modelC, self.modelC_ema, self.alpha, epoch*len(self.train_in3_data)+i)
                for param in self.modelC_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out2_data)
except:
noiseLoader = iter(self.train_out2_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelC)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in3_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AC]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in3_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in3_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.train_in3_data)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out3_data)
except:
noiseLoader = iter(self.train_out3_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in3_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AB]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in3_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
# trainer = CoTrainerAsyMean_5model(
# model, coModel, co2Model, co3Model, co4Model, model_ema, coModel_ema, co2Model_ema, co3Model_ema, co4Model_ema, train_in4_loader, train_out4_loader, train_out3_loader, train_out2_loader, unLoader, criterion, optims
# )
class CoTrainerAsyMean_5model(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, co2Model, co3Model, co4Model, model_ema, coModel_ema, co2Model_ema, co3Model_ema, co4Model_ema, train_in4_loader, train_out4_loader, train_out3_loader, train_out2_loader, unLoader, criterions, optimizers, alpha=0.999, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelC = co2Model
self.modelD = co3Model
self.modelE = co4Model
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.modelC_ema = co2Model_ema
self.modelD_ema = co3Model_ema
self.modelE_ema = co4Model_ema
#self.noiseData = noiseDataSet
self.train_in4_data = train_in4_loader
self.train_out4_data = train_out4_loader
self.train_out3_data = train_out3_loader
self.train_out2_data = train_out2_loader
self.train_out_data = unLoader
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelC.train()
self.modelD.train()
self.modelE.train()
self.modelA_ema.train()
self.modelB_ema.train()
self.modelC_ema.train()
self.modelD_ema.train()
self.modelE_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.train_in4_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNE
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn D
lossCNNE, precCNNE = self._forward(pureInput, pureLab, epoch, self.modelE)
lossCNNE_ema, precCNNE_ema = self._forward(pureInput, pureLab, epoch, self.modelE_ema)
lossCNNE = lossCNNE.mean()
lossCNNE_ema = lossCNNE_ema.mean()
lossCNNE_mean = (lossCNNE + lossCNNE_ema)*0.5
losses.update(lossCNNE_mean.item(), pureLab.size(0))
precisions.update(precCNNE, pureLab.size(0))
self.optimizers[4].zero_grad()
lossCNNE_mean.backward()
for param in self.modelE.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[4].step()
self._update_ema_variables(self.modelE, self.modelE_ema, self.alpha, epoch*len(self.train_in4_data)+i)
                for param in self.modelE_ema.parameters():  # gradient clipping
                    try:
                        param.grad.data.clamp_(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out_data)
except:
noiseLoader = iter(self.train_out_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelE)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in4_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AE]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in4_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in4_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNND
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn D
lossCNND, precCNND = self._forward(pureInput, pureLab, epoch, self.modelD)
lossCNND_ema, precCNND_ema = self._forward(pureInput, pureLab, epoch, self.modelD_ema)
lossCNND = lossCNND.mean()
lossCNND_ema = lossCNND_ema.mean()
lossCNND_mean = (lossCNND + lossCNND_ema)*0.5
losses.update(lossCNND_mean.item(), pureLab.size(0))
precisions.update(precCNND, pureLab.size(0))
self.optimizers[3].zero_grad()
lossCNND_mean.backward()
for param in self.modelD.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[3].step()
self._update_ema_variables(self.modelD, self.modelD_ema, self.alpha, epoch*len(self.train_in4_data)+i)
                for param in self.modelD_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out2_data)
except:
noiseLoader = iter(self.train_out2_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelD)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in4_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AD]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in4_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in4_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNC
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn C
lossCNNC, precCNNC = self._forward(pureInput, pureLab, epoch, self.modelC)
lossCNNC_ema, precCNNC_ema = self._forward(pureInput, pureLab, epoch, self.modelC_ema)
lossCNNC = lossCNNC.mean()
lossCNNC_ema = lossCNNC_ema.mean()
lossCNNC_mean = (lossCNNC + lossCNNC_ema)*0.5
losses.update(lossCNNC_mean.item(), pureLab.size(0))
precisions.update(precCNNC, pureLab.size(0))
self.optimizers[2].zero_grad()
lossCNNC_mean.backward()
for param in self.modelC.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[2].step()
self._update_ema_variables(self.modelC, self.modelC_ema, self.alpha, epoch*len(self.train_in4_data)+i)
                for param in self.modelC_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out3_data)
except:
noiseLoader = iter(self.train_out3_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelC)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in4_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AC]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in4_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in4_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
#print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
#print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
#print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
#print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
#print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.train_in4_data)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out4_data)
except:
noiseLoader = iter(self.train_out4_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.train_in4_data)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AB]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in4_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
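        # Mean-teacher style update: ema_param <- alpha * ema_param + (1 - alpha) * param,
        # where the effective alpha ramps up from 0 towards the configured value as
        # global_step grows.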
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
class CoTrainerAsyMean_6model(object):
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, co2Model, co3Model, co4Model, co5Model, model_ema, coModel_ema, co2Model_ema, co3Model_ema,
co4Model_ema, co5Model_ema, train_in5_loader, train_out5_loader, train_out4_loader, train_out3_loader, train_out2_loader, unLoader,
criterions, optimizers, alpha=0.999, print_freq=1):
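        # Same scheme as the 5-model trainer, extended with a sixth network (modelF)
        # and its EMA copy; optimizers[0..5] update modelA..modelF respectively.
        # Note that train_out5_data is stored here but not referenced in train() below.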
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelC = co2Model
self.modelD = co3Model
self.modelE = co4Model
self.modelF = co5Model
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.modelC_ema = co2Model_ema
self.modelD_ema = co3Model_ema
self.modelE_ema = co4Model_ema
self.modelF_ema = co5Model_ema
# self.noiseData = noiseDataSet
self.train_in5_data = train_in5_loader
self.train_out5_data = train_out5_loader
self.train_out4_data = train_out4_loader
self.train_out3_data = train_out3_loader
self.train_out2_data = train_out2_loader
self.train_out_data = unLoader
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelC.train()
self.modelD.train()
self.modelE.train()
self.modelF.train()
self.modelA_ema.train()
self.modelB_ema.train()
self.modelC_ema.train()
self.modelD_ema.train()
self.modelE_ema.train()
self.modelF_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.train_in5_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
# print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
# print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
# print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
# print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
# print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNF
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn F
lossCNNF, precCNNF = self._forward(pureInput, pureLab, epoch, self.modelF)
lossCNNF_ema, precCNNF_ema = self._forward(pureInput, pureLab, epoch, self.modelF_ema)
lossCNNF = lossCNNF.mean()
lossCNNF_ema = lossCNNF_ema.mean()
lossCNNF_mean = (lossCNNF + lossCNNF_ema) * 0.5
losses.update(lossCNNF_mean.item(), pureLab.size(0))
precisions.update(precCNNF, pureLab.size(0))
self.optimizers[5].zero_grad()
lossCNNF_mean.backward()
                for param in self.modelF.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[5].step()
self._update_ema_variables(self.modelF, self.modelF_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
                for param in self.modelF_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out_data)
except:
noiseLoader = iter(self.train_out_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelF)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema) * 0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AF]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in5_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
#================================================================================================================
for i, inputs in enumerate(self.train_in5_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
# print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
# print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
# print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
# print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
# print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNE
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn D
lossCNNE, precCNNE = self._forward(pureInput, pureLab, epoch, self.modelE)
lossCNNE_ema, precCNNE_ema = self._forward(pureInput, pureLab, epoch, self.modelE_ema)
lossCNNE = lossCNNE.mean()
lossCNNE_ema = lossCNNE_ema.mean()
lossCNNE_mean = (lossCNNE + lossCNNE_ema) * 0.5
losses.update(lossCNNE_mean.item(), pureLab.size(0))
precisions.update(precCNNE, pureLab.size(0))
self.optimizers[4].zero_grad()
lossCNNE_mean.backward()
for param in self.modelE.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[4].step()
self._update_ema_variables(self.modelE, self.modelE_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
                for param in self.modelE_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out_data)
except:
noiseLoader = iter(self.train_out_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelE)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema) * 0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AE]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in5_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in5_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
# print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
# print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
# print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
# print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
# print(np.array(inputs).shape)
if i % 2 == 0:
# update CNND
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn D
lossCNND, precCNND = self._forward(pureInput, pureLab, epoch, self.modelD)
lossCNND_ema, precCNND_ema = self._forward(pureInput, pureLab, epoch, self.modelD_ema)
lossCNND = lossCNND.mean()
lossCNND_ema = lossCNND_ema.mean()
lossCNND_mean = (lossCNND + lossCNND_ema) * 0.5
losses.update(lossCNND_mean.item(), pureLab.size(0))
precisions.update(precCNND, pureLab.size(0))
self.optimizers[3].zero_grad()
lossCNND_mean.backward()
for param in self.modelD.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[3].step()
self._update_ema_variables(self.modelD, self.modelD_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
                for param in self.modelD_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out2_data)
except:
noiseLoader = iter(self.train_out2_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelD)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema) * 0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AD]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in5_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in5_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
# print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
# print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
# print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
# print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
# print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNC
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn C
lossCNNC, precCNNC = self._forward(pureInput, pureLab, epoch, self.modelC)
lossCNNC_ema, precCNNC_ema = self._forward(pureInput, pureLab, epoch, self.modelC_ema)
lossCNNC = lossCNNC.mean()
lossCNNC_ema = lossCNNC_ema.mean()
lossCNNC_mean = (lossCNNC + lossCNNC_ema) * 0.5
losses.update(lossCNNC_mean.item(), pureLab.size(0))
precisions.update(precCNNC, pureLab.size(0))
self.optimizers[2].zero_grad()
lossCNNC_mean.backward()
for param in self.modelC.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[2].step()
self._update_ema_variables(self.modelC, self.modelC_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
                for param in self.modelC_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out3_data)
except:
noiseLoader = iter(self.train_out3_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelC)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema) * 0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AC]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in5_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
for i, inputs in enumerate(self.train_in5_data):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
# print('inputs.type: {} ,inputs.size: {} '.format(type(inputs), len(inputs)))
# print('inputs.type: {} ,inputs[0].size: {} '.format(type(inputs), len(inputs[0])))
# print('inputs.type: {} ,inputs[0][0].size: {} '.format(type(inputs), len(inputs[0][0])))
# print('inputs.type: {} ,inputs[1].size: {} '.format(type(inputs), len(inputs[1])))
# print(inputs[0])
# print(np.array(inputs).shape)
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema) * 0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
                for param in self.modelB_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.train_out4_data)
except:
noiseLoader = iter(self.train_out4_data)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema) * 0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha,
epoch * len(self.train_in5_data) + i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('[AB]Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.train_in5_data),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
class CoTrainerAsyMean2(object):  # α-linear: variant that anneals the EMA alpha toward 0.5 during training
def saveImg(self, imgList, gtList):
import shutil
import os
rootDir = self.noiseData.dataset.root
if os.path.exists('smallLoss'):
shutil.rmtree('smallLoss')
os.makedirs('smallLoss')
for name, pid in zip(imgList, gtList):
curPath = os.path.join(rootDir, name)
nameList = name.split('_')
nameList[0] = str(pid)
tarPath = os.path.join('smallLoss', '_'.join(nameList))
shutil.copyfile(curPath, tarPath)
def __init__(self, model, coModel, model_ema, coModel_ema, newDataSet, noiseDataSet, criterions, optimizers, alpha=0.999, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.modelA_ema = model_ema
self.modelB_ema = coModel_ema
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.alpha = alpha
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
self.modelA.train()
self.modelB.train()
self.modelA_ema.train()
self.modelB_ema.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
#precisions = [AverageMeter(),AverageMeter()]
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets, names = self._parse_data(inputs) # image and pid
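            # anneal the EMA decay alpha toward 0.5 as the epoch progresses (the "α-linear" schedule)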
            self.alpha = self.alpha - (self.alpha - 0.5) * (i + 1) / len(self.newDataSet)
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
#lossPure_ema, prec1_ema = self._forward(inputs, targets, epoch, self.modelA_ema)
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
#lossIdx_ema = np.argsort(lossPure_ema.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
#pureInput_ema = [inputs[0][lossIdx_ema[:int(remRate * lossPure_ema.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]].long()
#pureLab_ema = targets[lossIdx_ema[:int(remRate * lossPure_ema.shape[0])]].long()
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB_ema, precCNNB_ema = self._forward(pureInput, pureLab, epoch, self.modelB_ema)
lossCNNB = lossCNNB.mean()
lossCNNB_ema = lossCNNB_ema.mean()
lossCNNB_mean = (lossCNNB + lossCNNB_ema)*0.5
losses.update(lossCNNB_mean.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB_mean.backward()
                for param in self.modelB.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
self._update_ema_variables(self.modelB, self.modelB_ema, self.alpha, epoch*len(self.newDataSet)+i)
                for param in self.modelB_ema.parameters():  # gradient clipping
try:
param.grad.data.clamp(-1., 1.)
except:
continue
else:
# update CNNA
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab, noiseNames = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
#lossNoise_ema, prec1_ema = self._forward(noiseInput, noiseLab, epoch, self.modelB_ema)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# save small samples
# noiseImg = np.asarray(noiseNames)[lossIdx][:int(remRate*lossNoise.shape[0])]
# self.saveImg(noiseImg, noiseLab) # save image
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([inputs[0], noiseInput])], torch.cat([targets.long(), noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.modelA)
lossMix_ema, precCNNA_ema = self._forward(mixInput, mixLab, epoch, self.modelA_ema)
lossCNNA = lossMix.mean()
lossCNNA_ema = lossMix_ema.mean()
lossCNNA_mean = (lossCNNA + lossCNNA_ema)*0.5
# update
losses.update(lossCNNA_mean.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA_mean.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
self._update_ema_variables(self.modelA, self.modelA_ema, self.alpha, epoch*len(self.newDataSet)+i)
for param in self.modelA_ema.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _update_ema_variables(self, model, ema_model, alpha, global_step):
alpha = min(1 - 1 / (global_step + 1), alpha)
for ema_param, param in zip(ema_model.parameters(), model.parameters()):
ema_param.data.mul_(alpha).add_(1 - alpha, param.data)
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets, fname # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
class CoTrainerAsySep(object):
def __init__(self, model, coModel, newDataSet, noiseDataSet, criterions, optimizers, print_freq=1):
self.modelA = model
self.modelB = coModel # the co-teacher
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizers = optimizers
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
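        # "Sep" variant: modelA is updated with separately weighted losses
        # (0.1 * mined-noisy loss + assigned-batch loss) instead of a single mixed batch.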
self.modelA.train()
self.modelB.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets = self._parse_data(inputs) # image and pid
if i % 2 == 0:
# update CNNB
lossPure, prec1 = self._forward(inputs, targets, epoch, self.modelA) # assigned samples
# # assigned's easy samples
lossIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput = [inputs[0][lossIdx[:int(remRate * lossPure.shape[0])], ...]]
pureLab = targets[lossIdx[:int(remRate * lossPure.shape[0])]]
# loss for cnn B
lossCNNB, precCNNB = self._forward(pureInput, pureLab, epoch, self.modelB)
lossCNNB = lossCNNB.mean()
losses.update(lossCNNB.item(), pureLab.size(0))
precisions.update(precCNNB, pureLab.size(0))
self.optimizers[1].zero_grad()
lossCNNB.backward()
for param in self.modelB.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
self.optimizers[1].step()
else:
# update CNNA
try:
noiseInput = next(self.noiseData)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.modelB)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# mix update, part assigned and part unassigned
# mixInput, mixLab = [torch.cat([inputs[0],noiseInput])], torch.cat([targets,noiseLab])
lossCNNAnoise, precCNNAnoise = self._forward([noiseInput], noiseLab, epoch, self.modelA)
lossCNNApure, precCNNApure = self._forward(inputs, targets, epoch, self.modelA)
lossCNNA = 0.1 * lossCNNAnoise.mean() + lossCNNApure.mean()
# update
losses.update(lossCNNA.item(), targets.size(0))
precisions.update(precCNNApure, targets.size(0))
# update CNNA
self.optimizers[0].zero_grad()
lossCNNA.backward()
for param in self.modelA.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizers[0].step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
class EvoTrainer(object):
def __init__(self, model, newDataSet, noiseDataSet, criterions, optimizer, print_freq=1):
self.model = model
self.noiseData = noiseDataSet
self.newDataSet = newDataSet
self.criterions = criterions
self.optimizer = optimizer
self.print_freq = print_freq
def train(self, epoch, remRate=0.2):
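        # Single-model variant: the model mines its own small-loss samples from both
        # the assigned batch and the noisy loader, then trains on the mixture.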
self.model.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
precisions = AverageMeter()
end = time.time()
for i, inputs in enumerate(self.newDataSet):
data_time.update(time.time() - end)
inputs, targets = self._parse_data(inputs) # image and pid
# update CNNA
lossPure, prec1 = self._forward(inputs, targets, epoch, self.model) # assigned samples
pureIdx = np.argsort(lossPure.data.cpu()).cuda()
pureInput, targets = inputs[0][pureIdx], targets[pureIdx]
pureInput, targets = pureInput[:int(remRate * lossPure.shape[0]), ...], targets[
:int(remRate * lossPure.shape[0])]
# update CNNA
try:
noiseInput = next(noiseLoader)
except:
noiseLoader = iter(self.noiseData)
noiseInput = next(noiseLoader)
noiseInput, noiseLab = self._parse_data(noiseInput)
lossNoise, prec1 = self._forward(noiseInput, noiseLab, epoch, self.model)
# sample mining
lossIdx = np.argsort(lossNoise.data.cpu()).cuda()
noiseInput, noiseLab = noiseInput[0][lossIdx], noiseLab[lossIdx]
noiseInput, noiseLab = noiseInput[:int(remRate * lossNoise.shape[0]), ...], noiseLab[:int(
remRate * lossNoise.shape[0])]
# mix update, part assigned and part unassigned
mixInput, mixLab = [torch.cat([pureInput, noiseInput])], torch.cat([targets, noiseLab])
lossMix, precCNNA = self._forward(mixInput, mixLab, epoch, self.model)
lossCNNA = lossMix.mean()
# update
losses.update(lossCNNA.item(), mixLab.size(0))
precisions.update(precCNNA, mixLab.size(0))
# update CNNA
self.optimizer.zero_grad()
lossCNNA.backward()
for param in self.model.parameters():
try:
param.grad.data.clamp(-1., 1.)
except:
continue
# update modelA
self.optimizer.step()
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % self.print_freq == 0:
print('Epoch: [{}][{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
'Loss {:.3f} ({:.3f})\t'
'Prec {:.2%} ({:.2%})\t'
.format(epoch, i + 1, len(self.newDataSet),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg,
losses.val, losses.avg,
precisions.val, precisions.avg))
def _parse_data(self, inputs):
imgs, fname, pids, _ = inputs
inputs = [Variable(imgs).cuda()]
targets = Variable(pids.cuda())
return inputs, targets # image and pid
def _forward(self, inputs, targets, epoch, model):
outputs = model(*inputs) # outputs=[x1,x2,x3]
# x1 triplet loss
loss_tri, _ = self.criterions[0](outputs[0], targets, epoch) # feature
# x2 triplet loss
loss_global, prec_global = self.criterions[1](outputs[1], targets, epoch)
return loss_tri + loss_global, prec_global
```
|
{
"source": "Jeremyyang920/datadog-agent",
"score": 2
}
|
#### File: datadog-agent/tasks/cluster_agent_cloudfoundry.py
```python
import os
from invoke import task
from .build_tags import get_default_build_tags
from .cluster_agent_helpers import build_common, clean_common, refresh_assets_common, version_common
# constants
BIN_PATH = os.path.join(".", "bin", "datadog-cluster-agent-cloudfoundry")
@task
def build(ctx, rebuild=False, build_include=None, build_exclude=None, race=False, development=True, skip_assets=False):
"""
Build Cluster Agent for Cloud Foundry
    Example invocation:
inv cluster-agent-cloudfoundry.build
"""
build_common(
ctx,
BIN_PATH,
get_default_build_tags(build="cluster-agent-cloudfoundry"),
"-cloudfoundry",
rebuild,
build_include,
build_exclude,
race,
development,
skip_assets,
)
@task
def refresh_assets(ctx, development=True):
"""
Clean up and refresh cluster agent's assets and config files
"""
refresh_assets_common(ctx, BIN_PATH, [], development)
@task
def integration_tests(ctx, install_deps=False, race=False, remote_docker=False): # noqa: U100
"""
Run integration tests for cluster-agent-cloudfoundry
"""
pass # TODO
@task
def clean(ctx):
"""
Remove temporary objects and binary artifacts
"""
clean_common(ctx, "datadog-cluster-agent")
@task
def version(ctx, url_safe=False, git_sha_length=7):
"""
Get the agent version.
    url_safe: get a version string that can safely be used in a URL
    git_sha_length: different versions of git have a different short sha length,
        use this to explicitly set the sha length
        (the windows builder and the default ubuntu version have such an incompatibility)
"""
version_common(ctx, url_safe, git_sha_length)
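# Illustrative invocations of the tasks above (flag spellings follow invoke's usual
# underscore-to-hyphen conversion and are not taken from this file):
#   inv cluster-agent-cloudfoundry.build --race
#   inv cluster-agent-cloudfoundry.version --url-safe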
```
#### File: libs/common/github.py
```python
import errno
import json
import os
import re
from .githubapp import GithubApp, GithubAppException
errno_regex = re.compile(r".*\[Errno (\d+)\] (.*)")
__all__ = ["Github", "GithubException"]
class GithubException(Exception):
pass
class Github(object):
BASE_URL = "https://api.github.com"
def __init__(self, api_token=None):
self.api_token = api_token if api_token else self._api_token()
def repo(self, repo_name):
"""
Gets the repo info.
"""
path = "/repos/{}".format(repo_name)
return self.make_request(path, method="GET", output_format="json")
def trigger_workflow(self, repo_name, workflow_name, ref, inputs=None):
"""
        Triggers a workflow dispatch event for a given reference of a repository.
ref must be a branch or a tag.
"""
if inputs is None:
inputs = dict()
path = "/repos/{}/actions/workflows/{}/dispatches".format(repo_name, workflow_name)
data = json.dumps({"ref": ref, "inputs": inputs})
return self.make_request(path, method="POST", data=data)
def workflow_run(self, repo_name, run_id):
"""
        Gets info on a specific workflow run.
"""
path = "/repos/{}/actions/runs/{}".format(repo_name, run_id)
return self.make_request(path, method="GET", output_format="json")
def download_artifact(self, repo_name, artifact_id, destination_dir):
"""
Downloads the artifact identified by artifact_id to destination_dir.
"""
path = "/repos/{}/actions/artifacts/{}/zip".format(repo_name, artifact_id)
content = self.make_request(path, method="GET", output_format="raw")
zip_target_path = os.path.join(destination_dir, "{}.zip".format(artifact_id))
with open(zip_target_path, "wb") as f:
f.write(content)
return zip_target_path
def workflow_run_artifacts(self, repo_name, run_id):
"""
Gets list of artifacts for a workflow run.
"""
path = "/repos/{}/actions/runs/{}/artifacts".format(repo_name, run_id)
return self.make_request(path, method="GET", output_format="json")
def latest_workflow_run_for_ref(self, repo_name, workflow_name, ref):
"""
Gets latest workflow run for a given reference
"""
runs = self.workflow_runs(repo_name, workflow_name)
ref_runs = [run for run in runs["workflow_runs"] if run["head_branch"] == ref]
return max(ref_runs, key=lambda run: run['created_at'], default=None)
def workflow_runs(self, repo_name, workflow_name):
"""
Gets all workflow runs for a workflow.
"""
path = "/repos/{}/actions/workflows/{}/runs".format(repo_name, workflow_name)
return self.make_request(path, method="GET", output_format="json")
def make_request(self, endpoint, headers=None, method="GET", data=None, output_format="text"):
"""
        Utility to make an HTTP request to the Github API.
endpoint is the HTTP endpoint that will be requested.
headers is a dict of HTTP headers that can be added to the request.
Adds "Authorization: token {self.api_token}" and "Accept: application/vnd.github.v3+json"
to the headers to be able to authenticate ourselves to Github.
The method parameter dictates the type of request made (GET or POST).
If method is GET, the data parameter is ignored (no body can be sent in a GET request).
The output_format allows changing the structure of the response:
- text: a string containing the body of the response.
- json: an object containing the deserialized json body response. Works only if the response
is a json object.
- raw: a binary blob. Mainly useful when downloading things.
"""
import requests
url = self.BASE_URL + endpoint
headers = dict(headers or [])
headers["Authorization"] = "token {}".format(self.api_token)
headers["Accept"] = "application/vnd.github.v3+json"
for _ in range(5): # Retry up to 5 times
try:
if method == 'GET':
r = requests.get(url, headers=headers)
if method == 'POST':
if data:
r = requests.post(url, headers=headers, data=data)
else:
r = requests.post(url, headers=headers)
if r.status_code < 400: # Success
if output_format == "json":
return r.json()
if output_format == "raw":
return r.content
return r.text
if r.status_code == 401:
print("HTTP 401: The token is invalid. Is the Github App still allowed to perform this action?")
print("Github says: {}".format(r.json()["error_description"]))
except requests.exceptions.Timeout:
print("Connection to Github ({}) timed out.".format(url))
except requests.exceptions.RequestException as e:
m = errno_regex.match(str(e))
                if not m:
                    print("Unknown error raised connecting to {}: {}".format(url, e))
                    continue
# Parse errno to give a better explanation
# Requests doesn't have granularity at the level we want:
# http://docs.python-requests.org/en/master/_modules/requests/exceptions/
errno_code = int(m.group(1))
message = m.group(2)
if errno_code == errno.ENOEXEC:
print("Error resolving {}: {}".format(url, message))
elif errno_code == errno.ECONNREFUSED:
print("Connection to Github ({}) refused".format(url))
else:
print("Error while connecting to {}: {}".format(url, str(e)))
raise GithubException("Failed while making HTTP request: {} {}".format(method, url))
def _api_token(self):
try:
token = GithubApp().get_token()
except GithubAppException:
raise GithubException("Couldn't get API token.")
return token
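# Hypothetical usage sketch (repo/workflow names are placeholders, not from this file):
#   gh = Github(api_token="<personal access token>")
#   run = gh.latest_workflow_run_for_ref("DataDog/datadog-agent", "build.yml", "main")
#   artifacts = gh.workflow_run_artifacts("DataDog/datadog-agent", run["id"])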
```
|
{
"source": "jeremyyeo/dbt-cloud-cli",
"score": 2
}
|
#### File: command/job/run.py
```python
import os
import requests
from typing import Optional, List
from pydantic import Field, validator
from dbt_cloud.command.command import DbtCloudAccountCommand
from dbt_cloud.field import JOB_ID_FIELD
class DbtCloudJobRunCommand(DbtCloudAccountCommand):
"""Triggers a dbt Cloud job run and returns a status JSON response."""
job_id: int = JOB_ID_FIELD
cause: str = Field(
default="Triggered via API",
description="A text description of the reason for running this job",
)
git_sha: Optional[str] = Field(
description="The git sha to check out before running this job"
)
git_branch: Optional[str] = Field(
description="The git branch to check out before running this job"
)
schema_override: Optional[str] = Field(
description="Override the destination schema in the configured target for this job"
)
dbt_version_override: Optional[str] = Field(
description="Override the version of dbt used to run this job"
)
threads_override: Optional[int] = Field(
description="Override the number of threads used to run this job"
)
target_name_override: Optional[str] = Field(
description="Override the target.name context variable used when running this job"
)
generate_docs_override: Optional[bool] = Field(
description="Override whether or not this job generates docs (true=yes, false=no)"
)
timeout_seconds_override: Optional[int] = Field(
description="Override the timeout in seconds for this job"
)
steps_override: Optional[List[str]] = Field(
description="Override the list of steps for this job"
)
@validator("steps_override")
def check_steps_override_is_none_if_empty(cls, value):
return value or None
@property
def api_url(self) -> str:
return f"{super().api_url}/jobs/{self.job_id}/run/"
def execute(self) -> requests.Response:
response = requests.post(
url=self.api_url,
headers=self.request_headers,
json=self.get_payload(),
)
return response
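# Hypothetical usage sketch (the account/token field names are assumed from
# DbtCloudAccountCommand, which is not shown in this file; the IDs are placeholders):
#   command = DbtCloudJobRunCommand(api_token="<token>", account_id=123, job_id=456)
#   response = command.execute()
#   response.raise_for_status()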
```
#### File: command/project/list.py
```python
import requests
from dbt_cloud.command.command import DbtCloudAccountCommand
class DbtCloudProjectListCommand(DbtCloudAccountCommand):
"""Returns a list of projects in the account."""
@property
def api_url(self) -> str:
return f"{super().api_url}/projects"
def execute(self) -> requests.Response:
response = requests.get(url=self.api_url, headers=self.request_headers)
return response
```
#### File: command/run/get_artifact.py
```python
import requests
from pydantic import Field
from dbt_cloud.command.command import DbtCloudAccountCommand
from dbt_cloud.field import RUN_ID_FIELD
class DbtCloudRunGetArtifactCommand(DbtCloudAccountCommand):
"""Fetches an artifact file from a completed run."""
run_id: int = RUN_ID_FIELD
step: int = Field(
None,
description="The index of the Step in the Run to query for artifacts. The first step in the run has the index 1. If the step parameter is omitted, then this endpoint will return the artifacts compiled for the last step in the run.",
)
path: str = Field(
...,
description="Paths are rooted at the target/ directory. Use manifest.json, catalog.json, or run_results.json to download dbt-generated artifacts for the run.",
)
@property
def api_url(self) -> str:
return f"{super().api_url}/runs/{self.run_id}/artifacts/{self.path}"
def execute(self) -> requests.Response:
response = requests.get(
url=self.api_url, headers=self.request_headers, params={"step": self.step}
)
return response
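# For example (placeholder IDs), run_id=789 with path="run_results.json" would download
# the dbt run results artifact compiled for the last step of that run.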
```
|
{
"source": "jeremyyu8/deepxde",
"score": 3
}
|
#### File: deepxde/data/func.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .data import Data
from ..utils import run_if_any_none
class Func(Data):
"""Function approximation.
"""
def __init__(
self, geom, func, num_train, num_test, dist_train="uniform", online=False
):
self.geom = geom
self.func = func
self.num_train = num_train
self.num_test = num_test
self.dist_train = dist_train
self.online = online
self.train_x, self.train_y = None, None
self.test_x, self.test_y = None, None
def losses(self, targets, outputs, loss, model):
return [loss(targets, outputs)]
def train_next_batch(self, batch_size=None):
if self.online:
self.train_x = self.geom.random_points(batch_size, "pseudo")
self.train_y = self.func(self.train_x)
elif self.train_x is None:
if self.dist_train == "uniform":
self.train_x = self.geom.uniform_points(self.num_train, True)
else:
self.train_x = self.geom.random_points(self.num_train, "sobol")
self.train_y = self.func(self.train_x)
return self.train_x, self.train_y
@run_if_any_none("test_x", "test_y")
def test(self):
self.test_x = self.geom.uniform_points(self.num_test, True)
self.test_y = self.func(self.test_x)
return self.test_x, self.test_y
```
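A small sketch of how this `Func` dataset might be used for 1-D function approximation; `dde.geometry.Interval` is assumed from the public deepxde API.
```python
# Minimal sketch, assuming dde.geometry.Interval from the public deepxde API.
import numpy as np
import deepxde as dde

geom = dde.geometry.Interval(-1, 1)
data = dde.data.Func(geom, lambda x: np.sin(np.pi * x), num_train=16, num_test=100)
x_train, y_train = data.train_next_batch()  # 16 uniformly spaced training points
x_test, y_test = data.test()                # 100 uniformly spaced test points
```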
#### File: deepxde/maps/pfnn.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .fnn import FNN
from .. import config
from ..backend import tf
from ..utils import timing
class PFNN(FNN):
"""Parallel Feed-forward neural networks.
    Feed-forward neural networks that support independent "branches" or sub-networks inside the network.
    Args:
        layer_size: A nested list defining the architecture of the neural network (how the layers are connected).
            If `layer_size[i]` is an int, it represents one layer shared by all the outputs;
            if `layer_size[i]` is a list, it represents `len(layer_size[i])` sub-layers, each of which is exclusively used by one output.
            Note that `len(layer_size[i])` should equal the number of outputs.
            Every number specifies the number of neurons in that layer.
"""
def __init__(
self,
layer_size,
activation,
kernel_initializer,
regularization=None,
dropout_rate=0,
batch_normalization=None,
):
super(PFNN, self).__init__(
layer_size,
activation,
kernel_initializer,
regularization,
dropout_rate,
batch_normalization,
)
@timing
def build(self):
def layer_map(_y, layer_size, net):
if net.batch_normalization is None:
_y = net.dense(_y, layer_size, activation=net.activation)
elif net.batch_normalization == "before":
_y = net.dense_batchnorm_v1(_y, layer_size)
elif net.batch_normalization == "after":
_y = net.dense_batchnorm_v2(_y, layer_size)
else:
raise ValueError("batch_normalization")
if net.dropout_rate > 0:
_y = tf.layers.dropout(_y, rate=net.dropout_rate, training=net.dropout)
return _y
print("Building feed-forward neural network...")
self.x = tf.placeholder(config.real(tf), [None, self.layer_size[0]])
y = self.x
if self._input_transform is not None:
y = self._input_transform(y)
# hidden layers
for i_layer in range(len(self.layer_size) - 2):
if isinstance(self.layer_size[i_layer + 1], (list, tuple)):
if isinstance(y, (list, tuple)):
# e.g. [8, 8, 8] -> [16, 16, 16]
if len(self.layer_size[i_layer + 1]) != len(
self.layer_size[i_layer]
):
raise ValueError(
"Number of sub-layers should be the same when feed-forwarding"
)
y = [
layer_map(y[i_net], self.layer_size[i_layer + 1][i_net], self)
for i_net in range(len(self.layer_size[i_layer + 1]))
]
else:
# e.g. 64 -> [8, 8, 8]
y = [
layer_map(y, self.layer_size[i_layer + 1][i_net], self)
for i_net in range(len(self.layer_size[i_layer + 1]))
]
else:
# e.g. 64 -> 64
y = layer_map(y, self.layer_size[i_layer + 1], self)
# output layers
if isinstance(y, (list, tuple)):
# e.g. [3, 3, 3] -> 3
if len(self.layer_size[-2]) != self.layer_size[-1]:
raise ValueError(
"Number of sub-layers should be the same as number of outputs"
)
y = [self.dense(y[i_net], 1) for i_net in range(len(y))]
self.y = tf.concat(y, axis=1)
else:
self.y = self.dense(y, self.layer_size[-1])
if self._output_transform is not None:
self.y = self._output_transform(self.x, self.y)
self.y_ = tf.placeholder(config.real(tf), [None, self.layer_size[-1]])
self.built = True
```
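To make the nested `layer_size` convention concrete, here is a construction sketch (the activation and initializer strings follow the usual deepxde conventions and are assumptions of this sketch): two inputs, a shared 64-neuron hidden layer, one parallel layer of two 32-neuron sub-networks, and two outputs.
```python
# Sketch only: one shared hidden layer followed by two parallel sub-networks,
# one 32-neuron branch per output.
net = PFNN([2, 64, [32, 32], 2], "tanh", "Glorot uniform")
```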
#### File: deepxde/examples/dataset.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import deepxde as dde
def main():
fname_train = "dataset/dataset.train"
fname_test = "dataset/dataset.test"
data = dde.data.DataSet(
fname_train=fname_train,
fname_test=fname_test,
col_x=(0,),
col_y=(1,),
standardize=True,
)
layer_size = [1] + [50] * 3 + [1]
activation = "tanh"
initializer = "Glorot normal"
net = dde.maps.FNN(layer_size, activation, initializer)
model = dde.Model(data, net)
model.compile("adam", lr=0.001, metrics=["l2 relative error"])
losshistory, train_state = model.train(epochs=50000)
dde.saveplot(losshistory, train_state, issave=True, isplot=True)
if __name__ == "__main__":
main()
```
|
{
"source": "JeremyZhao1989/leanTest",
"score": 2
}
|
#### File: strack_desktop/chat_ui/ChatUI.py
```python
import os
import Qt.QtGui as QtGui
import cgtk_qt
current_dir = os.path.dirname(__file__)
UI = os.path.join(current_dir, "chat.ui")
FormClass, BaseClass = cgtk_qt.load_ui_type(UI)
class ChatUI(FormClass, BaseClass):
def __init__(self, parent=None):
super(ChatUI, self).__init__(parent)
# setup ui
self.setupUi(self)
if __name__ == "__main__":
cgtk_qt.render_gui(ChatUI)
```
#### File: leanTest/cgtkLibs/cgtk_email.py
```python
from email.header import Header
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.utils import parseaddr, formataddr
import smtplib
from cgtk_config import studio_config
def format_addr(s):
name, addr = parseaddr(s)
return formataddr((
Header(name, 'utf-8').encode(),
addr.encode('utf-8') if isinstance(addr, unicode) else addr))
def send_mail(to_addr, subject_string, body, image=None):
email_cfg = studio_config.get('email')
from_addr = email_cfg['from']
password = email_cfg['password']
smtp_server = email_cfg['smtp_server']
body_string = body
msg_root = MIMEMultipart('related')
msg_root['From'] = format_addr('cgtk-%s<%s>' % (email_cfg["name"], from_addr))
msg_root['To'] = format_addr('user<%s>' % to_addr)
msg_root['Subject'] = Header(subject_string, 'utf-8').encode()
if image is not None:
msg_text = MIMEText(body_string + '<br><img src="cid:image1">', 'html')
msg_root.attach(msg_text)
fp = open(image, 'rb')
msg_image = MIMEImage(fp.read())
fp.close()
msg_image.add_header('Content-ID', '<image1>')
msg_root.attach(msg_image)
else:
msg_text = MIMEText(body_string, 'html')
msg_root.attach(msg_text)
smtp_port = int(email_cfg['port'])
if email_cfg['ssl'] == 'yes':
server = smtplib.SMTP_SSL(smtp_server, smtp_port)
else:
server = smtplib.SMTP(smtp_server, smtp_port)
# server.set_debuglevel(1)
server.login(from_addr, password)
server.sendmail(from_addr, [to_addr], msg_root.as_string())
server.quit()
if __name__ == "__main__":
send_mail("<EMAIL>", "CGTK Test", "Hello, this is a test", image=None)
```
#### File: cgtkLibs/cgtk_os/path_to_unc.py
```python
import sys
import re
import subprocess
import logging
import cgtk_log
log = cgtk_log.cgtk_log(level=logging.INFO)
def path_to_unc(path):
"""
    Convert a path with a drive letter to a UNC path, so drive letters need not be mapped.
    Args:
        path (basestring): path to convert
    Returns:
        (basestring): the converted path
"""
if sys.platform == "win32": # fixme: lets make it cross platform
path = path.replace('/', '\\')
drive_re = re.compile(r"(?P<drive>[A-Z]:) +(?P<map>\S+)")
mappings = {}
p = subprocess.Popen("net use",
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
p.wait()
for line in p.stdout:
search = drive_re.search(line)
if search:
gd = search.groupdict()
mappings[gd['drive']] = gd['map']
for key in mappings:
if key in path:
path = path.replace(key, mappings[key])
return path
if __name__ == "__main__":
print path_to_unc("E:\\USB/AaronWork/aas_playblast")
```
#### File: cgtkLibs/cgtk_qt/DragMoveWidget.py
```python
import Qt.QtGui as QtGui
import Qt.QtCore as QtCore
class DragMoveWidget(QtGui.QWidget):
def __init__(self, *args, **kwargs):
super(DragMoveWidget, self).__init__(*args, **kwargs)
self.__last_clicked_pos = None
@property
def last_clicked_pos(self):
return self.__last_clicked_pos
def mousePressEvent(self, event):
super(DragMoveWidget, self).mousePressEvent(event)
self.__last_clicked_pos = (event.globalPos(), QtCore.QPoint(self.pos()))
def mouseMoveEvent(self, event):
if self.__last_clicked_pos:
move, begin = self.__last_clicked_pos
self.move((event.globalPos()-move)+begin)
else:
super(DragMoveWidget, self).mouseMoveEvent(event)
def mouseReleaseEvent(self, event):
super(DragMoveWidget, self).mouseReleaseEvent(event)
self.__last_clicked_pos = None
if __name__ == "__main__":
pass
```
#### File: cgtkLibs/cgtk_qt/load_style.py
```python
def load_style(qss_file):
f = open(qss_file, 'r')
data = f.read()
    data = data.strip('\n')
return data
```
#### File: cgtkLibs/cgtk_qt/load_ui_type.py
```python
import Qt.QtGui as QtGui
import Qt
import xml.etree.ElementTree as xml
from cStringIO import StringIO
import logging
import cgtk_log
log = cgtk_log.cgtk_log(level=logging.INFO)
def load_ui_type(ui_file):
if "PySide" in Qt.__binding__:
import pysideuic
parsed = xml.parse(ui_file)
widget_class = parsed.find('widget').get('class')
form_class = parsed.find('class').text
with open(ui_file, 'r') as f:
o = StringIO()
frame = {}
pysideuic.compileUi(f, o, indent=0)
pyc = compile(o.getvalue(), '<string>', 'exec')
exec pyc in frame
# Fetch the base_class and form class based on their type in the xml from designer
form_class = frame['Ui_%s' % form_class]
base_class = getattr(QtGui, widget_class)
return form_class, base_class
elif Qt.__binding__ == "PyQt4":
import PyQt4.uic
return PyQt4.uic.loadUiType(ui_file)
if __name__ == "__main__":
pass
```
#### File: cgtkLibs/cgtk_qt/setup_ui.py
```python
import os
from Qt import QtCompat
# Set preferred binding
os.environ['QT_PREFERRED_BINDING'] = os.pathsep.join(['PySide', 'PyQt4'])
from Qt import QtWidgets, load_ui
def setup_ui(uifile, base_instance=None):
"""Load a Qt Designer .ui file and returns an instance of the user interface
Args:
uifile (str): Absolute path to .ui file
base_instance (QWidget): The widget into which UI widgets are loaded
ui_class_info (dict): only used in PySide
Returns:
QWidget: the base instance
"""
ui = load_ui(uifile) # Qt.py mapped function
    if base_instance:
        for member in dir(ui):
            if not member.startswith('__') and \
                    member != 'staticMetaObject':
                setattr(base_instance, member, getattr(ui, member))
        return base_instance
    return ui
if __name__ == "__main__":
pass
```
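A hedged sketch of the typical call pattern for `setup_ui`: loading a Designer file (the file name below is made up) into a QWidget subclass so the .ui widgets become attributes of the instance.
```python
# Illustrative only: "my_dialog.ui" is a placeholder file name.
class MyDialog(QtWidgets.QWidget):
    def __init__(self, parent=None):
        super(MyDialog, self).__init__(parent)
        setup_ui("my_dialog.ui", self)  # widgets from the .ui become attributes of self
```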
#### File: scripts/cgtk_shelf/get_shelf_layout.py
```python
import pymel.core as pm
import logging
import cgtk_log
log = cgtk_log.cgtk_log(level=logging.INFO)
def get_shelf_layout():
for shelfPrLayout in pm.lsUI(type='tabLayout'):
return shelfPrLayout.shortName()
log.warning("no shelf layout found.")
return None
if __name__ == "__main__":
get_shelf_layout()
```
#### File: scripts/cgtk_shelf/load_sheves.py
```python
import logging
import cgtk_log
log = cgtk_log.cgtk_log(level=logging.INFO)
def load_sheves():
pass
if __name__ == "__main__":
load_sheves()
```
|
{
"source": "JeremyZhao1998/PaddleNLP",
"score": 2
}
|
#### File: community/nosaydomore/convert_roberta.py
```python
from collections import OrderedDict
import argparse
import paddle
import torch
import os
import json
from paddle.utils.download import get_path_from_url
huggingface_to_paddle = {
"embeddings.LayerNorm": "embeddings.layer_norm",
"encoder.layer": "encoder.layers",
"attention.self.query": "self_attn.q_proj",
"attention.self.key": "self_attn.k_proj",
"attention.self.value": "self_attn.v_proj",
"attention.output.dense": "self_attn.out_proj",
"intermediate.dense": "linear1",
"output.dense": "linear2",
"attention.output.LayerNorm": "norm1",
"output.LayerNorm": "norm2",
"qa_outputs": 'classifier',
'lm_head.bias': 'lm_head.decoder.bias'
}
convert_model_name_list = [
"roberta-base",
"roberta-large",
"deepset/roberta-base-squad2",
"uer/roberta-base-finetuned-chinanews-chinese",
"sshleifer/tiny-distilroberta-base",
"uer/roberta-base-finetuned-cluener2020-chinese",
"uer/roberta-base-chinese-extractive-qa",
]
link_template = "https://huggingface.co/{}/resolve/main/pytorch_model.bin"
pretrained_init_configuration = {
"roberta-base": {
"attention_probs_dropout_prob": 0.1,
"layer_norm_eps": 1e-05,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"max_position_embeddings": 514,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"type_vocab_size": 1,
"vocab_size": 50265
},
"roberta-large": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"initializer_range": 0.02,
"intermediate_size": 4096,
"max_position_embeddings": 514,
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 1,
"type_vocab_size": 1,
"layer_norm_eps": 1e-05,
"vocab_size": 50265
},
"deepset/roberta-base-squad2": {
"layer_norm_eps": 1e-05,
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"max_position_embeddings": 514,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"type_vocab_size": 1,
"vocab_size": 50265
},
"uer/roberta-base-finetuned-chinanews-chinese": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"type_vocab_size": 2,
"vocab_size": 21128
},
"sshleifer/tiny-distilroberta-base": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 2,
"initializer_range": 0.02,
"intermediate_size": 2,
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"num_attention_heads": 2,
"num_hidden_layers": 2,
"pad_token_id": 1,
"type_vocab_size": 1,
"vocab_size": 50265
},
"uer/roberta-base-finetuned-cluener2020-chinese": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"type_vocab_size": 2,
"vocab_size": 21128
},
"uer/roberta-base-chinese-extractive-qa": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"type_vocab_size": 2,
"vocab_size": 21128
}
}
def convert_pytorch_checkpoint_to_paddle(pytorch_src_base_path,
paddle_dump_base_path):
for model_name in convert_model_name_list:
model_state_url = link_template.format(model_name)
paddle_dump_path = os.path.join(paddle_dump_base_path,
model_name.split('/')[-1])
if os.path.exists(
os.path.join(paddle_dump_path, 'model_state.pdparams')):
continue
if not os.path.exists(paddle_dump_path):
os.makedirs(paddle_dump_path)
with open(os.path.join(paddle_dump_path, 'model_config.json'),
'w') as fw:
json.dump(pretrained_init_configuration[model_name], fw)
_ = get_path_from_url(model_state_url, paddle_dump_path)
pytorch_checkpoint_path = os.path.join(paddle_dump_path,
'pytorch_model.bin')
pytorch_state_dict = torch.load(
pytorch_checkpoint_path, map_location="cpu")
paddle_state_dict = OrderedDict()
for k, v in pytorch_state_dict.items():
is_transpose = False
if k[-7:] == ".weight":
if ".embeddings." not in k and ".LayerNorm." not in k:
if v.ndim == 2:
v = v.transpose(0, 1)
is_transpose = True
oldk = k
if k == 'lm_head.bias' and 'lm_head.decoder.bias' in pytorch_state_dict.keys(
):
continue
for huggingface_name, paddle_name in huggingface_to_paddle.items():
k = k.replace(huggingface_name, paddle_name)
if k[:5] == 'bert.':
k = k.replace('bert.', 'roberta.')
print(f"Converting: {oldk} => {k} | is_transpose {is_transpose}")
paddle_state_dict[k] = v.data.numpy()
del pytorch_state_dict
paddle_dump_path = os.path.join(paddle_dump_path,
'model_state.pdparams')
paddle.save(paddle_state_dict, paddle_dump_path)
if __name__ == "__main__":
pytorch_src_base_path = os.path.dirname(os.path.realpath(__file__))
paddle_dump_base_path = pytorch_src_base_path
convert_pytorch_checkpoint_to_paddle(pytorch_src_base_path,
paddle_dump_base_path)
```
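The script leaves `model_config.json` and `model_state.pdparams` in each output directory, so the converted checkpoint can presumably be loaded by passing that local path to PaddleNLP's `from_pretrained`; the directory name below is illustrative.
```python
# Hedged sketch: load a converted checkpoint from its local output directory.
from paddlenlp.transformers import RobertaModel
model = RobertaModel.from_pretrained("./roberta-base")
```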
#### File: clue/classification/predict_clue_classifier.py
```python
import argparse
# import logging
import os
import sys
import random
import time
import math
import json
from functools import partial
import numpy as np
import paddle
from paddle.io import DataLoader
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.metric import Metric, Accuracy, Precision, Recall
from paddlenlp.datasets import load_dataset
from paddlenlp.data import Stack, Tuple, Pad, Dict
from paddlenlp.transformers import BertForSequenceClassification, BertTokenizer
from paddlenlp.transformers import ErnieForSequenceClassification, ErnieTokenizer
from paddlenlp.transformers import RobertaForSequenceClassification, RobertaTokenizer
METRIC_CLASSES = {
"afqmc": Accuracy,
"tnews": Accuracy,
"iflytek": Accuracy,
"ocnli": Accuracy,
"cmnli": Accuracy,
"cluewsc2020": Accuracy,
"csl": Accuracy,
}
MODEL_CLASSES = {
"bert": (BertForSequenceClassification, BertTokenizer),
"ernie": (ErnieForSequenceClassification, ErnieTokenizer),
"roberta": (RobertaForSequenceClassification, RobertaTokenizer),
}
def parse_args():
parser = argparse.ArgumentParser()
# Required parameters
parser.add_argument(
"--task_name",
default=None,
type=str,
required=True,
help="The name of the task to train selected in the list: " +
", ".join(METRIC_CLASSES.keys()), )
parser.add_argument(
"--model_type",
default="ernie",
type=str,
help="Model type selected in the list: " +
", ".join(MODEL_CLASSES.keys()), )
parser.add_argument(
"--model_name_or_path",
default=None,
type=str,
required=True,
help="Path to pre-trained model or shortcut name selected in the list: "
+ ", ".join(
sum([
list(classes[-1].pretrained_init_configuration.keys())
for classes in MODEL_CLASSES.values()
], [])), )
parser.add_argument(
"--output_dir",
default="tmp",
type=str,
help="The output directory where the model predictions and checkpoints will be written.",
)
parser.add_argument(
"--max_seq_length",
default=128,
type=int,
help="The maximum total input sequence length after tokenization. Sequences longer "
"than this will be truncated, sequences shorter will be padded.", )
parser.add_argument(
"--batch_size",
default=128,
type=int,
help="Batch size per GPU/CPU for training.", )
parser.add_argument(
"--device",
default="gpu",
type=str,
help="The device to select to train the model, is must be cpu/gpu/xpu.")
args = parser.parse_args()
return args
def convert_example(example,
tokenizer,
label_list,
max_seq_length=512,
is_test=False):
"""convert a glue example into necessary features"""
if not is_test:
# `label_list == None` is for regression task
label_dtype = "int64" if label_list else "float32"
# Get the label
label = example['label']
label = np.array([label], dtype=label_dtype)
# Convert raw text to feature
if 'sentence' in example:
example = tokenizer(example['sentence'], max_seq_len=max_seq_length)
elif 'sentence1' in example:
example = tokenizer(
example['sentence1'],
text_pair=example['sentence2'],
max_seq_len=max_seq_length)
elif 'keyword' in example: # CSL
sentence1 = " ".join(example['keyword'])
example = tokenizer(
sentence1, text_pair=example['abst'], max_seq_len=max_seq_length)
elif 'target' in example: # wsc
text, query, pronoun, query_idx, pronoun_idx = example['text'], example[
'target']['span1_text'], example['target']['span2_text'], example[
'target']['span1_index'], example['target']['span2_index']
text_list = list(text)
assert text[pronoun_idx:(pronoun_idx + len(pronoun)
)] == pronoun, "pronoun: {}".format(pronoun)
assert text[query_idx:(query_idx + len(query)
)] == query, "query: {}".format(query)
if pronoun_idx > query_idx:
text_list.insert(query_idx, "_")
text_list.insert(query_idx + len(query) + 1, "_")
text_list.insert(pronoun_idx + 2, "[")
text_list.insert(pronoun_idx + len(pronoun) + 2 + 1, "]")
else:
text_list.insert(pronoun_idx, "[")
text_list.insert(pronoun_idx + len(pronoun) + 1, "]")
text_list.insert(query_idx + 2, "_")
text_list.insert(query_idx + len(query) + 2 + 1, "_")
text = "".join(text_list)
example = tokenizer(text, max_seq_len=max_seq_length)
if not is_test:
return example['input_ids'], example['token_type_ids'], label
else:
return example['input_ids'], example['token_type_ids']
def do_test(args):
paddle.set_device(args.device)
args.task_name = args.task_name.lower()
metric_class = METRIC_CLASSES[args.task_name]
args.model_type = args.model_type.lower()
model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
train_ds, test_ds = load_dataset(
'clue', args.task_name, splits=('train', 'test'))
tokenizer = tokenizer_class.from_pretrained(args.model_name_or_path)
trans_func = partial(
convert_example,
tokenizer=tokenizer,
label_list=train_ds.label_list,
max_seq_length=args.max_seq_length,
is_test=True)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=tokenizer.pad_token_id), # input
Pad(axis=0, pad_val=tokenizer.pad_token_type_id), # segment
): fn(samples)
test_ds = test_ds.map(trans_func, lazy=True)
test_batch_sampler = paddle.io.BatchSampler(
test_ds, batch_size=args.batch_size, shuffle=False)
test_data_loader = DataLoader(
dataset=test_ds,
batch_sampler=test_batch_sampler,
collate_fn=batchify_fn,
num_workers=0,
return_list=True)
    num_classes = 1 if train_ds.label_list is None else len(train_ds.label_list)
model_class, _ = MODEL_CLASSES[args.model_type]
model = model_class.from_pretrained(
args.model_name_or_path, num_classes=num_classes)
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
if args.task_name == 'ocnli':
args.task_name = 'ocnli_50k'
f = open(
os.path.join(args.output_dir, args.task_name + "_predict.json"), 'w')
for step, batch in enumerate(test_data_loader):
input_ids, segment_ids = batch
with paddle.no_grad():
logits = model(input_ids, segment_ids)
preds = paddle.argmax(logits, axis=1)
for idx, pred in enumerate(preds):
j = json.dumps({"id": idx, "label": train_ds.label_list[pred]})
f.write(j + "\n")
if __name__ == "__main__":
args = parse_args()
do_test(args)
```
#### File: plato-2/utils/__init__.py
```python
from itertools import chain
import numpy as np
import paddle
def repeat_array(array, times):
"""Repeate numpy array."""
if isinstance(array, list):
return list(chain(*([array] * times)))
else:
return np.concatenate([array] * times, axis=0)
def gen_inputs(inputs, latent_type_size):
batch_size = len(inputs["data_id"])
new_bsz = batch_size * latent_type_size
inputs = {
name: repeat_array(array, latent_type_size)
for name, array in inputs.items()
}
# Add latent_id
inputs["latent_id"] = np.array(
[i for i in range(latent_type_size) for _ in range(batch_size)],
dtype="int64").reshape([-1, 1])
#print('\nplato_inputs:')
for key in inputs:
inputs[key] = paddle.to_tensor(inputs[key])
if key in [
'token_ids', 'type_ids', 'pos_ids', 'tgt_ids', 'tgt_pos',
'data_id'
]:
inputs[key] = paddle.squeeze(inputs[key], axis=-1)
#print(key, inputs[key].shape, inputs[key].dtype)
return inputs
def pad_batch_data(insts, pad_id=0):
"""Pad the instances to the max sequence length in batch. """
max_len = max(map(len, insts))
inst_data = np.array(
[list(inst) + [pad_id] * (max_len - len(inst)) for inst in insts])
return inst_data.astype("int64").reshape([-1, max_len, 1])
```
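A tiny worked example of `pad_batch_data` above: three token-id lists are padded to the batch maximum length of 4 and reshaped to `[batch, max_len, 1]`.
```python
batch = pad_batch_data([[3, 5], [7, 8, 9, 2], [1]], pad_id=0)
print(batch.shape)      # (3, 4, 1)
print(batch[0, :, 0])   # [3 5 0 0]
```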
#### File: faster_ernie/token_cls/predict.py
```python
import argparse
import paddle
from paddlenlp.datasets import load_dataset
from paddlenlp.experimental import FasterErnieForTokenClassification, to_tensor
# yapf: disable
parser = argparse.ArgumentParser()
parser.add_argument("--save_dir", type=str, default="ckpt/model_4221", help="The path to model parameters to be loaded.")
parser.add_argument("--max_seq_length", type=int, default=128, help="The maximum total input sequence length after tokenization. "
"Sequences longer than this will be truncated, sequences shorter will be padded.")
parser.add_argument("--batch_size", type=int, default=1, help="Batch size per GPU/CPU for training.")
parser.add_argument('--device', choices=['cpu', 'gpu', 'xpu'], default="gpu", help="Select which device to train model, defaults to gpu.")
args = parser.parse_args()
# yapf: enable
def predict(model, data, label_map, batch_size=1):
    # Separates data into batches.
batches = [
data[idx:idx + batch_size] for idx in range(0, len(data), batch_size)
]
results = []
model.eval()
for texts in batches:
texts = to_tensor(texts)
logits, preds = model(texts)
preds = preds.numpy()
for pred in preds:
            # drop the labels of the [CLS] and [SEP] tokens
pred = pred[1:-1]
label = [label_map[i] for i in pred]
results.append(label)
return results
if __name__ == "__main__":
paddle.set_device(args.device)
test_ds = load_dataset('msra_ner', splits=('test'))
texts = ["".join(example["tokens"]) for example in test_ds]
label_map = dict(enumerate(test_ds.label_list))
model = FasterErnieForTokenClassification.from_pretrained(
args.save_dir,
num_classes=len(test_ds.label_list),
max_seq_len=args.max_seq_length,
is_split_into_words=True)
results = predict(model, texts, label_map, args.batch_size)
for idx, text in enumerate(texts):
seq_len = len(text)
label = results[idx][:seq_len]
print(text, " : ", " ".join(label))
```
#### File: information_extraction/waybill_ie/run_bigru_crf.py
```python
import argparse
import os
from functools import partial
import paddle
import paddle.nn as nn
from paddlenlp.datasets import MapDataset
from paddlenlp.data import Stack, Tuple, Pad
from paddlenlp.metrics import ChunkEvaluator
from paddlenlp.embeddings import TokenEmbedding
from data import load_dict, load_dataset, parse_decodes
from model import BiGRUWithCRF
parser = argparse.ArgumentParser()
# yapf: disable
parser.add_argument("--save_dir", default='./bigru_crf_ckpt', type=str, help="The output directory where the model checkpoints will be written.")
parser.add_argument("--epochs", default=10, type=int, help="Total number of training epochs to perform.")
parser.add_argument("--batch_size", default=200, type=int, help="Batch size per GPU/CPU for training.")
parser.add_argument("--device", default="gpu", type=str, choices=["cpu", "gpu"] ,help="The device to select to train the model, is must be cpu/gpu.")
parser.add_argument("--data_dir", default='./waybill_ie/data', type=str, help="The folder where the dataset is located.")
args = parser.parse_args()
# yapf: enable
def convert_tokens_to_ids(tokens, vocab, oov_token=None):
token_ids = []
oov_id = vocab.get(oov_token) if oov_token else None
for token in tokens:
token_id = vocab.get(token, oov_id)
token_ids.append(token_id)
return token_ids
def convert_to_features(example, word_vocab, label_vocab):
tokens, labels = example
token_ids = convert_tokens_to_ids(tokens, word_vocab, 'OOV')
label_ids = convert_tokens_to_ids(labels, label_vocab, 'O')
return token_ids, len(token_ids), label_ids
@paddle.no_grad()
def evaluate(model, metric, data_loader):
model.eval()
metric.reset()
for token_ids, lengths, label_ids in data_loader:
preds = model(token_ids, lengths)
n_infer, n_label, n_correct = metric.compute(lengths, preds, label_ids)
metric.update(n_infer.numpy(), n_label.numpy(), n_correct.numpy())
precision, recall, f1_score = metric.accumulate()
print("[EVAL] Precision: %f - Recall: %f - F1: %f" %
(precision, recall, f1_score))
model.train()
@paddle.no_grad()
def predict(model, data_loader, ds, label_vocab):
all_preds = []
all_lens = []
for token_ids, lengths, label_ids in data_loader:
preds = model(token_ids, lengths)
all_preds.append(preds.numpy())
all_lens.append(lengths)
sentences = [example[0] for example in ds.data]
results = parse_decodes(sentences, all_preds, all_lens, label_vocab)
return results
if __name__ == '__main__':
paddle.set_device(args.device)
# Create dataset, tokenizer and dataloader.
train_ds, dev_ds, test_ds = load_dataset(
datafiles=(os.path.join(args.data_dir, 'train.txt'),
os.path.join(args.data_dir, 'dev.txt'),
os.path.join(args.data_dir, 'test.txt')))
label_vocab = load_dict(os.path.join(args.data_dir, 'tag.dic'))
word_vocab = load_dict(os.path.join(args.data_dir, 'word.dic'))
trans_func = partial(
convert_to_features, word_vocab=word_vocab, label_vocab=label_vocab)
train_ds.map(trans_func)
dev_ds.map(trans_func)
test_ds.map(trans_func)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=word_vocab.get('OOV', 0), dtype='int32'), # token_ids
Stack(dtype='int64'), # seq_len
Pad(axis=0, pad_val=label_vocab.get('O', 0), dtype='int64') # label_ids
): fn(samples)
train_loader = paddle.io.DataLoader(
dataset=train_ds,
batch_size=args.batch_size,
shuffle=True,
drop_last=True,
return_list=True,
collate_fn=batchify_fn)
dev_loader = paddle.io.DataLoader(
dataset=dev_ds,
batch_size=args.batch_size,
drop_last=True,
return_list=True,
collate_fn=batchify_fn)
test_loader = paddle.io.DataLoader(
dataset=test_ds,
batch_size=args.batch_size,
drop_last=True,
return_list=True,
collate_fn=batchify_fn)
# Define the model netword and its loss
model = BiGRUWithCRF(300, 256, len(word_vocab), len(label_vocab))
optimizer = paddle.optimizer.Adam(
learning_rate=0.001, parameters=model.parameters())
metric = ChunkEvaluator(label_list=label_vocab.keys(), suffix=True)
step = 0
for epoch in range(args.epochs):
for token_ids, lengths, label_ids in train_loader:
loss = model(token_ids, lengths, label_ids)
loss = loss.mean()
loss.backward()
optimizer.step()
optimizer.clear_grad()
step += 1
print("[TRAIN] Epoch:%d - Step:%d - Loss: %f" % (epoch, step, loss))
evaluate(model, metric, dev_loader)
paddle.save(model.state_dict(),
os.path.join(args.save_dir, 'model_%d' % step))
preds = predict(model, test_loader, test_ds, label_vocab)
file_path = "bigru_crf_results.txt"
with open(file_path, "w", encoding="utf8") as fout:
fout.write("\n".join(preds))
# Print some examples
print(
"The results have been saved into: %s, some examples are shown below: "
% file_path)
print("\n".join(preds[:10]))
```
#### File: language_model/electra/get_ft_model.py
```python
import os
import hashlib
import argparse
import paddle
import paddle.nn as nn
#from paddlenlp.transformers import ElectraForTotalPretraining, ElectraDiscriminator, ElectraGenerator, ElectraModel
#from paddlenlp.transformers import ElectraTokenizer
#
#MODEL_CLASSES = {"electra": (ElectraForTotalPretraining, ElectraTokenizer), }
def get_md5sum(file_path):
md5sum = None
if os.path.isfile(file_path):
with open(file_path, 'rb') as f:
md5_obj = hashlib.md5()
md5_obj.update(f.read())
hash_code = md5_obj.hexdigest()
md5sum = str(hash_code).lower()
return md5sum
def main(args):
pretraining_model = os.path.join(args.model_dir, "model_state.pdparams")
if os.path.islink(pretraining_model):
print("%s already contain fine-tuning model, pleace check" %
args.model_dir)
exit(0)
print(
"load Electra pretrain model to get generator/discriminator model : %s \nmodel md5sum : %s"
% (pretraining_model, get_md5sum(pretraining_model)))
    # split total_pretraining_model into generator and discriminator state_dicts
total_pretraining_model = paddle.load(pretraining_model)
generator_state_dict = {}
discriminator_state_dict = {}
total_keys = []
num_keys = 0
for key in total_pretraining_model.keys():
new_key = None
if "generator." in key:
new_key = key.replace("generator.", "", 1)
generator_state_dict[new_key] = total_pretraining_model[key]
if "discriminator." in key:
new_key = key.replace("discriminator.", "", 1)
discriminator_state_dict[new_key] = total_pretraining_model[key]
num_keys += 1
print("total electra keys : ", num_keys)
print("total generator keys : ", len(generator_state_dict))
print("total discriminator keys : ", len(discriminator_state_dict))
# save generator and discriminator model to disk
paddle.save(generator_state_dict,
os.path.join(args.model_dir, args.generator_output_file))
paddle.save(discriminator_state_dict,
os.path.join(args.model_dir, args.discriminator_output_file))
print("save generator and discriminator model success")
os.rename(pretraining_model,
os.path.join(args.model_dir, "pretrain_model_state.pdparams"))
os.symlink(args.discriminator_output_file,
os.path.join(args.model_dir, "model_state.pdparams"))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--model_dir",
required=True,
default=None,
help="Directory of storing ElectraForTotalPreTraining model")
parser.add_argument(
"--generator_output_file",
default='generator_for_ft.pdparams',
help="Electra generator model for fine-tuning")
parser.add_argument(
"--discriminator_output_file",
default='discriminator_for_ft.pdparams',
help="Electra discriminator model for fine-tuning")
args, unparsed = parser.parse_known_args()
main(args)
```
#### File: language_model/end_to_end_memory_networks/config.py
```python
import yaml
class Config(object):
"""
    A simple wrapper for configs
"""
def __init__(self, config_path: str):
with open(config_path, 'r') as f:
self.d = yaml.load(f.read(), Loader=yaml.SafeLoader)
def __getattribute__(self, key):
d = super(Config, self).__getattribute__('d')
if key in d:
return d[key]
else:
return super(Config, self).__getattribute__(key)
```
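A hypothetical usage sketch of `Config`: the file name and keys below are illustrative, though the `data.py` module that follows reads `config.data_dir` and `config.data_name` in exactly this way.
```python
# config.yml (illustrative contents):
#   data_dir: ./data
#   data_name: ptb
config = Config("config.yml")
print(config.data_dir, config.data_name)  # attribute access backed by the YAML dict
```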
#### File: language_model/end_to_end_memory_networks/data.py
```python
import os
def read_data(fname, word2idx):
"""
    Data is processed into a one-dimensional list in which each value is the index of a word.
    Sentences are separated by the special token <eos>.
Args:
fname (str):
data filename
word2idx (dict):
word dict
Returns:
        list: the encoded word indices
"""
if os.path.isfile(fname):
with open(fname) as f:
lines = f.readlines()
else:
raise (Exception("[!] Data %s not found" % fname))
words = []
for line in lines:
words.extend(line.split())
print("Read %s words from %s" % (len(words), fname))
data = list()
for line in lines:
for word in line.split():
index = word2idx[word]
data.append(index)
data.append(word2idx['<eos>'])
return data
def load_vocab(fname):
"""
load word dict
Args:
        fname (str): filename of the vocab file
Returns:
dict: word dict
"""
word2idx = {}
with open(fname, "r") as f:
for line in f:
pair = line.split()
word2idx[pair[0]] = int(pair[1])
return word2idx
def load_data(config):
"""
load data
Args:
config: config
Returns:
word dict, and train, valid, test data
"""
vocab_path = os.path.join(config.data_dir,
"%s.vocab.txt" % config.data_name)
word2idx = load_vocab(vocab_path)
train_data = read_data(
os.path.join(config.data_dir, "%s.train.txt" % config.data_name),
word2idx)
valid_data = read_data(
os.path.join(config.data_dir, "%s.valid.txt" % config.data_name),
word2idx)
test_data = read_data(
os.path.join(config.data_dir, "%s.test.txt" % config.data_name),
word2idx)
return word2idx, train_data, valid_data, test_data
```
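For reference, `load_vocab` above expects one `word index` pair per line. A hedged sketch of writing and reading such a file (words, indices, and the file name are made up):
```python
# Illustrative only: write a vocab file in the "word index" format load_vocab reads.
with open("ptb.vocab.txt", "w") as f:
    for word, idx in [("<eos>", 0), ("the", 1), ("of", 2)]:
        f.write("%s %d\n" % (word, idx))
word2idx = load_vocab("ptb.vocab.txt")  # {"<eos>": 0, "the": 1, "of": 2}
```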
#### File: language_model/pretraining_data_prepare/main.py
```python
import argparse
import os
import subprocess
from paddlenlp.utils.log import logger
from text_formatting.bookcorpus import BookscorpusTextFormatter
from text_formatting.wikicorpus import WikicorpusTextFormatter
from text_sharding import Sharding, EnglishSegmenter, ChineseSegmenter
from create_pretraining_data import create_instances_from_document, write_instance_to_example_file
# yapf: disable
parser = argparse.ArgumentParser()
parser.add_argument("--formatted_file", default=None, type=str,
help="The input train corpus which should be already formatted as one article one line."
"It can be directory with .txt files or a path to a single file")
parser.add_argument('--skip_formatting', type=eval, default=True, required=True,
help="If the input file already have forrmatted as formatted as one article one line, "
"you can skip text formatting precoess.")
parser.add_argument("--output_dir", default=None, type=str, required=True,
help="The output directory where the pretrained data will be written.")
parser.add_argument("--model_name", choices=['bert-base-uncased', 'bert-base-chinese', 'bert-wwm-chinese','ernie-1.0'],
default="bert-base-chinese", required=True,
help="Select which model to pretrain, defaults to bert-base-chinese.")
parser.add_argument("--max_seq_length", default=128, type=int,
help="The maximum total input sequence length after WordPiece tokenization. \n"
"Sequences longer than this will be truncated, and sequences shorter \n"
"than this will be padded.")
parser.add_argument("--max_word_length", default=4, type=int,
help="The maximum total chinese characters in a chinese word after chinese word segmentation tokenization.")
parser.add_argument("--dupe_factor", default=10, type=int,
help="Number of times to duplicate the input data (with different masks).")
parser.add_argument("--max_predictions_per_seq", default=20, type=int, help="Maximum sequence length.")
parser.add_argument("--masked_lm_prob", default=0.15, type=float, help="Masked LM probability.")
parser.add_argument("--short_seq_prob", default=0.1, type=float,
help="Probability to create a sequence shorter than maximum sequence length")
parser.add_argument("--do_lower_case", action="store_true", default=True,
help="Whether to lower case the input text. True for uncased models, False for cased models.")
parser.add_argument('--random_seed', type=int, default=10000, help="random seed for initialization")
parser.add_argument('--n_train_shards', type=int, default=256, help='Specify the number of train shards to generate')
parser.add_argument('--n_test_shards', type=int, default=1, help='Specify the number of test shards to generate')
parser.add_argument('--fraction_test_set', type=float, default=0.1,
help='Specify the fraction (0.1) of the data to withhold for the test data split (based on number of sequences)')
args = parser.parse_args()
# yapf: enable
def create_record_worker(shardingfile_prefix,
outputfile_prefix,
shard_id,
do_lower_case,
model_name,
max_seq_length,
masked_lm_prob,
max_predictions_per_seq,
random_seed=10000,
dupe_factor=10):
bert_preprocessing_command = 'python create_pretraining_data.py'
bert_preprocessing_command += ' --input_file=' + shardingfile_prefix \
+ '_' + str(shard_id) + '.txt'
bert_preprocessing_command += ' --output_file=' + outputfile_prefix \
+ '_' + str(shard_id) + '.hdf5'
bert_preprocessing_command += ' --do_lower_case' if do_lower_case else ''
bert_preprocessing_command += ' --max_seq_length=' + str(max_seq_length)
bert_preprocessing_command += ' --max_predictions_per_seq=' + str(
max_predictions_per_seq)
bert_preprocessing_command += ' --masked_lm_prob=' + str(masked_lm_prob)
bert_preprocessing_command += ' --random_seed=' + str(random_seed)
bert_preprocessing_command += ' --dupe_factor=' + str(dupe_factor)
bert_preprocessing_command += ' --model_name=' + str(model_name)
bert_preprocessing_process = subprocess.Popen(
bert_preprocessing_command, shell=True)
last_process = bert_preprocessing_process
# This could be better optimized (fine if all take equal time)
if shard_id % 10 == 0 and shard_id > 0:
bert_preprocessing_process.wait()
return last_process
def do_text_formatting(model_name):
if model_name not in [
"bert-base-uncased", "bert-base-chinese", "bert-wwm-chinese"
]:
logger.error(
"The implimented text formattting process only fits"
"bert-base-uncased, bert-base-chinese and bert-wwm-chinese."
"Preraining model %s you should format the corpus firstly by your own."
)
logger.info("=" * 50)
logger.info("Start to text formatting.")
if model_name == "bert-base-uncased":
wiki_formatter = WikicorpusTextFormatter('en', args.output_dir)
formatted_files = [wiki_formatter.formatted_file]
book_formatter = BookscorpusTextFormatter(args.output_dir)
formatted_files.append(book_formatter.formatted_file)
else:
wiki_formatter = WikicorpusTextFormatter('zh', args.output_dir)
formatted_files = wiki_formatter.formatted_file
logger.info("End to text formatting")
return formatted_files
def do_text_sharding(model_name, formatted_files, output_dir, n_train_shards,
n_test_shards, fraction_test_set):
logger.info("=" * 50)
logger.info("Start to text Sharding. Formated files: {}".format(
formatted_files))
sharding_path = os.path.join(output_dir,
'sharded_train_shards_' + str(n_train_shards) \
+ "_test_shards_" + str(n_test_shards)) \
+ "_fraction_" + str(fraction_test_set)
if not os.path.exists(sharding_path):
os.makedirs(sharding_path)
# Segmentation is here because all datasets look the same in one article/book/whatever per line format, and
# it seemed unnecessarily complicated to add an additional preprocessing step to call just for this.
# For english, we use EnglishSegmenter. For chinese, we use ChineseSegmenter.
if model_name == "bert-base-uncased":
segmenter = EnglishSegmenter()
else:
segmenter = ChineseSegmenter()
sharding_output_name_prefix = os.path.join(sharding_path, "sharding")
sharding = Sharding(formatted_files, sharding_output_name_prefix,
n_train_shards, n_test_shards, fraction_test_set)
sharding.load_articles()
logger.info("Splitting the articles into sentences.")
sharding.segment_articles_into_sentences(segmenter)
sharding.distribute_articles_over_shards()
sharding.write_shards_to_disk()
logger.info("End to text sharding. Sharding files save as {}".format(
sharding_path))
return sharding_output_name_prefix
def create_data(do_lower_case, max_seq_length, max_predictions_per_seq,
masked_lm_prob, random_seed, dupe_factor, output_dir,
n_train_shards, n_test_shards, sharding_output_name_prefix):
logger.info("=" * 50)
logger.info("Start to create pretrainging data and save it to hdf5 files.")
hdf5_folder = "hdf5_lower_case_" + str(do_lower_case) + "_seq_len_" + str(max_seq_length) \
+ "_max_pred_" + str(max_predictions_per_seq) + "_masked_lm_prob_" + str(masked_lm_prob) \
+ "_random_seed_" + str(random_seed) + "_dupe_factor_" + str(dupe_factor)
if not os.path.exists(os.path.join(output_dir, hdf5_folder)):
os.makedirs(os.path.join(output_dir, hdf5_folder))
hdf5_folder_prefix = os.path.join(output_dir, hdf5_folder, "pretraing")
for i in range(n_train_shards):
last_process = create_record_worker(
sharding_output_name_prefix + "_train",
hdf5_folder_prefix + "_train", i, args.do_lower_case,
args.model_name, args.max_seq_length, args.masked_lm_prob,
args.max_predictions_per_seq, args.random_seed, args.dupe_factor)
last_process.wait()
for i in range(n_test_shards):
last_process = create_record_worker(
sharding_output_name_prefix + '_test', hdf5_folder_prefix + "_test",
i, args.do_lower_case, args.model_name, args.max_seq_length,
args.masked_lm_prob, args.max_predictions_per_seq, args.random_seed,
args.dupe_factor)
last_process.wait()
logger.info(
f"End to create pretrainging data and save it to hdf5 files "
f"{sharding_output_name_prefix}_train and {sharding_output_name_prefix}_test ."
)
if __name__ == "__main__":
if not args.skip_formatting:
formatted_files = do_text_formatting(args.model_name)
else:
logger.info("=" * 50)
logger.info("Skip text formatting, formatted file: %s" %
args.formatted_file)
formatted_files = [args.formatted_file]
sharding_output_name_prefix = do_text_sharding(
args.model_name, formatted_files, args.output_dir, args.n_train_shards,
args.n_test_shards, args.fraction_test_set)
create_data(args.do_lower_case, args.max_seq_length,
args.max_predictions_per_seq, args.masked_lm_prob,
args.random_seed, args.dupe_factor, args.output_dir,
args.n_train_shards, args.n_test_shards,
sharding_output_name_prefix)
```
#### File: sentiment_analysis/textcnn/train.py
```python
from functools import partial
import argparse
import os
import random
import numpy as np
import paddle
from paddlenlp.datasets import load_dataset
from paddlenlp.data import JiebaTokenizer, Pad, Stack, Tuple, Vocab
from data import create_dataloader, convert_example, read_custom_data
from model import TextCNNModel
# yapf: disable
parser = argparse.ArgumentParser(__doc__)
parser.add_argument("--epochs", type=int, default=10, help="Number of epoches for training.")
parser.add_argument('--device', choices=['cpu', 'gpu', 'xpu'], default="gpu", help="Select which device to train model, defaults to gpu.")
parser.add_argument("--lr", type=float, default=5e-5, help="Learning rate used to train.")
parser.add_argument("--save_dir", type=str, default='checkpoints/', help="Directory to save model checkpoint")
parser.add_argument("--data_path", type=str, default='./RobotChat', help="The path of datasets to be loaded")
parser.add_argument("--batch_size", type=int, default=64, help="Total examples' number of a batch for training.")
parser.add_argument("--vocab_path", type=str, default="./robot_chat_word_dict.txt", help="The directory to dataset.")
parser.add_argument("--init_from_ckpt", type=str, default=None, help="The path of checkpoint to be loaded.")
args = parser.parse_args()
# yapf: enable
def set_seed(seed=1000):
"""Sets random seed."""
random.seed(seed)
np.random.seed(seed)
paddle.seed(seed)
if __name__ == "__main__":
paddle.set_device(args.device)
set_seed()
# Load vocab.
if not os.path.exists(args.vocab_path):
raise RuntimeError('The vocab_path can not be found in the path %s' %
args.vocab_path)
vocab = Vocab.load_vocabulary(
args.vocab_path, unk_token='[UNK]', pad_token='[PAD]')
# Load datasets.
dataset_names = ['train.tsv', 'dev.tsv', 'test.tsv']
train_ds, dev_ds, test_ds = [load_dataset(read_custom_data, \
filename=os.path.join(args.data_path, dataset_name), lazy=False) for dataset_name in dataset_names]
tokenizer = JiebaTokenizer(vocab)
trans_fn = partial(convert_example, tokenizer=tokenizer)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=vocab.token_to_idx.get('[PAD]', 0)),
Stack(dtype='int64') # label
): [data for data in fn(samples)]
train_loader = create_dataloader(
train_ds,
batch_size=args.batch_size,
mode='train',
batchify_fn=batchify_fn,
trans_fn=trans_fn)
dev_loader = create_dataloader(
dev_ds,
batch_size=args.batch_size,
mode='validation',
batchify_fn=batchify_fn,
trans_fn=trans_fn)
test_loader = create_dataloader(
test_ds,
batch_size=args.batch_size,
mode='test',
batchify_fn=batchify_fn,
trans_fn=trans_fn)
label_map = {0: 'negative', 1: 'neutral', 2: 'positive'}
vocab_size = len(vocab)
num_classes = len(label_map)
pad_token_id = vocab.to_indices('[PAD]')
model = TextCNNModel(
vocab_size,
num_classes,
padding_idx=pad_token_id,
ngram_filter_sizes=(1, 2, 3))
if args.init_from_ckpt and os.path.isfile(args.init_from_ckpt):
state_dict = paddle.load(args.init_from_ckpt)
model.set_dict(state_dict)
model = paddle.Model(model)
optimizer = paddle.optimizer.Adam(
parameters=model.parameters(), learning_rate=args.lr)
# Define loss and metric.
criterion = paddle.nn.CrossEntropyLoss()
metric = paddle.metric.Accuracy()
model.prepare(optimizer, criterion, metric)
# Start training and evaluating.
callback = paddle.callbacks.ProgBarLogger(log_freq=10, verbose=3)
model.fit(train_loader,
dev_loader,
epochs=args.epochs,
save_dir=args.save_dir,
callbacks=callback)
# Evaluate on test dataset
print('Start to evaluate on test dataset...')
model.evaluate(test_loader, log_freq=len(test_loader))
```
#### File: text_correction/ernie-csc/download.py
```python
import os
import sys
import argparse
from paddle.utils.download import get_path_from_url
parser = argparse.ArgumentParser()
parser.add_argument(
'-d',
'--data_dir',
help='directory to save data to',
type=str,
default='./')
parser.add_argument(
'-u',
'--url',
help='URL of target',
type=str,
default="https://bj.bcebos.com/paddlenlp/datasets/sighan_test.zip")
args = parser.parse_args()
def main():
get_path_from_url(args.url, args.data_dir)
if __name__ == '__main__':
sys.exit(main())
```
#### File: text_correction/ernie-csc/predict.py
```python
import os
import argparse
import numpy as np
from functools import partial
import paddle
import paddlenlp as ppnlp
from paddle import inference
from paddlenlp.data import Stack, Tuple, Pad, Vocab
from paddlenlp.transformers import ErnieTokenizer
from utils import convert_example, parse_decode
# yapf: disable
parser = argparse.ArgumentParser(__doc__)
parser.add_argument("--model_file", type=str, required=True, default='./static_graph_params.pdmodel', help="The path to model info in static graph.")
parser.add_argument("--params_file", type=str, required=True, default='./static_graph_params.pdiparams', help="The path to parameters in static graph.")
parser.add_argument("--batch_size", type=int, default=2, help="The number of sequences contained in a mini-batch.")
parser.add_argument("--max_seq_len", type=int, default=64, help="Number of words of the longest seqence.")
parser.add_argument("--device", default="gpu", type=str, choices=["cpu", "gpu"] ,help="The device to select to train the model, is must be cpu/gpu.")
parser.add_argument("--pinyin_vocab_file_path", type=str, default="pinyin_vocab.txt", help="pinyin vocab file path")
args = parser.parse_args()
# yapf: enable
class Predictor(object):
def __init__(self, model_file, params_file, device, max_seq_length,
tokenizer, pinyin_vocab):
self.max_seq_length = max_seq_length
config = paddle.inference.Config(model_file, params_file)
if device == "gpu":
# set GPU configs accordingly
config.enable_use_gpu(100, 0)
elif device == "cpu":
# set CPU configs accordingly,
# such as enable_mkldnn, set_cpu_math_library_num_threads
config.disable_gpu()
config.switch_use_feed_fetch_ops(False)
self.predictor = paddle.inference.create_predictor(config)
self.input_handles = [
self.predictor.get_input_handle(name)
for name in self.predictor.get_input_names()
]
self.det_error_probs_handle = self.predictor.get_output_handle(
self.predictor.get_output_names()[0])
self.corr_logits_handle = self.predictor.get_output_handle(
self.predictor.get_output_names()[1])
self.tokenizer = tokenizer
self.pinyin_vocab = pinyin_vocab
def predict(self, data, batch_size=1):
"""
Predicts the data labels.
Args:
            data (obj:`List(Example)`): The processed data, each element of which is an Example (namedtuple) object.
                An Example object contains `text` (word_ids) and `seq_len` (sequence length).
            batch_size (obj:`int`, defaults to 1): The batch size.
        Returns:
            results (obj:`list`): All the predicted labels.
"""
examples = []
texts = []
trans_func = partial(
convert_example,
tokenizer=self.tokenizer,
pinyin_vocab=self.pinyin_vocab,
max_seq_length=self.max_seq_length,
is_test=True)
batchify_fn = lambda samples, fn=Tuple(
Pad(axis=0, pad_val=self.tokenizer.pad_token_id, dtype='int64'), # input
Pad(axis=0, pad_val=self.tokenizer.pad_token_type_id, dtype='int64'), # segment
Pad(axis=0, pad_val=self.pinyin_vocab.token_to_idx[self.pinyin_vocab.pad_token], dtype='int64'), # pinyin
Stack(axis=0, dtype='int64'), # length
): [data for data in fn(samples)]
for text in data:
example = {"source": text.strip()}
input_ids, token_type_ids, pinyin_ids, length = trans_func(example)
examples.append((input_ids, token_type_ids, pinyin_ids, length))
texts.append(example["source"])
batch_examples = [
examples[idx:idx + batch_size]
for idx in range(0, len(examples), batch_size)
]
batch_texts = [
texts[idx:idx + batch_size]
for idx in range(0, len(examples), batch_size)
]
results = []
for examples, texts in zip(batch_examples, batch_texts):
token_ids, token_type_ids, pinyin_ids, length = batchify_fn(
examples)
self.input_handles[0].copy_from_cpu(token_ids)
self.input_handles[1].copy_from_cpu(pinyin_ids)
self.predictor.run()
det_error_probs = self.det_error_probs_handle.copy_to_cpu()
corr_logits = self.corr_logits_handle.copy_to_cpu()
det_pred = det_error_probs.argmax(axis=-1)
char_preds = corr_logits.argmax(axis=-1)
for i in range(len(length)):
pred_result = parse_decode(texts[i], char_preds[i], det_pred[i],
length[i], self.tokenizer,
self.max_seq_length)
results.append(''.join(pred_result))
return results
if __name__ == "__main__":
tokenizer = ErnieTokenizer.from_pretrained("ernie-1.0")
pinyin_vocab = Vocab.load_vocabulary(
args.pinyin_vocab_file_path, unk_token='[UNK]', pad_token='[PAD]')
predictor = Predictor(args.model_file, args.params_file, args.device,
args.max_seq_len, tokenizer, pinyin_vocab)
samples = [
'遇到逆竟时,我们必须勇于面对,而且要愈挫愈勇,这样我们才能朝著成功之路前进。',
'人生就是如此,经过磨练才能让自己更加拙壮,才能使自己更加乐观。',
]
results = predictor.predict(samples, batch_size=args.batch_size)
for source, target in zip(samples, results):
print("Source:", source)
print("Target:", target)
```
#### File: text_correction/ernie-csc/sighan_evaluate.py
```python
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--pred_file", "-p", required=True, type=str, help="")
parser.add_argument("--truth_file", "-t", required=True, type=str, help="")
args = parser.parse_args()
def main(args):
detect_tp, correct_tp, pos, neg, fp = 0, 0, 0, 0, 0
pred_dict = dict()
truth_dict = dict()
fpred = open(args.pred_file, 'r', encoding='utf-8')
ftruth = open(args.truth_file, 'r', encoding='utf-8')
for idx, (pred, truth) in enumerate(zip(fpred, ftruth)):
pred_tokens = pred.strip().split(" ")
truth_tokens = truth.strip().split(" ")
pred_id = pred_tokens[0]
truth_id = truth_tokens[0]
pred_tokens = pred_tokens[1:]
truth_tokens = truth_tokens[1:]
detect_truth_positions = [
int(truth_token.strip(","))
for i, truth_token in enumerate(truth_tokens) if i % 2 == 0
]
correct_truth_tokens = [
truth_token.strip(",") for i, truth_token in enumerate(truth_tokens)
if i % 2 == 1
]
detect_pred_positions = [
int(pred_token.strip(","))
for i, pred_token in enumerate(pred_tokens) if i % 2 == 0
]
correct_pred_tokens = [
pred_token.strip(",") for i, pred_token in enumerate(pred_tokens)
if i % 2 == 1
]
pred_dict[pred_id] = (detect_pred_positions, correct_pred_tokens)
truth_dict[truth_id] = (detect_truth_positions, correct_truth_tokens)
assert sorted(pred_dict.keys()) == sorted(truth_dict.keys(
)), "Prediction file should have all prediction result in truth file"
for pid, predition in pred_dict.items():
truth = truth_dict[pid]
if predition[0][0] != 0:
pos += 1
if sorted(zip(*predition)) == sorted(zip(*truth)):
correct_tp += 1
if truth[0][0] == 0:
fp += 1
if truth[0][0] != 0:
if sorted(predition[0]) == sorted(truth[0]):
detect_tp += 1
neg += 1
eps = 1e-9
# Detection level
detect_pos = detect_tp + fp
if detect_pos > 0 and neg > 0:
detect_precision = detect_tp * 1.0 / detect_pos
detect_recall = detect_tp * 1.0 / neg
detect_f1 = 2. * detect_precision * detect_recall / (
detect_precision + detect_recall + eps)
else:
detect_precision = 0
detect_recall = 0
detect_f1 = 0
# Correction level
correct_pos = correct_tp + fp
if correct_pos > 0 and neg > 0:
correct_precision = correct_tp * 1.0 / correct_pos
correct_recall = correct_tp * 1.0 / neg
correct_f1 = 2. * correct_precision * correct_recall / (
correct_precision + correct_recall + eps)
else:
correct_precision = 0
correct_recall = 0
correct_f1 = 0
print("==========================================================")
print("Overall Performance")
print("==========================================================")
print("\nDetection Level")
print("\tPrecision = {:.4f} ({}/{})".format(detect_precision, detect_tp,
detect_pos))
print("\tRecall = {:.4f} ({}/{})".format(detect_recall, detect_tp, neg))
print("\tF1-Score = {:.4f} ((2*{:.4f}*{:.4f})/({:.4f}+{:.4f}))".format(
detect_f1, detect_precision, detect_recall, detect_precision,
detect_recall))
print("\nCorrection Level")
print("\tPrecision = {:.4f} ({}/{})".format(correct_precision, correct_tp,
correct_pos))
print("\tRecall = {:.4f} ({}/{})".format(correct_recall, correct_tp, neg))
print("\tF1-Score = {:.4f} ((2*{:.4f}*{:.4f})/({:.4f}+{:.4f}))".format(
correct_f1, correct_precision, correct_recall, correct_precision,
correct_recall))
print("==========================================================\n")
if __name__ == "__main__":
main(args)
```
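The parser above expects each line of both files to contain an id token followed by comma-terminated position/character pairs, with a single position `0` meaning the sentence has no error. An illustration with invented ids and characters:
```python
# Illustrative lines in the format sighan_evaluate parses; not taken from any dataset.
sample_truth = "pid-001, 3, 境, 17, 茁\npid-002, 0\n"   # two errors / no error
sample_pred = "pid-001, 3, 境, 17, 茁\npid-002, 0\n"    # a perfect prediction
```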
#### File: text_generation/unimo-text/run_gen.py
```python
import os
import time
import math
import argparse
import json
import paddle
import paddle.distributed as dist
import paddle.nn as nn
import paddle.nn.functional as F
from paddlenlp.transformers import LinearDecayWithWarmup
from paddle.optimizer import AdamW
from paddlenlp.datasets import load_dataset
from paddlenlp.transformers import UNIMOLMHeadModel, UNIMOTokenizer, BasicTokenizer
from paddlenlp.metrics import BLEU
from gen_utils import print_args, set_seed, create_data_loader, select_sum
# yapf: disable
def parse_args():
parser = argparse.ArgumentParser(__doc__)
parser.add_argument('--dataset_name', type=str, default='dureader_qg', help='The name of the dataset to load.')
parser.add_argument('--model_name_or_path', type=str, default='unimo-text-1.0', help='The path or shortcut name of the pre-trained model.')
parser.add_argument("--train_file", type=str, required=False, default=None, help="Train data path.")
parser.add_argument("--predict_file", type=str, required=False, default=None, help="Predict data path.")
parser.add_argument('--save_dir', type=str, default='./checkpoints', help='The directory where the checkpoints will be saved.')
parser.add_argument('--logging_steps', type=int, default=100, help='Log every X updates steps.')
parser.add_argument('--save_steps', type=int, default=1000, help='Save checkpoint every X updates steps.')
parser.add_argument('--seed', type=int, default=1, help='Random seed for initialization.')
parser.add_argument('--batch_size', type=int, default=16, help='Batch size per GPU/CPU for training.')
parser.add_argument('--learning_rate', type=float, default=5e-5, help='The initial learning rate.')
parser.add_argument('--weight_decay', type=float, default=0.01, help='The weight decay for optimizer.')
parser.add_argument('--epochs', type=int, default=3, help='Total number of training epochs to perform.')
    parser.add_argument('--warmup_propotion', type=float, default=0.02, help='The proportion of total training steps used for linear warmup.')
parser.add_argument('--max_grad_norm', type=float, default=1.0, help='The max value of grad norm.')
parser.add_argument('--beta1', type=float, default=0.9, help='beta1')
parser.add_argument('--beta2', type=float, default=0.98, help='beta2')
parser.add_argument('--epsilon', type=float, default=1e-6, help='epsilon')
parser.add_argument('--max_seq_len', type=int, default=512, help='The maximum sequence length of training.')
parser.add_argument('--max_dec_len', type=int, default=20, help='The maximum sequence length of decoding.')
parser.add_argument('--min_dec_len', type=int, default=3, help='The minimal sequence length of decoding.')
parser.add_argument('--max_target_len', type=int, default=30, help='The maximum target sequence length of training.')
parser.add_argument('--max_title_len', type=int, default=30, help='The maximum title sequence length of training.')
parser.add_argument('--num_return_sequences', type=int, default=1, help='The numbers of returned sequences for one input in generation.')
parser.add_argument('--decode_strategy', type=str, default='beam_search', help='The decode strategy in generation.')
parser.add_argument('--top_k', type=int, default=0, help='The number of highest probability vocabulary tokens to keep for top-k sampling.')
parser.add_argument('--temperature', type=float, default=1.0, help='The value used to module the next token probabilities.')
parser.add_argument('--top_p', type=float, default=1.0, help='The cumulative probability for top-p sampling.')
parser.add_argument('--num_beams', type=int, default=6, help='The number of beams for beam search.')
parser.add_argument('--length_penalty', type=float, default=1.2, help='The exponential penalty to the sequence length for beam search.')
parser.add_argument('--device', type=str, default='gpu', help='The device to select for training the model.')
parser.add_argument('--output_path', type=str, default='./predict.txt', help='The file path where the infer result will be saved.')
parser.add_argument("--do_train", action='store_true', help="Whether to train the model.")
parser.add_argument("--do_predict", action='store_true', help="Whether to eval and predict.")
args = parser.parse_args()
return args
# yapf: enable
def calc_bleu(preds, targets):
assert len(preds) == len(targets), (
'The length of pred_responses should be equal to the length of '
'target_responses. But received {} and {}.'.format(
len(preds), len(targets)))
bleu4 = BLEU(n_size=4)
tokenizer = BasicTokenizer()
for pred, target in zip(preds, targets):
pred_tokens = tokenizer.tokenize(pred)
target_token = tokenizer.tokenize(target)
bleu4.add_inst(pred_tokens, [target_token])
print('\n' + '*' * 15)
print('The auto evaluation result is:')
print('BLEU-4:', bleu4.score())
def save_ckpt(model, tokenizer, save_dir, name):
output_dir = os.path.join(save_dir, "model_{}".format(name))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
# Need better way to get inner model of DataParallel
model_to_save = model._layers if isinstance(model,
paddle.DataParallel) else model
model_to_save.save_pretrained(output_dir)
tokenizer.save_pretrained(output_dir)
def run(args):
paddle.set_device(args.device)
world_size = dist.get_world_size()
if world_size > 1:
dist.init_parallel_env()
set_seed(args.seed)
model = UNIMOLMHeadModel.from_pretrained(args.model_name_or_path)
tokenizer = UNIMOTokenizer.from_pretrained(args.model_name_or_path)
if world_size > 1:
model = paddle.DataParallel(model)
train_ds = load_dataset(
args.dataset_name, splits='train', data_files=args.train_file)
dev_ds = load_dataset(
args.dataset_name, splits='dev', data_files=args.predict_file)
train_ds, train_data_loader = create_data_loader(train_ds, tokenizer, args,
'train')
dev_ds, dev_data_loader = create_data_loader(dev_ds, tokenizer, args,
'test')
if args.do_train:
num_training_steps = args.epochs * len(train_data_loader)
lr_scheduler = LinearDecayWithWarmup(
args.learning_rate, num_training_steps, args.warmup_propotion)
# Generate parameter names needed to perform weight decay.
# All bias and LayerNorm parameters are excluded.
decay_params = [
p.name for n, p in model.named_parameters()
if not any(nd in n for nd in ["bias", "norm"])
]
optimizer = AdamW(
learning_rate=lr_scheduler,
parameters=model.parameters(),
weight_decay=args.weight_decay,
beta1=args.beta1,
beta2=args.beta2,
epsilon=args.epsilon,
apply_decay_param_fun=lambda x: x in decay_params,
grad_clip=paddle.nn.ClipGradByGlobalNorm(args.max_grad_norm))
step = 0
total_time = 0.0
for epoch in range(args.epochs):
print('\nEpoch %d/%d' % (epoch + 1, args.epochs))
batch_start_time = time.time()
for inputs in train_data_loader:
step += 1
labels = inputs[-1]
logits = model(*inputs[:-1])
labels = paddle.nn.functional.one_hot(
labels, num_classes=logits.shape[-1])
labels = paddle.nn.functional.label_smooth(labels)
loss = F.cross_entropy(logits, labels, soft_label=True)
loss.backward()
optimizer.step()
lr_scheduler.step()
optimizer.clear_grad()
total_time += (time.time() - batch_start_time)
if step % args.logging_steps == 0:
ppl = paddle.exp(loss)
print(
'step %d - loss: %.4f - ppl: %.4f - lr: %.7f - %.3fs/step'
% (step, loss, ppl, optimizer.get_lr(),
total_time / args.logging_steps))
total_time = 0.0
if step % args.save_steps == 0 or step >= num_training_steps:
if dist.get_rank() == 0:
save_ckpt(model, tokenizer, args.save_dir, step)
print('Saved step {} model.\n'.format(step))
if args.do_predict:
model_eval = model._layers if isinstance(
model, paddle.DataParallel) else model
evaluation(model_eval, dev_data_loader, args,
tokenizer)
batch_start_time = time.time()
print('\nTraining completed.')
elif args.do_predict:
model_eval = model._layers if isinstance(model,
paddle.DataParallel) else model
evaluation(model_eval, dev_data_loader, args, tokenizer)
@paddle.no_grad()
def evaluation(model, data_loader, args, tokenizer):
print('\nEval begin...')
model.eval()
pred_ref = []
total_time = 0.0
start_time = time.time()
for step, inputs in enumerate(data_loader, 1):
input_ids, token_type_ids, position_ids, attention_mask = inputs
ids, scores = model.generate(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
max_length=args.max_dec_len,
min_length=args.min_dec_len,
decode_strategy=args.decode_strategy,
temperature=args.temperature,
top_k=args.top_k,
top_p=args.top_p,
num_beams=args.num_beams,
length_penalty=args.length_penalty,
num_return_sequences=args.num_return_sequences,
bos_token_id=tokenizer.cls_token_id,
eos_token_id=tokenizer.mask_token_id)
total_time += (time.time() - start_time)
if step % args.logging_steps == 0:
print('step %d - %.3fs/step' %
(step, total_time / args.logging_steps))
total_time = 0.0
results = select_sum(ids, scores, tokenizer, args.max_dec_len,
args.num_return_sequences)
pred_ref.extend(results)
start_time = time.time()
with open(args.output_path, 'w', encoding='utf-8') as fout:
for ref in pred_ref:
fout.write(ref + '\n')
print('\nSave inference result into: %s' % args.output_path)
if 'target' in data_loader.dataset[0].keys():
targets = [example['target'] for example in data_loader.dataset]
calc_bleu(pred_ref, targets)
model.train()
return
if __name__ == '__main__':
args = parse_args()
print_args(args)
run(args)
```
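For a quick sanity check, the `calc_bleu` helper above can be exercised on its own; the sentences below are made up, and the snippet assumes the definitions in `run_gen.py` are importable:
```python
# Toy usage of calc_bleu; strings are illustrative only.
preds = ["百度 的 总部 在 哪里", "相对论 是 谁 提出 的"]
targets = ["百度 总部 在 哪里", "相对论 是 谁 提出 的"]
calc_bleu(preds, targets)   # prints the corpus-level BLEU-4 score for the two pairs
```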
#### File: erniesage/models/model.py
```python
import pgl
import paddle
import paddle.nn as nn
import numpy as np
from paddlenlp.transformers import ErniePretrainedModel
from models.encoder import Encoder
from models.loss import LossFactory
__all__ = ["ErnieSageForLinkPrediction"]
class ErnieSageForLinkPrediction(ErniePretrainedModel):
"""ErnieSage for link prediction task.
"""
def __init__(self, ernie, config):
""" Model which Based on the PaddleNLP PretrainedModel
Note:
1. the ernie must be the first argument.
2. must set self.XX = ernie to load weights.
3. the self.config keyword is taken by PretrainedModel class.
Args:
ernie (nn.Layer): the submodule layer of ernie model.
config (Dict): the config file
"""
super(ErnieSageForLinkPrediction, self).__init__()
self.config_file = config
self.ernie = ernie
self.encoder = Encoder.factory(self.config_file, self.ernie)
self.loss_func = LossFactory(self.config_file)
def forward(self, graphs, datas):
"""Forward function of link prediction task.
Args:
graphs (Graph List): the Graph list.
datas (Tensor List): other input of the model.
Returns:
Tensor: loss and output tensors.
"""
term_ids, user_index, pos_item_index, neg_item_index, user_real_index, pos_item_real_index = datas
# encoder model
outputs = self.encoder(graphs, term_ids,
[user_index, pos_item_index, neg_item_index])
user_feat, pos_item_feat, neg_item_feat = outputs
# calc loss
if self.config_file.neg_type == "batch_neg":
neg_item_feat = pos_item_feat
pos = paddle.sum(user_feat * pos_item_feat, -1, keepdim=True) # [B, 1]
neg = paddle.matmul(
user_feat, neg_item_feat, transpose_y=True) # [B, B]
loss = self.loss_func(pos, neg)
# return loss, outputs
return loss, outputs + [user_real_index, pos_item_real_index]
```
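The in-batch negative trick in the forward pass above (reusing the positive item features of the other samples as negatives when `neg_type == "batch_neg"`) reduces to one elementwise dot product and one matrix product; a small self-contained sketch with random tensors (shapes arbitrary):
```python
import paddle

# Hypothetical batch of 4 users/items with 8-dim embeddings.
user_feat = paddle.randn([4, 8])
pos_item_feat = paddle.randn([4, 8])

pos = paddle.sum(user_feat * pos_item_feat, -1, keepdim=True)     # [4, 1] positive scores
neg = paddle.matmul(user_feat, pos_item_feat, transpose_y=True)   # [4, 4] scores against in-batch negatives
print(pos.shape, neg.shape)
```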
#### File: text_matching/sentence_transformers/model.py
```python
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
class SentenceTransformer(nn.Layer):
def __init__(self, pretrained_model, dropout=None):
super().__init__()
self.ptm = pretrained_model
self.dropout = nn.Dropout(dropout if dropout is not None else 0.1)
# num_labels = 2 (similar or dissimilar)
self.classifier = nn.Linear(self.ptm.config["hidden_size"] * 3, 2)
def forward(self,
query_input_ids,
title_input_ids,
query_token_type_ids=None,
query_position_ids=None,
query_attention_mask=None,
title_token_type_ids=None,
title_position_ids=None,
title_attention_mask=None):
query_token_embedding, _ = self.ptm(
query_input_ids, query_token_type_ids, query_position_ids,
query_attention_mask)
query_token_embedding = self.dropout(query_token_embedding)
query_attention_mask = paddle.unsqueeze(
(query_input_ids != self.ptm.pad_token_id
).astype(self.ptm.pooler.dense.weight.dtype),
axis=2)
# Set token embeddings to 0 for padding tokens
query_token_embedding = query_token_embedding * query_attention_mask
query_sum_embedding = paddle.sum(query_token_embedding, axis=1)
query_sum_mask = paddle.sum(query_attention_mask, axis=1)
query_mean = query_sum_embedding / query_sum_mask
title_token_embedding, _ = self.ptm(
title_input_ids, title_token_type_ids, title_position_ids,
title_attention_mask)
title_token_embedding = self.dropout(title_token_embedding)
title_attention_mask = paddle.unsqueeze(
(title_input_ids != self.ptm.pad_token_id
).astype(self.ptm.pooler.dense.weight.dtype),
axis=2)
# Set token embeddings to 0 for padding tokens
title_token_embedding = title_token_embedding * title_attention_mask
title_sum_embedding = paddle.sum(title_token_embedding, axis=1)
title_sum_mask = paddle.sum(title_attention_mask, axis=1)
title_mean = title_sum_embedding / title_sum_mask
sub = paddle.abs(paddle.subtract(query_mean, title_mean))
projection = paddle.concat([query_mean, title_mean, sub], axis=-1)
logits = self.classifier(projection)
return logits
```
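The masked mean pooling used for both query and title above can be checked in isolation; a minimal sketch with random tensors (batch size, length, and hidden size are arbitrary, and the pad id is assumed to be 0):
```python
import paddle

# Hypothetical batch: 2 sequences of 5 tokens, 16-dim token embeddings.
token_embedding = paddle.randn([2, 5, 16])
input_ids = paddle.to_tensor([[101, 7, 8, 0, 0], [101, 9, 0, 0, 0]])

mask = paddle.unsqueeze((input_ids != 0).astype('float32'), axis=2)   # [2, 5, 1]
token_embedding = token_embedding * mask                              # zero out pad positions
mean_embedding = paddle.sum(token_embedding, axis=1) / paddle.sum(mask, axis=1)
print(mean_embedding.shape)                                           # [2, 16]
```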
#### File: text_matching/simnet/model.py
```python
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
import paddlenlp as nlp
class SimNet(nn.Layer):
def __init__(self,
network,
vocab_size,
num_classes,
emb_dim=128,
pad_token_id=0):
super().__init__()
network = network.lower()
if network == 'bow':
self.model = BoWModel(
vocab_size, num_classes, emb_dim, padding_idx=pad_token_id)
elif network == 'cnn':
self.model = CNNModel(
vocab_size, num_classes, emb_dim, padding_idx=pad_token_id)
elif network == 'gru':
self.model = GRUModel(
vocab_size,
num_classes,
emb_dim,
direction='forward',
padding_idx=pad_token_id)
elif network == 'lstm':
self.model = LSTMModel(
vocab_size,
num_classes,
emb_dim,
direction='forward',
padding_idx=pad_token_id)
else:
raise ValueError(
"Unknown network: %s, it must be one of bow, cnn, lstm or gru."
% network)
def forward(self, query, title, query_seq_len=None, title_seq_len=None):
logits = self.model(query, title, query_seq_len, title_seq_len)
return logits
class BoWModel(nn.Layer):
"""
This class implements the Bag of Words Classification Network model to classify texts.
At a high level, the model starts by embedding the tokens and running them through
    a word embedding. Then, we encode these representations with a `BoWEncoder`.
Lastly, we take the output of the encoder to create a final representation,
which is passed through some feed-forward layers to output a logits (`output_layer`).
Args:
vocab_size (obj:`int`): The vocabulary size.
emb_dim (obj:`int`, optional, defaults to 128): The embedding dimension.
        padding_idx (obj:`int`, optional, defaults to 0): The pad token index.
hidden_size (obj:`int`, optional, defaults to 128): The first full-connected layer hidden size.
fc_hidden_size (obj:`int`, optional, defaults to 96): The second full-connected layer hidden size.
num_classes (obj:`int`): All the labels that the data has.
"""
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
fc_hidden_size=128):
super().__init__()
self.embedder = nn.Embedding(
vocab_size, emb_dim, padding_idx=padding_idx)
self.bow_encoder = nlp.seq2vec.BoWEncoder(emb_dim)
self.fc = nn.Linear(self.bow_encoder.get_output_dim() * 2,
fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, query, title, query_seq_len=None, title_seq_len=None):
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_query = self.embedder(query)
embedded_title = self.embedder(title)
# Shape: (batch_size, embedding_dim)
summed_query = self.bow_encoder(embedded_query)
summed_title = self.bow_encoder(embedded_title)
encoded_query = paddle.tanh(summed_query)
encoded_title = paddle.tanh(summed_title)
# Shape: (batch_size, embedding_dim*2)
contacted = paddle.concat([encoded_query, encoded_title], axis=-1)
# Shape: (batch_size, fc_hidden_size)
fc_out = paddle.tanh(self.fc(contacted))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc_out)
# probs = F.softmax(logits, axis=-1)
return logits
class LSTMModel(nn.Layer):
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
lstm_hidden_size=128,
direction='forward',
lstm_layers=1,
dropout_rate=0.0,
pooling_type=None,
fc_hidden_size=128):
super().__init__()
self.embedder = nn.Embedding(
num_embeddings=vocab_size,
embedding_dim=emb_dim,
padding_idx=padding_idx)
self.lstm_encoder = nlp.seq2vec.LSTMEncoder(
emb_dim,
lstm_hidden_size,
num_layers=lstm_layers,
direction=direction,
dropout=dropout_rate)
self.fc = nn.Linear(self.lstm_encoder.get_output_dim() * 2,
fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, query, title, query_seq_len, title_seq_len):
assert query_seq_len is not None and title_seq_len is not None
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_query = self.embedder(query)
embedded_title = self.embedder(title)
# Shape: (batch_size, lstm_hidden_size)
query_repr = self.lstm_encoder(
embedded_query, sequence_length=query_seq_len)
title_repr = self.lstm_encoder(
embedded_title, sequence_length=title_seq_len)
# Shape: (batch_size, 2*lstm_hidden_size)
contacted = paddle.concat([query_repr, title_repr], axis=-1)
# Shape: (batch_size, fc_hidden_size)
fc_out = paddle.tanh(self.fc(contacted))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc_out)
# probs = F.softmax(logits, axis=-1)
return logits
class GRUModel(nn.Layer):
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
gru_hidden_size=128,
direction='forward',
gru_layers=1,
dropout_rate=0.0,
pooling_type=None,
fc_hidden_size=96):
super().__init__()
self.embedder = nn.Embedding(
num_embeddings=vocab_size,
embedding_dim=emb_dim,
padding_idx=padding_idx)
self.gru_encoder = nlp.seq2vec.GRUEncoder(
emb_dim,
gru_hidden_size,
num_layers=gru_layers,
direction=direction,
dropout=dropout_rate)
self.fc = nn.Linear(self.gru_encoder.get_output_dim() * 2,
fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, query, title, query_seq_len, title_seq_len):
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_query = self.embedder(query)
embedded_title = self.embedder(title)
# Shape: (batch_size, gru_hidden_size)
query_repr = self.gru_encoder(
embedded_query, sequence_length=query_seq_len)
title_repr = self.gru_encoder(
embedded_title, sequence_length=title_seq_len)
# Shape: (batch_size, 2*gru_hidden_size)
contacted = paddle.concat([query_repr, title_repr], axis=-1)
# Shape: (batch_size, fc_hidden_size)
fc_out = paddle.tanh(self.fc(contacted))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc_out)
# probs = F.softmax(logits, axis=-1)
return logits
class CNNModel(nn.Layer):
"""
This class implements the
Convolution Neural Network model.
At a high level, the model starts by embedding the tokens and running them through
    a word embedding. Then, we encode these representations with a `CNNEncoder`.
The CNN has one convolution layer for each ngram filter size. Each convolution operation gives
out a vector of size num_filter. The number of times a convolution layer will be used
is `num_tokens - ngram_size + 1`. The corresponding maxpooling layer aggregates all these
outputs from the convolution layer and outputs the max.
Lastly, we take the output of the encoder to create a final representation,
which is passed through some feed-forward layers to output a logits (`output_layer`).
Args:
vocab_size (obj:`int`): The vocabulary size.
emb_dim (obj:`int`, optional, defaults to 128): The embedding dimension.
        padding_idx (obj:`int`, optional, defaults to 0): The pad token index.
num_classes (obj:`int`): All the labels that the data has.
"""
def __init__(self,
vocab_size,
num_classes,
emb_dim=128,
padding_idx=0,
num_filter=256,
ngram_filter_sizes=(3, ),
fc_hidden_size=128):
super().__init__()
self.padding_idx = padding_idx
self.embedder = nn.Embedding(
vocab_size, emb_dim, padding_idx=padding_idx)
self.encoder = nlp.seq2vec.CNNEncoder(
emb_dim=emb_dim,
num_filter=num_filter,
ngram_filter_sizes=ngram_filter_sizes)
self.fc = nn.Linear(self.encoder.get_output_dim() * 2, fc_hidden_size)
self.output_layer = nn.Linear(fc_hidden_size, num_classes)
def forward(self, query, title, query_seq_len=None, title_seq_len=None):
# Shape: (batch_size, num_tokens, embedding_dim)
embedded_query = self.embedder(query)
embedded_title = self.embedder(title)
# Shape: (batch_size, num_filter)
query_repr = self.encoder(embedded_query)
title_repr = self.encoder(embedded_title)
# Shape: (batch_size, 2*num_filter)
contacted = paddle.concat([query_repr, title_repr], axis=-1)
# Shape: (batch_size, fc_hidden_size)
fc_out = paddle.tanh(self.fc(contacted))
# Shape: (batch_size, num_classes)
logits = self.output_layer(fc_out)
# probs = F.softmax(logits, axis=-1)
return logits
```
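A quick smoke test of the `SimNet` wrapper above; the vocabulary size, batch size, and sequence lengths below are made up, and the snippet assumes the module is importable:
```python
import paddle

# Hypothetical inputs: token ids in [1, 1000), batch of 4.
model = SimNet(network='bow', vocab_size=1000, num_classes=2)
query = paddle.randint(low=1, high=1000, shape=[4, 12])
title = paddle.randint(low=1, high=1000, shape=[4, 15])
logits = model(query, title)    # seq_len args are not needed for the BoW network
print(logits.shape)             # [4, 2]
```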
#### File: text_to_knowledge/ernie-ctm/metric.py
```python
from typing import List, Tuple
import paddle
class SequenceAccuracy(paddle.metric.Metric):
"""
Masked language model pre-train task accuracy.
"""
def __init__(self):
super(SequenceAccuracy, self).__init__()
self.correct_k = 0
self.total = 0
def compute(self, pred, label, ignore_index):
pred = paddle.argmax(pred, 1)
active_acc = label.reshape([-1]) != ignore_index
active_pred = pred.masked_select(active_acc)
active_labels = label.masked_select(active_acc)
correct = active_pred.equal(active_labels)
return correct
def update(self, correct):
self.correct_k += correct.cast('float32').sum(0)
self.total += correct.shape[0]
def reset(self):
self.correct_k = 0
self.total = 0
def accumulate(self):
return float(self.correct_k) / self.total
def name(self):
return "Masked Language Model Accuracy"
def wordseg_hard_acc(list_a: List[Tuple[str, str]],
list_b: List[Tuple[str, str]]) -> float:
"""
Calculate extra metrics of word-seg
Args:
list_a: prediction list
list_b: real list
Returns:
acc: the extra accuracy
"""
p, q = 0, 0
a_l, b_l = 0, 0
acc = 0.0
while q < len(list_b) and p < len(list_a):
a_r = a_l + len(list_a[p][0]) - 1
b_r = b_l + len(list_b[q][0]) - 1
if a_r < b_l:
p += 1
a_l = a_r + 1
continue
if b_r < a_l:
q += 1
b_l = b_r + 1
continue
if a_l == b_l and a_r == b_r:
acc += 1.0
p += 1
q += 1
a_l = a_r + 1
b_l = b_r + 1
continue
p += 1
return acc
def wordtag_hard_acc(list_a: List[Tuple[str, str]],
list_b: List[Tuple[str, str]]) -> float:
"""
Calculate extra metrics of word-tag
Args:
list_a: prediction list
list_b: real list
Returns:
acc: the extra accuracy
"""
p, q = 0, 0
a_l, b_l = 0, 0
acc = 0.0
while q < len(list_b) and p < len(list_a):
a_r = a_l + len(list_a[p][0]) - 1
b_r = b_l + len(list_b[q][0]) - 1
if a_r < b_l:
p += 1
a_l = a_r + 1
continue
if b_r < a_l:
q += 1
b_l = b_r + 1
continue
if a_l == b_l and a_r == b_r:
if list_a[p][-1] == list_b[q][-1]:
acc += 1.0
p += 1
q += 1
a_l, b_l = a_r + 1, b_r + 1
continue
p += 1
return acc
def wordtag_soft_acc(list_a: List[Tuple[str, str]],
list_b: List[Tuple[str, str]]) -> float:
"""
Calculate extra metrics of word-tag
Args:
list_a: prediction list
list_b: real list
Returns:
acc: the extra accuracy
"""
p, q = 0, 0
a_l, b_l = 0, 0
acc = 0.0
while q < len(list_b) and p < len(list_a):
a_r = a_l + len(list_a[p][0]) - 1
b_r = b_l + len(list_b[q][0]) - 1
if a_r < b_l:
p += 1
a_l = a_r + 1
continue
if b_r < a_l:
q += 1
b_l = b_r + 1
continue
if a_l == b_l and a_r == b_r:
if list_a[p][-1] == list_b[q][-1]:
acc += 1.0
elif list_b[q][-1].startswith(list_a[p][-1]):
acc += 1.0
            elif list_b[q][-1] == "词汇用语":  # compare the tag, not the (word, tag) tuple
acc += 1.0
p += 1
q += 1
a_l, b_l = a_r + 1, b_r + 1
continue
p += 1
return acc
def wordseg_soft_acc(list_a: List[Tuple[str, str]],
list_b: List[Tuple[str, str]]) -> float:
"""
Calculate extra metrics of word-seg
Args:
list_a: prediction list
list_b: real list
Returns:
acc: the extra accuracy
"""
i, j = 0, 0
acc = 0.0
a_l, b_l = 0, 0
while i < len(list_a) and j < len(list_b):
a_r = a_l + len(list_a[i][0]) - 1
b_r = b_l + len(list_b[j][0]) - 1
if a_r < b_l:
i += 1
a_l = a_r + 1
continue
if b_r < a_l:
j += 1
b_l = b_r + 1
continue
if a_l == b_l and a_r == b_r:
acc += 1.0
a_l, b_l = a_r + 1, b_r + 1
i, j = i + 1, j + 1
continue
if a_l == b_l and a_r < b_r:
cnt = 0.0
tmp_a_r = a_r
for k in range(i + 1, len(list_a)):
                tmp_a_r += len(list_a[k][0])  # advance by the word length, not the tuple size
cnt += 1.0
if tmp_a_r == b_r:
acc += cnt
i, j = k + 1, j + 1
a_l, b_l = tmp_a_r + 1, b_r + 1
break
i += 1
continue
if a_l == b_l and a_r > b_r:
tmp_b_r = b_r
for k in range(j + 1, len(list_b)):
                tmp_b_r += len(list_b[k][0])  # advance by the word length, not the tuple size
if tmp_b_r == a_r:
acc += 1.0
i, j = i + 1, k + 1
a_l, b_l = a_r + 1, tmp_b_r + 1
break
j += 1
continue
i += 1
return acc
```
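The two hard-accuracy helpers above differ only in whether the tag must agree in addition to the character span; a small, span-aligned example (tokens and tags made up) shows the contrast:
```python
# Hypothetical outputs where every predicted span aligns with a gold span,
# but the tag of the last word differs.
pred = [("百度", "ORG"), ("是", "v"), ("一家", "m"), ("公司", "nz")]
gold = [("百度", "ORG"), ("是", "v"), ("一家", "m"), ("公司", "n")]

print(wordseg_hard_acc(pred, gold))   # 4.0 -- all four character spans match exactly
print(wordtag_hard_acc(pred, gold))   # 3.0 -- the last span matches but its tag does not
```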
#### File: IGSQL/data_util/atis_vocab.py
```python
from . import snippets
from .vocabulary import Vocabulary, UNK_TOK, DEL_TOK, EOS_TOK
INPUT_FN_TYPES = [UNK_TOK, DEL_TOK, EOS_TOK]
OUTPUT_FN_TYPES = [UNK_TOK, EOS_TOK]
MIN_INPUT_OCCUR = 1
MIN_OUTPUT_OCCUR = 1
class ATISVocabulary():
""" Stores the vocabulary for the ATIS data.
Attributes:
raw_vocab (`Vocabulary`): Vocabulary object.
tokens (`set`): Set of all of the strings in the vocabulary.
inorder_tokens (`list`): List of all tokens, with a strict and
unchanging order.
"""
def __init__(self,
token_sequences,
filename,
params,
is_input='input',
min_occur=1,
anonymizer=None,
skip=None):
if is_input == 'input':
functional_types = INPUT_FN_TYPES
elif is_input == 'output':
functional_types = OUTPUT_FN_TYPES
elif is_input == 'schema':
functional_types = [UNK_TOK]
else:
functional_types = []
self.raw_vocab = Vocabulary(
token_sequences,
filename,
functional_types=functional_types,
min_occur=min_occur,
ignore_fn=lambda x: snippets.is_snippet(x) or (anonymizer and anonymizer.is_anon_tok(x)) or (skip and x in skip)
)
self.tokens = set(self.raw_vocab.token_to_id.keys())
self.inorder_tokens = self.raw_vocab.id_to_token
assert len(self.inorder_tokens) == len(self.raw_vocab)
def __len__(self):
return len(self.raw_vocab)
def token_to_id(self, token):
""" Maps from a token to a unique ID.
Args:
token (`str`): The token to look up.
Returns:
`int`: Uniquely identifying the token.
"""
return self.raw_vocab.token_to_id[token]
def id_to_token(self, identifier):
""" Maps from a unique integer to an identifier.
Args:
identifier (`int`): The unique ID.
Returns:
`str`: Representing the token.
"""
return self.raw_vocab.id_to_token[identifier]
```
#### File: IGSQL/model/decoder.py
```python
from collections import namedtuple
import numpy as np
import paddle
import paddle.nn.functional as F
from . import model_utils
from .token_predictor import PredictionInput, PredictionInputWithSchema
import data_util.snippets as snippet_handler
from . import embedder
from data_util.vocabulary import EOS_TOK, UNK_TOK
np.random.seed(0)
def flatten_distribution(distribution_map, probabilities):
""" Flattens a probability distribution given a map of "unique" values.
All values in distribution_map with the same value should get the sum
of the probabilities.
Arguments:
distribution_map (`list`): List of values to get the probability for.
probabilities (`np.ndarray`): Probabilities corresponding to the values in
distribution_map.
    Returns:
        `tuple`: a deduplicated value list and an equally sized probability list,
        where the probability mass of duplicated values has been summed.
    """
assert len(distribution_map) == len(probabilities)
if len(distribution_map) != len(set(distribution_map)):
idx_first_dup = 0
seen_set = set()
for i, tok in enumerate(distribution_map):
if tok in seen_set:
idx_first_dup = i
break
seen_set.add(tok)
new_dist_map = distribution_map[:idx_first_dup] + list(
set(distribution_map) - set(distribution_map[:idx_first_dup]))
assert len(new_dist_map) == len(set(new_dist_map))
new_probs = np.array(
probabilities[:idx_first_dup] \
+ [0. for _ in range(len(set(distribution_map)) \
- idx_first_dup)])
assert len(new_probs) == len(new_dist_map)
for i, token_name in enumerate(distribution_map[idx_first_dup:]):
if token_name not in new_dist_map:
new_dist_map.append(token_name)
new_index = new_dist_map.index(token_name)
new_probs[new_index] += probabilities[i + idx_first_dup]
new_probs = new_probs.tolist()
else:
new_dist_map = distribution_map
new_probs = probabilities
assert len(new_dist_map) == len(new_probs)
return new_dist_map, new_probs
class SQLPrediction(
namedtuple('SQLPrediction', ('predictions', 'sequence',
'probability'))):
"""Contains prediction for a sequence."""
__slots__ = ()
def __str__(self):
return str(self.probability) + "\t" + " ".join(self.sequence)
class SequencePredictorWithSchema(paddle.nn.Layer):
""" Predicts a sequence.
Attributes:
lstms (list of dy.RNNBuilder): The RNN used.
token_predictor (TokenPredictor): Used to actually predict tokens.
"""
def __init__(self, params, input_size, output_embedder,
column_name_token_embedder, token_predictor):
super().__init__()
self.lstmCell = paddle.nn.LSTMCell(input_size,
params.decoder_state_size)
self.token_predictor = token_predictor
self.output_embedder = output_embedder
self.column_name_token_embedder = column_name_token_embedder
start_token_embedding = self.create_parameter(
[params.output_embedding_size],
dtype='float32',
default_initializer=paddle.nn.initializer.Uniform(
low=-0.1, high=0.1))
self.add_parameter("start_token_embedding", start_token_embedding)
self.input_size = input_size
self.params = params
def _initialize_decoder_lstm(self, encoder_state):
decoder_lstm_states = []
# check which one is h_0, which is c_0
c_0 = encoder_state[1].reshape([1, -1])
h_0 = encoder_state[0].reshape([1, -1])
decoder_lstm_states.append((h_0, c_0))
return decoder_lstm_states
def get_output_token_embedding(self, output_token, input_schema, snippets):
if self.params.use_snippets and snippet_handler.is_snippet(
output_token):
output_token_embedding = embedder.bow_snippets(
output_token, snippets, self.output_embedder, input_schema)
else:
if input_schema:
assert self.output_embedder.in_vocabulary(
output_token) or input_schema.in_vocabulary(
output_token, surface_form=True)
                # this branch is taken
if self.output_embedder.in_vocabulary(output_token):
output_token_embedding = self.output_embedder(output_token)
else:
output_token_embedding = input_schema.column_name_embedder(
output_token, surface_form=True)
else:
output_token_embedding = self.output_embedder(output_token)
return output_token_embedding
def get_decoder_input(self, output_token_embedding, prediction):
if self.params.use_schema_attention and self.params.use_query_attention:
decoder_input = paddle.concat(
[
output_token_embedding,
prediction.utterance_attention_results.vector,
prediction.schema_attention_results.vector,
prediction.query_attention_results.vector
],
axis=0)
elif self.params.use_schema_attention:
decoder_input = paddle.concat(
[
output_token_embedding,
prediction.utterance_attention_results.vector,
prediction.schema_attention_results.vector
],
axis=0)
else:
decoder_input = paddle.concat(
[
output_token_embedding,
prediction.utterance_attention_results.vector
],
axis=0)
return decoder_input
def forward(self,
final_encoder_state,
encoder_states,
schema_states,
max_generation_length,
snippets=None,
gold_sequence=None,
input_sequence=None,
previous_queries=None,
previous_query_states=None,
input_schema=None,
dropout_amount=0.):
""" Generates a sequence. """
index = 0
context_vector_size = self.input_size - self.params.output_embedding_size
# Decoder states: just the initialized decoder.
# Current input to decoder: phi(start_token) ; zeros the size of the
# context vector
predictions = []
sequence = []
probability = 1.
decoder_states = self._initialize_decoder_lstm(final_encoder_state)[0]
decoder_input = paddle.concat(
[self.start_token_embedding, paddle.zeros([context_vector_size])],
axis=0)
continue_generating = True
while continue_generating:
if len(sequence) == 0 or sequence[-1] != EOS_TOK:
decoder_state, decoder_states = self.lstmCell(
decoder_input.unsqueeze(0), decoder_states)
decoder_state = decoder_state.squeeze()
prediction_input = PredictionInputWithSchema(
decoder_state=decoder_state,
input_hidden_states=encoder_states,
schema_states=schema_states,
snippets=snippets,
input_sequence=input_sequence,
previous_queries=previous_queries,
previous_query_states=previous_query_states,
input_schema=input_schema)
prediction = self.token_predictor(
prediction_input, dropout_amount=dropout_amount)
predictions.append(prediction)
                # this branch is taken
if gold_sequence:
output_token = gold_sequence[index]
output_token_embedding = self.get_output_token_embedding(
output_token, input_schema, snippets)
decoder_input = self.get_decoder_input(
output_token_embedding, prediction)
sequence.append(gold_sequence[index])
if index >= len(gold_sequence) - 1:
continue_generating = False
else:
assert prediction.scores.dim() == 1
probabilities = F.softmax(
prediction.scores, axis=0).cpu().numpy().tolist()
distribution_map = prediction.aligned_tokens
assert len(probabilities) == len(distribution_map)
if self.params.use_previous_query and self.params.use_copy_switch and len(
previous_queries) > 0:
assert prediction.query_scores.dim() == 1
query_token_probabilities = F.softmax(
prediction.query_scores,
axis=0).cpu().data.numpy().tolist()
query_token_distribution_map = prediction.query_tokens
assert len(query_token_probabilities) == len(
query_token_distribution_map)
copy_switch = prediction.copy_switch.cpu().data.numpy()
# Merge the two
probabilities = ((np.array(probabilities) *
(1 - copy_switch)).tolist() +
(np.array(query_token_probabilities) *
copy_switch).tolist())
distribution_map = distribution_map + query_token_distribution_map
assert len(probabilities) == len(distribution_map)
# Get a new probabilities and distribution_map consolidating duplicates
distribution_map, probabilities = flatten_distribution(
distribution_map, probabilities)
# Modify the probability distribution so that the UNK token can never be produced
probabilities[distribution_map.index(UNK_TOK)] = 0.
argmax_index = int(np.argmax(probabilities))
argmax_token = distribution_map[argmax_index]
sequence.append(argmax_token)
output_token_embedding = self.get_output_token_embedding(
argmax_token, input_schema, snippets)
decoder_input = self.get_decoder_input(
output_token_embedding, prediction)
probability *= probabilities[argmax_index]
continue_generating = False
if index < max_generation_length and argmax_token != EOS_TOK:
continue_generating = True
index += 1
return SQLPrediction(predictions, sequence, probability)
```
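`flatten_distribution` above merges the probability mass of tokens that occur more than once in the candidate list before the argmax is taken; a tiny illustration (tokens and probabilities made up, assuming the module is importable):
```python
# Hypothetical candidate list with a duplicated token.
tokens = ["select", "from", "select", "where"]
probs = [0.4, 0.3, 0.2, 0.1]

new_tokens, new_probs = flatten_distribution(tokens, probs)
print(new_tokens)   # ['select', 'from', 'where'] -- duplicates collapsed
print(new_probs)    # roughly [0.6, 0.3, 0.1] -- mass of the duplicate summed
```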
#### File: IGSQL/model/embedder.py
```python
import paddle
import data_util.snippets as snippet_handler
import data_util.vocabulary as vocabulary_handler
class Embedder(paddle.nn.Layer):
""" Embeds tokens. """
def __init__(self,
embedding_size,
name="",
initializer=None,
vocabulary=None,
num_tokens=-1,
anonymizer=None,
freeze=False,
use_unk=True):
super().__init__()
if vocabulary:
assert num_tokens < 0, "Specified a vocabulary but also set number of tokens to " + \
str(num_tokens)
self.in_vocabulary = lambda token: token in vocabulary.tokens
self.vocab_token_lookup = lambda token: vocabulary.token_to_id(token)
if use_unk:
self.unknown_token_id = vocabulary.token_to_id(
vocabulary_handler.UNK_TOK)
else:
self.unknown_token_id = -1
self.vocabulary_size = len(vocabulary)
else:
def check_vocab(index):
""" Makes sure the index is in the vocabulary."""
assert index < num_tokens, "Passed token ID " + \
str(index) + "; expecting something less than " + str(num_tokens)
return index < num_tokens
self.in_vocabulary = check_vocab
self.vocab_token_lookup = lambda x: x
self.unknown_token_id = num_tokens # Deliberately throws an error here,
# But should crash before this
self.vocabulary_size = num_tokens
self.anonymizer = anonymizer
emb_name = name + "-tokens"
print("Creating token embedder called " + emb_name + " of size " + str(
self.vocabulary_size) + " x " + str(embedding_size))
if initializer is not None:
self.token_embedding_matrix = paddle.nn.Embedding(
initializer.shape[0], initializer.shape[1])
self.token_embedding_matrix.weight.set_value(initializer)
else:
initializer = paddle.nn.initializer.Uniform(low=-0.1, high=0.1)
init_tensor = paddle.ParamAttr(
initializer=initializer, trainable=True)
            self.token_embedding_matrix = paddle.nn.Embedding(
                self.vocabulary_size, embedding_size, weight_attr=init_tensor)  # use the ParamAttr built above
def forward(self, token):
assert isinstance(token, int) or not snippet_handler.is_snippet(
token
), "embedder should only be called on flat tokens; use snippet_bow if you are trying to encode snippets"
if self.in_vocabulary(token):
index_list = paddle.to_tensor(
self.vocab_token_lookup(token), 'int64')
return self.token_embedding_matrix(index_list).squeeze()
else:
index_list = paddle.to_tensor(self.unknown_token_id, 'int64')
return self.token_embedding_matrix(index_list).squeeze()
def bow_snippets(token, snippets, output_embedder, input_schema):
""" Bag of words embedding for snippets"""
assert snippet_handler.is_snippet(token) and snippets
snippet_sequence = []
for snippet in snippets:
if snippet.name == token:
snippet_sequence = snippet.sequence
break
assert snippet_sequence
if input_schema:
snippet_embeddings = []
for output_token in snippet_sequence:
assert output_embedder.in_vocabulary(
output_token) or input_schema.in_vocabulary(
output_token, surface_form=True)
if output_embedder.in_vocabulary(output_token):
snippet_embeddings.append(output_embedder(output_token))
else:
snippet_embeddings.append(
input_schema.column_name_embedder(
output_token, surface_form=True))
else:
snippet_embeddings = [
output_embedder(subtoken) for subtoken in snippet_sequence
]
snippet_embeddings = paddle.stack(
snippet_embeddings, axis=0) # len(snippet_sequence) x emb_size
return paddle.mean(snippet_embeddings, axis=0) # emb_size
```
#### File: IGSQL/model/token_predictor.py
```python
from collections import namedtuple
import paddle
import paddle.nn.functional as F
from . import model_utils
from .attention import Attention, AttentionResult
class PredictionInput(
namedtuple('PredictionInput', ('decoder_state', 'input_hidden_states',
'snippets', 'input_sequence'))):
""" Inputs to the token predictor. """
__slots__ = ()
class PredictionInputWithSchema(
namedtuple('PredictionInputWithSchema',
('decoder_state', 'input_hidden_states', 'schema_states',
'snippets', 'input_sequence', 'previous_queries',
'previous_query_states', 'input_schema'))):
""" Inputs to the token predictor. """
__slots__ = ()
class TokenPrediction(
namedtuple('TokenPrediction', (
'scores', 'aligned_tokens', 'utterance_attention_results',
'schema_attention_results', 'query_attention_results',
'copy_switch', 'query_scores', 'query_tokens', 'decoder_state'))):
"""A token prediction."""
__slots__ = ()
def score_schema_tokens(input_schema, schema_states, scorer):
# schema_states: emd_dim x num_tokens
scores = paddle.t(paddle.mm(paddle.t(scorer),
schema_states)) # num_tokens x 1
if scores.shape[0] != len(input_schema):
raise ValueError("Got " + str(scores.shape[0]) + " scores for " + str(
len(input_schema)) + " schema tokens")
return scores, input_schema.column_names_surface_form
def score_query_tokens(previous_query, previous_query_states, scorer):
scores = paddle.t(paddle.mm(paddle.t(scorer),
previous_query_states)) # num_tokens x 1
if scores.shape[0] != len(previous_query):
raise ValueError("Got " + str(scores.shape[0]) + " scores for " + str(
len(previous_query)) + " query tokens")
return scores, previous_query
class TokenPredictor(paddle.nn.Layer):
""" Predicts a token given a (decoder) state.
Attributes:
vocabulary (`Vocabulary`): A vocabulary object for the output.
attention_module (`Attention`): An attention module.
state_transformation_weights (`Parameter`): Transforms the input state
before predicting a token.
vocabulary_weights (`Parameter`): Final layer weights.
vocabulary_biases (`Parameter`): Final layer biases.
"""
def __init__(self, params, vocabulary, attention_key_size):
super().__init__()
self.params = params
self.vocabulary = vocabulary
self.attention_module = Attention(
params.decoder_state_size, attention_key_size, attention_key_size)
bias_initializer = paddle.nn.initializer.Uniform(low=-0.1, high=0.1)
_initializer = paddle.nn.initializer.XavierUniform()
state_transform_weights = paddle.ParamAttr(initializer=_initializer)
vocabulary_weights = paddle.ParamAttr(initializer=_initializer)
vocabulary_biases = paddle.ParamAttr(initializer=bias_initializer)
self.state_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size + attention_key_size,
out_features=params.decoder_state_size,
weight_attr=state_transform_weights,
bias_attr=False)
self.vocabulary_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size,
out_features=len(vocabulary),
            weight_attr=vocabulary_weights,
bias_attr=vocabulary_biases)
def _get_intermediate_state(self, state, dropout_amount=0.):
intermediate_state = paddle.tanh(self.state_transform_Linear(state))
return F.dropout(intermediate_state, dropout_amount)
def _score_vocabulary_tokens(self, state):
scores = paddle.t(self.vocabulary_Linear(state))
if scores.shape[0] != len(self.vocabulary.inorder_tokens):
raise ValueError("Got " + str(scores.shape[
0]) + " scores for " + str(
len(self.vocabulary.inorder_tokens)) + " vocabulary items")
return scores, self.vocabulary.inorder_tokens
def forward(self, prediction_input, dropout_amount=0.):
decoder_state = prediction_input.decoder_state
input_hidden_states = prediction_input.input_hidden_states
attention_results = self.attention_module(decoder_state,
input_hidden_states)
state_and_attn = paddle.concat(
[decoder_state, attention_results.vector], axis=0)
intermediate_state = self._get_intermediate_state(
state_and_attn, dropout_amount=dropout_amount)
vocab_scores, vocab_tokens = self._score_vocabulary_tokens(
intermediate_state)
        # Fill the unused fields so the 9-field TokenPrediction namedtuple can be constructed.
        return TokenPrediction(vocab_scores, vocab_tokens, attention_results,
                               None, None, None, None, None, decoder_state)
class SchemaTokenPredictor(TokenPredictor):
""" Token predictor that also predicts snippets.
Attributes:
snippet_weights (`Parameter`): Weights for scoring snippets against some
state.
"""
def __init__(self, params, vocabulary, utterance_attention_key_size,
schema_attention_key_size, snippet_size):
TokenPredictor.__init__(self, params, vocabulary,
utterance_attention_key_size)
_initializer = paddle.nn.initializer.XavierUniform()
if params.use_schema_attention:
self.utterance_attention_module = self.attention_module
self.schema_attention_module = Attention(params.decoder_state_size,
schema_attention_key_size,
schema_attention_key_size)
if self.params.use_query_attention:
self.query_attention_module = Attention(params.decoder_state_size,
params.encoder_state_size,
params.encoder_state_size)
self.start_query_attention_vector = self.create_parameter(
[params.encoder_state_size],
dtype='float32',
default_initializer=paddle.nn.initializer.Uniform(
low=-0.1, high=0.1))
state_transform_weights = paddle.ParamAttr(initializer=_initializer)
if params.use_schema_attention and self.params.use_query_attention:
self.state_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size +
utterance_attention_key_size + schema_attention_key_size +
params.encoder_state_size,
out_features=params.decoder_state_size,
weight_attr=state_transform_weights,
bias_attr=False)
elif params.use_schema_attention:
self.state_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size +
utterance_attention_key_size + schema_attention_key_size,
out_features=params.decoder_state_size,
weight_attr=state_transform_weights,
bias_attr=False)
schema_token_weights = paddle.ParamAttr(initializer=_initializer)
self.schema_token_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size,
out_features=schema_attention_key_size,
weight_attr=schema_token_weights,
bias_attr=False)
if self.params.use_previous_query:
query_token_weights = paddle.ParamAttr(initializer=_initializer)
self.query_token_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size,
out_features=self.params.encoder_state_size,
weight_attr=query_token_weights,
bias_attr=False)
if self.params.use_copy_switch:
state2copyswitch_transform_weights = paddle.ParamAttr(
initializer=_initializer)
if self.params.use_query_attention:
self.state2copyswitch_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size +
utterance_attention_key_size + schema_attention_key_size +
params.encoder_state_size,
out_features=1,
weight_attr=state2copyswitch_transform_weights,
bias_attr=False)
else:
self.state2copyswitch_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size +
utterance_attention_key_size + schema_attention_key_size,
out_features=1,
weight_attr=state2copyswitch_transform_weights,
bias_attr=False)
state2copy_transform_weights = paddle.ParamAttr(
initializer=_initializer)
self.state2copy_transform_Linear = paddle.nn.Linear(
in_features=params.decoder_state_size,
out_features=3,
weight_attr=state2copy_transform_weights,
bias_attr=False)
def _get_schema_token_scorer(self, state):
scorer = paddle.t(self.schema_token_Linear(state))
return scorer
def _get_query_token_scorer(self, state):
scorer = paddle.t(self.query_token_Linear(state))
return scorer
def _get_copy_switch(self, state):
copy_switch = F.sigmoid(self.state2copyswitch_transform_Linear(state))
return copy_switch.squeeze()
def forward(self, prediction_input, dropout_amount=0.):
decoder_state = prediction_input.decoder_state
input_hidden_states = prediction_input.input_hidden_states
snippets = prediction_input.snippets
input_schema = prediction_input.input_schema
schema_states = prediction_input.schema_states
if self.params.use_schema_attention:
schema_attention_results = self.schema_attention_module(
decoder_state, schema_states)
utterance_attention_results = self.utterance_attention_module(
decoder_state, input_hidden_states)
else:
utterance_attention_results = self.attention_module(
decoder_state, input_hidden_states)
schema_attention_results = None
query_attention_results = None
if self.params.use_query_attention:
previous_query_states = prediction_input.previous_query_states
if len(previous_query_states) > 0:
query_attention_results = self.query_attention_module(
decoder_state, previous_query_states[-1])
else:
query_attention_results = self.start_query_attention_vector
query_attention_results = AttentionResult(
None, None, query_attention_results)
if self.params.use_schema_attention and self.params.use_query_attention:
state_and_attn = paddle.concat(
[
decoder_state, utterance_attention_results.vector,
schema_attention_results.vector,
query_attention_results.vector
],
axis=0)
elif self.params.use_schema_attention:
state_and_attn = paddle.concat(
[
decoder_state, utterance_attention_results.vector,
schema_attention_results.vector
],
axis=0)
else:
state_and_attn = paddle.concat(
[decoder_state, utterance_attention_results.vector], axis=0)
intermediate_state = self._get_intermediate_state(
state_and_attn, dropout_amount=dropout_amount)
copy_score = F.sigmoid(
self.state2copy_transform_Linear(intermediate_state).squeeze(0))
vocab_scores, vocab_tokens = self._score_vocabulary_tokens(
intermediate_state)
final_scores = vocab_scores
aligned_tokens = []
aligned_tokens.extend(vocab_tokens)
schema_states = paddle.stack(schema_states, axis=1)
schema_scores, schema_tokens = score_schema_tokens(
input_schema, schema_states,
self._get_schema_token_scorer(intermediate_state))
final_scores = paddle.concat(
[copy_score[0] * final_scores, copy_score[1] * schema_scores],
axis=0)
aligned_tokens.extend(schema_tokens)
# Previous Queries
previous_queries = prediction_input.previous_queries
previous_query_states = prediction_input.previous_query_states
copy_switch = None
query_scores = None
query_tokens = None
if self.params.use_previous_query and len(previous_queries) > 0:
if self.params.use_copy_switch:
copy_switch = self._get_copy_switch(state_and_attn)
for turn, (
previous_query, previous_query_state
) in enumerate(zip(previous_queries, previous_query_states)):
assert len(previous_query) == len(previous_query_state)
previous_query_state = paddle.stack(
previous_query_state, axis=1)
query_scores, query_tokens = score_query_tokens(
previous_query, previous_query_state,
self._get_query_token_scorer(intermediate_state))
query_scores = query_scores.squeeze()
if query_scores is not None:
final_scores = paddle.concat(
[final_scores, copy_score[2] * query_scores], axis=0)
aligned_tokens += query_tokens
return TokenPrediction(
final_scores, aligned_tokens, utterance_attention_results,
schema_attention_results, query_attention_results, copy_switch,
query_scores, query_tokens, decoder_state)
class AnonymizationTokenPredictor(TokenPredictor):
""" Token predictor that also predicts anonymization tokens.
Attributes:
anonymizer (`Anonymizer`): The anonymization object.
"""
def __init__(self, params, vocabulary, attention_key_size, anonymizer):
TokenPredictor.__init__(self, params, vocabulary, attention_key_size)
if not anonymizer:
raise ValueError("Expected an anonymizer, but was None")
self.anonymizer = anonymizer
def _score_anonymized_tokens(self, input_sequence, attention_scores):
scores = []
tokens = []
for i, token in enumerate(input_sequence):
if self.anonymizer.is_anon_tok(token):
scores.append(attention_scores[i])
tokens.append(token)
if len(scores) > 0:
if len(scores) != len(tokens):
raise ValueError("Got " + str(len(scores)) + " scores for " +
str(len(tokens)) + " anonymized tokens")
anonymized_scores = paddle.concat(scores, axis=0)
if anonymized_scores.dim() == 1:
anonymized_scores = anonymized_scores.unsqueeze(1)
return anonymized_scores, tokens
else:
return None, []
def forward(self, prediction_input, dropout_amount=0.):
decoder_state = prediction_input.decoder_state
input_hidden_states = prediction_input.input_hidden_states
input_sequence = prediction_input.input_sequence
assert input_sequence
attention_results = self.attention_module(decoder_state,
input_hidden_states)
state_and_attn = paddle.concat(
[decoder_state, attention_results.vector], axis=0)
intermediate_state = self._get_intermediate_state(
state_and_attn, dropout_amount=dropout_amount)
vocab_scores, vocab_tokens = self._score_vocabulary_tokens(
intermediate_state)
final_scores = vocab_scores
aligned_tokens = []
aligned_tokens.extend(vocab_tokens)
anonymized_scores, anonymized_tokens = self._score_anonymized_tokens(
input_sequence, attention_results.scores)
        if anonymized_scores is not None:
final_scores = paddle.concat(
[final_scores, anonymized_scores], axis=0)
aligned_tokens.extend(anonymized_tokens)
final_scores = final_scores.squeeze()
return TokenPrediction(final_scores, aligned_tokens, attention_results,
None, None, None, None, None, decoder_state)
def construct_token_predictor(params,
vocabulary,
utterance_attention_key_size,
schema_attention_key_size,
snippet_size,
anonymizer=None):
""" Constructs a token predictor given the parameters.
Args:
params (`dict`): Contains the command line parameters/hyperparameters.
vocabulary (`Vocabulary`): Vocabulary object for output generation.
attention_key_size (`int`): The size of the attention keys.
anonymizer (`Anonymizer`): An anonymization object.
"""
if not anonymizer and not params.previous_decoder_snippet_encoding:
print('using SchemaTokenPredictor')
return SchemaTokenPredictor(params, vocabulary,
utterance_attention_key_size,
schema_attention_key_size, snippet_size)
elif params.use_snippets and anonymizer and not params.previous_decoder_snippet_encoding:
print('using SnippetAnonymizationTokenPredictor')
return SnippetAnonymizationTokenPredictor(params, vocabulary,
utterance_attention_key_size,
snippet_size, anonymizer)
else:
print('Unknown token_predictor')
exit()
```
#### File: RAT-SQL/script/schema_linking.py
```python
import sys
import os
import traceback
import logging
import json
from collections import defaultdict
import re
from paddlenlp.transformers import BertTokenizer
from text2sql.dataproc.dusql_dataset_v2 import load_tables
logging.basicConfig(
level=logging.DEBUG,
format='%(levelname)s: %(asctime)s %(filename)s'
' [%(funcName)s:%(lineno)d][%(process)d] %(message)s',
datefmt='%m-%d %H:%M:%S',
filename=None,
filemode='a')
g_date_patt = re.compile(
r'(([0-9]{2})[0-9]{2}年)?[0-9]{1,2}月[0-9]{2}日|([0-9]{2})[0-9]{2}年[0-9]{1,2}月')
def get_char_list(sentence):
def is_ascii(s):
"""check if s is English album or number
Args:
s (str): NULL
Returns: bool
"""
return ord(s) < 128
if len(sentence) == 0:
return []
lst_result = [sentence[0]]
last_is_ascii = lst_result[-1].isalnum()
for char in sentence[1:]:
if char == ' ':
last_is_ascii = False
continue
elif char == '-':
last_is_ascii = False
lst_result.append(char)
continue
if is_ascii(char) and last_is_ascii:
lst_result[-1] += char
continue
if is_ascii(char):
last_is_ascii = True
else:
last_is_ascii = False
lst_result.append(char)
return tuple(lst_result)
def _format_date_cell(old_cell):
new_cell = old_cell.rstrip('月日')
new_cell = new_cell.replace('年', '-')
new_cell = new_cell.replace('月', '-')
return new_cell
def _build(cells):
dct_index = defaultdict(set)
for cell in set(cells):
if type(cell) is not str:
continue
cell = cell.strip()
if re.match(g_date_patt, cell):
cell = _format_date_cell(cell)
cell_chars = get_char_list(cell.lower())
dct_index[cell.lower()].add((cell, len(cell_chars)))
for pos in range(len(cell_chars) - 1):
bigram = cell_chars[pos:pos + 2]
####tri_gram = cell_chars[pos: pos + 3]
####four_gram = cell_chars[pos: pos + 4]
dct_index[bigram].add((cell, len(cell_chars) - 1))
####dct_index[tri_gram].add((cell, len(cell_chars) - 2))
####dct_index[four_gram].add(cell)
return dct_index
def build_cell_index(db_dict):
for db in db_dict.values():
column_cells = []
for column in db.columns:
cell_index = _build(column.cells)
column_cells.append(cell_index)
db.column_cells_index = column_cells
def extract_value_from_sql(sql_json, sql_format='dusql'):
dct_col_values = defaultdict(list)
if sql_format == 'nl2sql':
        for col, _, val in sql_json['conds']:
dct_col_values[col].append(val)
return dct_col_values
def _merge_dict(base_dict, extra_dict):
for k, v in extra_dict.items():
base_dict[k].extend(v)
def _extract_value_from_sql_cond(cond, dct_col_values):
if type(cond[3]) is dict:
new_col_values = extract_value_from_sql(cond[3])
_merge_dict(dct_col_values, new_col_values)
return
col_id = cond[2][1][1]
dct_col_values[col_id].append(cond[3])
if cond[4] is not None:
dct_col_values[col_id].append(cond[4])
for table_unit in sql_json['from']['table_units']:
if type(table_unit[1]) is dict:
new_col_values = extract_value_from_sql(table_unit[1])
_merge_dict(dct_col_values, new_col_values)
for cond in sql_json['where'][::2]:
_extract_value_from_sql_cond(cond, dct_col_values)
for cond in sql_json['having'][::2]:
_extract_value_from_sql_cond(cond, dct_col_values)
if sql_json['intersect'] is not None:
new_col_values = extract_value_from_sql(sql_json['intersect'])
_merge_dict(dct_col_values, new_col_values)
if sql_json['union'] is not None:
new_col_values = extract_value_from_sql(sql_json['union'])
_merge_dict(dct_col_values, new_col_values)
if sql_json['except'] is not None:
new_col_values = extract_value_from_sql(sql_json['except'])
_merge_dict(dct_col_values, new_col_values)
return dct_col_values
def search_values(query, db, extra_values):
lst_match_values = []
for column, cell_index in zip(db.columns, db.column_cells_index):
if column.id == 0:
lst_match_values.append([])
continue
col_id = column.id
candi_cnt = defaultdict(float)
query_chars = get_char_list(query.lower())
appear_set = set()
for pos in range(len(query_chars)):
unigram = query_chars[pos]
if len(
unigram
) > 2 and unigram not in appear_set and unigram in cell_index:
for cell, base in cell_index[unigram]:
candi_cnt[cell] += 1.0 / base
if pos == len(query_chars) - 1:
break
bigram = query_chars[pos:pos + 2]
if bigram not in cell_index:
continue
if bigram in appear_set:
continue
appear_set.add(bigram)
for cell, base in cell_index[bigram]:
candi_cnt[cell] += 1.0 / base
if extra_values is not None and column.id in extra_values:
gold_values = extra_values[column.id]
for gval in gold_values:
candi_cnt[str(gval)] += 2.0
lst_match_values.append(
list(sorted(
candi_cnt.items(), key=lambda x: x[1], reverse=True))[:10])
return lst_match_values
if __name__ == "__main__":
import argparse
try:
arg_parser = argparse.ArgumentParser(
description="linking candidate values for each column")
arg_parser.add_argument(
"input",
nargs="?",
type=argparse.FileType('r'),
default=sys.stdin,
help="input file path")
arg_parser.add_argument(
"-s", "--db-schema", required=True, help="file path")
arg_parser.add_argument(
"-c", "--db-content", required=True, help="file path")
arg_parser.add_argument(
"-o",
"--output",
type=argparse.FileType('w'),
default=sys.stdout,
help="output file path")
arg_parser.add_argument(
'-t', '--is-train', default=False, action="store_true")
arg_parser.add_argument(
'-f',
'--sql-format',
default='dusql',
choices=['dusql', 'nl2sql', 'cspider'])
args = arg_parser.parse_args()
sys.stderr.write('>>> loading databases...\n')
dct_db, _ = load_tables(args.db_schema, args.db_content)
build_cell_index(dct_db)
sys.stderr.write('>>> extracting values...\n')
lst_output = []
for idx, item in enumerate(json.load(args.input)):
question_id = item.get('question_id', f'qid{idx:06d}')
question = item['question']
db_id = item['db_id']
db = dct_db[db_id]
extra_values = None
if args.is_train:
extra_values = extract_value_from_sql(item['sql'],
args.sql_format)
match_values = search_values(question, db, extra_values)
lst_output.append({
"question_id": question_id,
"question": question,
"db_id": db_id,
"match_values": match_values
})
json.dump(lst_output, args.output, indent=2, ensure_ascii=False)
except Exception as e:
traceback.print_exc()
#logging.critical(traceback.format_exc())
exit(-1)
```
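`get_char_list` above keeps runs of ASCII letters and digits together while splitting CJK text character by character, which is what the bigram cell index is built from; a quick check with a made-up mixed string:
```python
# Hypothetical mixed Chinese/English cell value.
print(get_char_list("abc中文12"))
# -> ('abc', '中', '文', '12'): ASCII runs stay merged, CJK characters are split
```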
#### File: RAT-SQL/text2sql/optim.py
```python
import sys
import os
import traceback
import logging
import json
import re
import paddle
param_name_to_exclue_from_weight_decay = re.compile(
r'.*layer_norm_scale|.*layer_norm_bias|.*b_0')
def get_warmup_and_linear_decay(max_steps, warmup_steps):
"""ERNIE/demo/utils.py"""
return lambda step: min(step / warmup_steps, 1. - (step - warmup_steps) / (max_steps - warmup_steps))
def init_optimizer(model, config, train_steps, scale_params_lr=None):
if scale_params_lr is not None:
for model, lr_scale in scale_params_lr:
for param in model.parameters():
param.optimize_attr['learning_rate'] *= lr_scale
warmup_steps = int(config.warmup_proportion * train_steps)
lr_scheduler = paddle.optimizer.lr.LambdaDecay(
config.learning_rate,
get_warmup_and_linear_decay(train_steps, warmup_steps))
optimizer = paddle.optimizer.AdamW(
lr_scheduler,
parameters=model.parameters(),
weight_decay=config.weight_decay,
        apply_decay_param_fun=lambda n: not param_name_to_exclude_from_weight_decay.match(n),
grad_clip=paddle.nn.ClipGradByGlobalNorm(config.grad_clip))
return optimizer
if __name__ == "__main__":
"""run some simple test cases"""
import types
model = paddle.vision.models.LeNet()
config = types.SimpleNamespace(
learning_rate=1e-3,
warmup_proportion=0.1,
weight_decay=0.2,
grad_clip=1.0)
optim = init_optimizer(model, config, train_steps=10000)
print(optim)
```
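For reference, a quick sanity check of the schedule returned by `get_warmup_and_linear_decay`: the multiplier climbs linearly to 1.0 over the warmup steps and then decays linearly to 0 at `max_steps`. This snippet is an assumed usage sketch; it presumes the RAT-SQL repo root is on `PYTHONPATH` so that `text2sql.optim` is importable.
```python
# Assumed usage sketch, not part of the repo: inspect the LR multiplier curve.
from text2sql.optim import get_warmup_and_linear_decay

fn = get_warmup_and_linear_decay(max_steps=100, warmup_steps=10)
for step in (0, 5, 10, 55, 100):
    print(step, round(fn(step), 3))
# -> 0 0.0, 5 0.5, 10 1.0, 55 0.5, 100 0.0
```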
#### File: text2sql/utils/utils.py
```python
import sys
import os
import traceback
import logging
import json
import time
import re
class Timer(object):
"""Stat Cost Time"""
def __init__(self, msg=""):
super(Timer, self).__init__()
self._msg = msg
self._start = time.time()
self._last = self._start
def reset(self, only_last=False, msg=None):
"""reset all setting
"""
if msg is not None:
self._msg = msg
curr_time = time.time()
self._last = curr_time
if not only_last:
self._start = curr_time
def check(self):
"""check cost time from start
"""
end = time.time()
cost = end - self._start
return cost
def interval(self):
"""check cost time from lst
"""
end = time.time()
cost = end - self._last
self._last = end
return cost
    def ending(self):
        """finish timing and log the cost
        """
        cost = '%.2f' % (time.time() - self._start)
if self._msg == "":
log_msg = "cost time: %s" % (cost)
elif '{}' in self._msg:
log_msg = self._msg.format(cost)
else:
log_msg = self._msg + cost
logging.info(log_msg)
def list_increment(lst: list, base: int):
"""increment each element in list
"""
for i in range(len(lst)):
lst[i] += base
return lst
def count_file_lines(filename):
cnt = 0
with open(filename) as ifs:
for _ in ifs:
cnt += 1
return cnt
def print_tensors(tag='*', **kwargs):
    """print tensors for debugging
    """
    print(tag * 50)
    for key, value in kwargs.items():
        print(key, ':', value)
if __name__ == "__main__":
"""run some simple test cases"""
import json
from boomup import data_struct
question = '三峡碧江需要大于2的招聘数量'
table_json = {
"rows": [[
4.0, "污水运行工", "三峡碧江公司", "渝北", 2.0, "大专及以上", "给排水/环境工程/机电及相关专业",
"<EMAIL>"
], [
5.0, "污水运行工", "三峡垫江公司", "垫江", 1.0, "大专及以上", "给排水/环境工程/机电及相关专业",
"<EMAIL>"
]],
"name": "Table_a7b5108c3b0611e98ad7f40f24344a08",
"title": "",
"header":
["岗位序号", "招聘岗位", "用人单位", "工作地点", "招聘数量", "学历要求", "专业及资格要求", "简历投递邮箱"],
"common": "",
"id": "a7b510",
"types":
["real", "text", "text", "text", "real", "text", "text", "text"]
}
table_json['header'] = data_struct.Header(table_json['header'],
table_json['types'])
table = data_struct.Table(**table_json)
```
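A short hypothetical usage sketch for `Timer` (assuming the module path `text2sql.utils.utils` above is importable): `interval()` measures time since the previous interval or reset, `check()` measures time since construction or a full reset, and `ending()` logs the total.
```python
# Hypothetical usage sketch for the Timer helper above, not part of the file.
import time
import logging

from text2sql.utils.utils import Timer

logging.basicConfig(level=logging.INFO)
timer = Timer(msg="total cost: {}s")
time.sleep(0.1)
print("interval:", round(timer.interval(), 2))    # ~0.1
time.sleep(0.1)
print("since start:", round(timer.check(), 2))    # ~0.2
timer.ending()                                    # logs "total cost: 0.20s"
```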
#### File: paddlenlp/datasets/bstc.py
```python
import os
import json
from paddle.utils.download import get_path_from_url
from paddlenlp.utils.env import DATA_HOME
from . import DatasetBuilder
class BSTC(DatasetBuilder):
"""
BSTC (Baidu Speech Translation Corpus), a large-scale Chinese-English
speech translation dataset. This dataset is constructed based on a
collection of licensed videos of talks or lectures, including about
68 hours of Mandarin data, their manual transcripts and translations
into English, as well as automated transcripts by an automatic speech
recognition (ASR) model.
Details: https://arxiv.org/pdf/2104.03575.pdf
"""
lazy = False
BUILDER_CONFIGS = {
'transcription_translation': {
'url':
"https://bj.bcebos.com/paddlenlp/datasets/bstc_transcription_translation.tar.gz",
'md5': '236800188e397c42a3251982aeee48ee',
'splits': {
'train':
[os.path.join('bstc_transcription_translation', 'train')],
'dev': [
os.path.join('bstc_transcription_translation', 'dev',
'streaming_transcription'),
os.path.join('bstc_transcription_translation', 'dev',
'ref_text')
]
}
},
'asr': {
'url': "https://bj.bcebos.com/paddlenlp/datasets/bstc_asr.tar.gz",
'md5': '3a0cc5039f45e62e29485e27d3a5f5a7',
'splits': {
'train': [os.path.join('bstc_asr', 'train', 'asr_sentences')],
'dev': [
os.path.join('bstc_asr', 'dev', 'streaming_asr'),
os.path.join('bstc_asr', 'dev', 'ref_text')
]
}
}
}
def _get_data(self, mode, **kwargs):
''' Check and download Dataset '''
builder_config = self.BUILDER_CONFIGS[self.name]
default_root = os.path.join(DATA_HOME, self.__class__.__name__)
source_file_dir = builder_config['splits'][mode][0]
source_full_dir = os.path.join(default_root, source_file_dir)
if not os.path.exists(source_full_dir):
get_path_from_url(builder_config['url'], default_root,
builder_config['md5'])
if mode == 'train':
return source_full_dir
elif mode == 'dev':
target_file_dir = builder_config['splits'][mode][1]
target_full_dir = os.path.join(default_root, target_file_dir)
if not os.path.exists(target_full_dir):
get_path_from_url(builder_config['url'], default_root,
builder_config['md5'])
return source_full_dir, target_full_dir
def _read(self, data_dir, split):
"""Reads data."""
if split == 'train':
if self.name == 'transcription_translation':
source_full_dir = data_dir
filenames = [
f for f in os.listdir(source_full_dir)
if not f.startswith('.')
]
filenames.sort(key=lambda x: int(x[:-5]))
for filename in filenames:
with open(
os.path.join(source_full_dir, filename),
'r',
encoding='utf-8') as f:
for line in f.readlines():
line = line.strip()
if not line:
continue
yield json.loads(line)
elif self.name == 'asr':
source_full_dir = data_dir
dir_list = [
f for f in os.listdir(source_full_dir)
if not f.startswith('.')
]
dir_list.sort(key=lambda x: int(x))
for dir_name in dir_list:
filenames = [
f
for f in os.listdir(
os.path.join(source_full_dir, dir_name))
if not f.startswith('.')
]
filenames.sort(key=lambda x: int(x[x.find('-') + 1:-5]))
for filename in filenames:
with open(
os.path.join(source_full_dir, dir_name,
filename),
'r',
encoding='utf-8') as f:
for line in f.readlines():
line = line.strip()
if not line:
continue
yield json.loads(line)
else:
raise ValueError(
'Argument name should be one of [transcription_translation, asr].'
)
elif split == 'dev':
source_full_dir, target_full_dir = data_dir
source_filenames = [
f for f in os.listdir(source_full_dir) if f.endswith('txt')
]
target_filenames = [
f for f in os.listdir(target_full_dir) if f.endswith('txt')
]
assert len(source_filenames) == len(target_filenames)
source_filenames.sort(
key=lambda x: int(x[:-4]) if self.name == 'transcription_translation' else int(x[:-8])
)
target_filenames.sort(key=lambda x: int(x[:-4]))
for src_file, tgt_file in zip(source_filenames, target_filenames):
if self.name == 'transcription_translation':
src_list = []
with open(
os.path.join(source_full_dir, src_file),
'r',
encoding='utf-8') as src_f:
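                        # Streaming transcriptions grow incrementally: each line extends
                        # the previous one, so a line that does not start with the previous
                        # line marks the beginning of a new utterance group.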
src_part = []
for src_line in src_f.readlines():
src_line = src_line.strip()
if not src_line:
continue
if len(src_part) != 0 and not src_line.startswith(
src_part[-1]):
src_list.append(src_part)
src_part = [src_line]
else:
src_part.append(src_line)
if len(src_part) > 0:
src_list.append(src_part)
elif self.name == 'asr':
src_list = []
with open(
os.path.join(source_full_dir, src_file),
'r',
encoding='utf-8') as src_f:
src_part = []
for src_line in src_f.readlines():
src_line = src_line.strip()
if not src_line:
continue
line = src_line.split(', ')
final = line[2].split(': ')[1] == 'final'
src_part.append(src_line)
if final:
src_list.append(src_part)
src_part = []
else:
raise ValueError(
'Argument name should be one of [transcription_translation, asr].'
)
tgt_list = []
with open(
os.path.join(target_full_dir, tgt_file),
'r',
encoding='utf-8') as tgt_f:
lines = tgt_f.readlines()
for idx, tgt_line in enumerate(lines):
tgt_line = tgt_line.strip()
if not tgt_line:
continue
tgt_list.append(tgt_line)
yield {'src': src_list, 'tgt': tgt_list}
```
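Reading `_read` above, train examples are the raw JSON objects from each file, while dev examples are dicts whose `src` holds the grouped streaming transcripts (or ASR hypotheses) and whose `tgt` holds the reference lines. The snippet below is an assumed usage sketch of PaddleNLP's generic `load_dataset` entry point with this builder; the exact keyword usage is an assumption, not taken from the file above.
```python
# Assumed usage sketch: loading BSTC via paddlenlp.datasets.load_dataset.
from paddlenlp.datasets import load_dataset

# `name` selects one of the builder configs defined in BUILDER_CONFIGS above.
train_ds = load_dataset('bstc', name='transcription_translation', splits='train')
dev_ds = load_dataset('bstc', name='asr', splits='dev')

print(train_ds[0])            # raw JSON object from a training file
print(dev_ds[0]['src'][0])    # first streaming group of the first dev talk
print(dev_ds[0]['tgt'][:2])   # first two reference lines
```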
#### File: paddlenlp/metrics/distinct.py
```python
import numpy as np
import paddle
__all__ = ['Distinct']
class Distinct(paddle.metric.Metric):
"""
`Distinct` is an algorithm for evaluating the textual diversity of the
generated text by calculating the number of distinct n-grams. The larger
the number of distinct n-grams, the higher the diversity of the text. See
details at https://arxiv.org/abs/1510.03055.
:class:`Distinct` could be used as a :class:`paddle.metric.Metric` class,
or an ordinary class. When :class:`Distinct` is used as a
:class:`paddle.metric.Metric` class, a function is needed to transform
the network output to a string list.
Args:
n_size (int, optional):
Number of gram for :class:`Distinct` metric. Defaults to 2.
trans_func (callable, optional):
`trans_func` transforms the network output to a string list. Defaults to None.
.. note::
When :class:`Distinct` is used as a :class:`paddle.metric.Metric`
class, `trans_func` must be provided. Please note that the
input of `trans_func` is numpy array.
name (str, optional): Name of :class:`paddle.metric.Metric` instance.
Defaults to "distinct".
Examples:
1. Using as a general evaluation object.
.. code-block:: python
from paddlenlp.metrics import Distinct
distinct = Distinct()
cand = ["The","cat","The","cat","on","the","mat"]
#update the states
distinct.add_inst(cand)
print(distinct.score())
# 0.8333333333333334
2. Using as an instance of `paddle.metric.Metric`.
.. code-block:: python
import numpy as np
from functools import partial
import paddle
from paddlenlp.transformers import BertTokenizer
from paddlenlp.metrics import Distinct
def trans_func(logits, tokenizer):
'''Transform the network output `logits` to string list.'''
# [batch_size, seq_len]
token_ids = np.argmax(logits, axis=-1).tolist()
cand_list = []
for ids in token_ids:
tokens = tokenizer.convert_ids_to_tokens(ids)
strings = tokenizer.convert_tokens_to_string(tokens)
cand_list.append(strings.split())
return cand_list
paddle.seed(2021)
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
distinct = Distinct(trans_func=partial(trans_func, tokenizer=tokenizer))
batch_size, seq_len, vocab_size = 4, 16, tokenizer.vocab_size
logits = paddle.rand([batch_size, seq_len, vocab_size])
distinct.update(logits.numpy())
print(distinct.accumulate()) # 1.0
"""
def __init__(self, n_size=2, trans_func=None, name="distinct"):
super(Distinct, self).__init__()
self._name = name
self.diff_ngram = set()
self.count = 0.0
self.n_size = n_size
self.trans_func = trans_func
def update(self, output, *args):
"""
        Updates the metric states. This method first uses the
        :meth:`trans_func` method to transform the `output` into the tokenized
        candidate sentence list, and then calls :meth:`add_inst` on each
        candidate sentence.
Args:
output (numpy.ndarray|Tensor):
The outputs of model.
args (tuple): The additional inputs.
"""
if isinstance(output, paddle.Tensor):
output = output.numpy()
assert self.trans_func is not None, "The `update` method requires user "\
"to provide `trans_func` when initializing `Distinct`."
cand_list = self.trans_func(output)
for cand in cand_list:
self.add_inst(cand)
def add_inst(self, cand):
"""
Updates the states based on the candidate.
Args:
cand (list): Tokenized candidate sentence generated by model.
"""
for i in range(0, len(cand) - self.n_size + 1):
ngram = ' '.join(cand[i:(i + self.n_size)])
self.count += 1
self.diff_ngram.add(ngram)
def reset(self):
"""Resets states and result."""
self.diff_ngram = set()
self.count = 0.0
def accumulate(self):
"""
Calculates the final distinct score.
Returns:
float: The final distinct score.
"""
distinct = len(self.diff_ngram) / self.count
return distinct
def score(self):
"""
The function is the same as :meth:`accumulate` method.
Returns:
float: The final distinct score.
"""
return self.accumulate()
def name(self):
"""
Returns the metric name.
Returns:
str: The metric name.
"""
return self._name
```
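Worked through by hand, the distinct-2 value quoted in the docstring above follows directly from the definition: the candidate contains 6 bigrams in total, 5 of which are unique, so the score is 5 / 6 ≈ 0.8333.
```python
# Worked distinct-2 example matching the docstring output above.
cand = ["The", "cat", "The", "cat", "on", "the", "mat"]
bigrams = [" ".join(cand[i:i + 2]) for i in range(len(cand) - 1)]
print(bigrams)                            # 6 bigrams, 'The cat' appears twice
print(len(set(bigrams)) / len(bigrams))   # 0.8333333333333334
```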
#### File: transformers/luke/modeling.py
```python
import paddle.nn as nn
import paddle
import paddle.nn.functional as F
from .. import PretrainedModel, register_base_model
from ...transformers.roberta.modeling import RobertaEmbeddings
import math
__all__ = [
'LukeModel', 'LukePretrainedModel', 'LukeForEntitySpanClassification',
'LukeForEntityPairClassification', 'LukeForEntityClassification',
'LukeForMaskedLM', 'LukeForQuestionAnswering'
]
def paddle_gather(x, dim, index):
index_shape = index.shape
index_flatten = index.flatten()
if dim < 0:
dim = len(x.shape) + dim
nd_index = []
for k in range(len(x.shape)):
if k == dim:
nd_index.append(index_flatten)
else:
reshape_shape = [1] * len(x.shape)
reshape_shape[k] = x.shape[k]
x_arange = paddle.arange(x.shape[k], dtype=index.dtype)
x_arange = x_arange.reshape(reshape_shape)
dim_index = paddle.expand(x_arange, index_shape).flatten()
nd_index.append(dim_index)
ind2 = paddle.transpose(paddle.stack(nd_index), [1, 0]).astype("int64")
paddle_out = paddle.gather_nd(x, ind2).reshape(index_shape)
return paddle_out
def get_activation(activation_string):
if activation_string in ACT2FN:
return ACT2FN[activation_string]
else:
raise KeyError("function {} not found in ACT2FN mapping {}".format(
activation_string, list(ACT2FN.keys())))
def mish(x):
return x * F.tanh(F.softplus(x))
def linear_act(x):
return x
def swish(x):
return x * F.sigmoid(x)
def gelu_new(x):
"""
Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see
the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
"""
return F.gelu(x, approximate=True)
ACT2FN = {
"relu": F.relu,
"gelu": F.gelu,
"gelu_new": gelu_new,
"tanh": F.tanh,
"sigmoid": F.sigmoid,
"mish": mish,
"linear": linear_act,
"swish": swish,
}
layer_norm_eps = 1e-6
class LukePretrainedModel(PretrainedModel):
r"""
An abstract class for pretrained Luke models. It provides Luke related
`model_config_file`, `pretrained_init_configuration`, `resource_files_names`,
`pretrained_resource_files_map`, `base_model_prefix` for downloading and
loading pretrained models.
See :class:`~paddlenlp.transformers.model_utils.PretrainedModel` for more details.
"""
model_config_file = "model_config.json"
pretrained_init_configuration = {
"luke-base": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"pad_token_id": 1,
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"initializer_range": 0.02,
"intermediate_size": 3072,
"max_position_embeddings": 514,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"type_vocab_size": 1,
"vocab_size": 50267
},
"luke-large": {
"attention_probs_dropout_prob": 0.1,
"hidden_act": "gelu",
"pad_token_id": 1,
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"initializer_range": 0.02,
"intermediate_size": 4096,
"max_position_embeddings": 514,
"num_attention_heads": 16,
"num_hidden_layers": 24,
"type_vocab_size": 1,
"vocab_size": 50267
}
}
resource_files_names = {"model_state": "model_state.pdparams"}
pretrained_resource_files_map = {
"model_state": {
"luke-base":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-base/model_state.pdparams",
"luke-large":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-large/model_state.pdparams",
}
}
base_model_prefix = "luke"
def init_weights(self, layer):
""" Initialization hook """
if isinstance(layer, (nn.Linear, nn.Embedding)):
# only support dygraph, use truncated_normal and make it inplace
# and configurable later
layer.weight.set_value(
paddle.tensor.normal(
mean=0.0,
std=self.initializer_range
if hasattr(self, "initializer_range") else self.luke.config[
"initializer_range"],
shape=layer.weight.shape))
elif isinstance(layer, nn.LayerNorm):
layer._epsilon = layer_norm_eps
class LukeSelfOutput(nn.Layer):
def __init__(self, hidden_size, hidden_dropout_prob):
super(LukeSelfOutput, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.layer_norm(hidden_states + input_tensor)
return hidden_states
class LukeIntermediate(nn.Layer):
def __init__(self, hidden_size, hidden_act, intermediate_size):
super().__init__()
self.dense = nn.Linear(hidden_size, intermediate_size)
self.intermediate_act_fn = get_activation(hidden_act)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.intermediate_act_fn(hidden_states)
return hidden_states
class LukeOutput(nn.Layer):
def __init__(self, intermediate_size, hidden_size, hidden_dropout_prob):
super(LukeOutput, self).__init__()
self.dense = nn.Linear(intermediate_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.layer_norm(hidden_states + input_tensor)
return hidden_states
class LukeEmbeddings(RobertaEmbeddings):
"""
Same as BertEmbeddings with a tiny tweak for positional embeddings indexing.
"""
def __init__(self,
vocab_size=50267,
hidden_size=768,
max_position_embeddings=514,
type_vocab_size=1,
pad_token_id=0,
hidden_dropout_prob=0.1):
super(LukeEmbeddings, self).__init__(
vocab_size=vocab_size,
hidden_size=hidden_size,
hidden_dropout_prob=hidden_dropout_prob,
max_position_embeddings=max_position_embeddings,
type_vocab_size=type_vocab_size,
pad_token_id=pad_token_id)
def forward(
self,
input_ids=None,
token_type_ids=None,
position_ids=None, ):
return super(LukeEmbeddings, self).forward(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids)
class LukePooler(nn.Layer):
def __init__(self, hidden_size):
super(LukePooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation = nn.Tanh()
def forward(self, hidden_states):
# We "pool" the model by simply taking the hidden state corresponding
# to the first token.
first_token_tensor = hidden_states[:, 0]
pooled_output = self.dense(first_token_tensor)
pooled_output = self.activation(pooled_output)
return pooled_output
class EntityEmbeddings(nn.Layer):
def __init__(self,
entity_vocab_size=500000,
entity_emb_size=256,
hidden_size=768,
max_position_embeddings=514,
type_vocab_size=1,
hidden_dropout_prob=0.1):
super(EntityEmbeddings, self).__init__()
self.entity_emb_size = entity_emb_size
self.hidden_size = hidden_size
self.entity_embeddings = nn.Embedding(
entity_vocab_size, entity_emb_size, padding_idx=0)
if entity_emb_size != hidden_size:
self.entity_embedding_dense = nn.Linear(
entity_emb_size, hidden_size, bias_attr=False)
self.position_embeddings = nn.Embedding(max_position_embeddings,
hidden_size)
self.token_type_embeddings = nn.Embedding(type_vocab_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, entity_ids, position_ids, token_type_ids=None):
if token_type_ids is None:
token_type_ids = paddle.zeros_like(entity_ids)
entity_embeddings = self.entity_embeddings(entity_ids)
if self.entity_emb_size != self.hidden_size:
entity_embeddings = self.entity_embedding_dense(entity_embeddings)
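        # An entity may span multiple word positions (padded with -1). Embed the
        # valid positions, zero out the padded ones, and average them so that each
        # entity gets a single position embedding.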
position_embeddings = self.position_embeddings(position_ids.clip(min=0))
position_embedding_mask = (
position_ids != -1).astype(position_embeddings.dtype).unsqueeze(-1)
position_embeddings = position_embeddings * position_embedding_mask
position_embeddings = paddle.sum(position_embeddings, axis=-2)
position_embeddings = position_embeddings / position_embedding_mask.sum(
axis=-2).clip(min=1e-7)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings = entity_embeddings + position_embeddings + token_type_embeddings
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class LukeSelfAttention(nn.Layer):
def __init__(self, num_attention_heads, hidden_size,
attention_probs_dropout_prob):
super(LukeSelfAttention, self).__init__()
self.num_attention_heads = num_attention_heads
self.attention_head_size = int(hidden_size / num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(hidden_size, self.all_head_size)
self.key = nn.Linear(hidden_size, self.all_head_size)
self.value = nn.Linear(hidden_size, self.all_head_size)
self.w2e_query = nn.Linear(hidden_size, self.all_head_size)
self.e2w_query = nn.Linear(hidden_size, self.all_head_size)
self.e2e_query = nn.Linear(hidden_size, self.all_head_size)
self.dropout = nn.Dropout(attention_probs_dropout_prob)
def transpose_for_scores(self, x):
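        # Reshape [batch, seq_len, all_head_size] -> [batch, num_heads, seq_len, head_size]
        # so that attention scores can be computed per head.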
new_x_shape = x.shape[:-1] + [
self.num_attention_heads, self.attention_head_size
]
x = x.reshape(new_x_shape)
return x.transpose((0, 2, 1, 3))
def forward(
self,
word_hidden_states,
entity_hidden_states,
attention_mask=None, ):
word_size = word_hidden_states.shape[1]
if entity_hidden_states is None:
concat_hidden_states = word_hidden_states
else:
concat_hidden_states = paddle.concat(
[word_hidden_states, entity_hidden_states], axis=1)
key_layer = self.transpose_for_scores(self.key(concat_hidden_states))
value_layer = self.transpose_for_scores(
self.value(concat_hidden_states))
if entity_hidden_states is not None:
# compute query vectors using word-word (w2w), word-entity (w2e), entity-word (e2w), entity-entity (e2e)
# query layers
w2w_query_layer = self.transpose_for_scores(
self.query(word_hidden_states))
w2e_query_layer = self.transpose_for_scores(
self.w2e_query(word_hidden_states))
e2w_query_layer = self.transpose_for_scores(
self.e2w_query(entity_hidden_states))
e2e_query_layer = self.transpose_for_scores(
self.e2e_query(entity_hidden_states))
# compute w2w, w2e, e2w, and e2e key vectors used with the query vectors computed above
w2w_key_layer = key_layer[:, :, :word_size, :]
e2w_key_layer = key_layer[:, :, :word_size, :]
w2e_key_layer = key_layer[:, :, word_size:, :]
e2e_key_layer = key_layer[:, :, word_size:, :]
# compute attention scores based on the dot product between the query and key vectors
w2w_attention_scores = paddle.matmul(w2w_query_layer,
w2w_key_layer.transpose(
(0, 1, 3, 2)))
w2e_attention_scores = paddle.matmul(w2e_query_layer,
w2e_key_layer.transpose(
(0, 1, 3, 2)))
e2w_attention_scores = paddle.matmul(e2w_query_layer,
e2w_key_layer.transpose(
(0, 1, 3, 2)))
e2e_attention_scores = paddle.matmul(e2e_query_layer,
e2e_key_layer.transpose(
(0, 1, 3, 2)))
# combine attention scores to create the final attention score matrix
word_attention_scores = paddle.concat(
[w2w_attention_scores, w2e_attention_scores], axis=3)
entity_attention_scores = paddle.concat(
[e2w_attention_scores, e2e_attention_scores], axis=3)
attention_scores = paddle.concat(
[word_attention_scores, entity_attention_scores], axis=2)
else:
query_layer = self.transpose_for_scores(
self.query(concat_hidden_states))
attention_scores = paddle.matmul(query_layer,
key_layer.transpose((0, 1, 3, 2)))
attention_scores = attention_scores / math.sqrt(
self.attention_head_size)
if attention_mask is not None:
# Apply the attention mask is (precomputed for all layers in LukeModel forward() function)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = nn.functional.softmax(attention_scores, axis=-1)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.dropout(attention_probs)
context_layer = paddle.matmul(attention_probs, value_layer)
context_layer = context_layer.transpose((0, 2, 1, 3))
new_context_layer_shape = context_layer.shape[:-2] + [
self.all_head_size,
]
context_layer = context_layer.reshape(new_context_layer_shape)
output_word_hidden_states = context_layer[:, :word_size, :]
if entity_hidden_states is None:
output_entity_hidden_states = None
else:
output_entity_hidden_states = context_layer[:, word_size:, :]
outputs = (output_word_hidden_states, output_entity_hidden_states)
return outputs
class LukeAttention(nn.Layer):
def __init__(
self,
num_attention_heads,
hidden_size,
attention_probs_dropout_prob,
hidden_dropout_prob, ):
super().__init__()
self.self = LukeSelfAttention(
num_attention_heads=num_attention_heads,
hidden_size=hidden_size,
attention_probs_dropout_prob=attention_probs_dropout_prob, )
self.output = LukeSelfOutput(
hidden_size=hidden_size, hidden_dropout_prob=hidden_dropout_prob)
def forward(
self,
word_hidden_states,
entity_hidden_states,
attention_mask=None, ):
word_size = word_hidden_states.shape[1]
self_outputs = self.self(word_hidden_states, entity_hidden_states,
attention_mask)
if entity_hidden_states is None:
concat_self_outputs = self_outputs[0]
concat_hidden_states = word_hidden_states
else:
concat_self_outputs = paddle.concat(self_outputs[:2], axis=1)
concat_hidden_states = paddle.concat(
[word_hidden_states, entity_hidden_states], axis=1)
attention_output = self.output(concat_self_outputs,
concat_hidden_states)
word_attention_output = attention_output[:, :word_size, :]
if entity_hidden_states is None:
entity_attention_output = None
else:
entity_attention_output = attention_output[:, word_size:, :]
# add attentions if we output them
outputs = (word_attention_output, entity_attention_output
) + self_outputs[2:]
return outputs
class LukeLayer(nn.Layer):
def __init__(self, num_attention_heads, hidden_size, hidden_act,
intermediate_size, attention_probs_dropout_prob,
hidden_dropout_prob):
super(LukeLayer, self).__init__()
self.seq_len_dim = 1
self.attention = LukeAttention(
num_attention_heads=num_attention_heads,
hidden_size=hidden_size,
attention_probs_dropout_prob=attention_probs_dropout_prob,
hidden_dropout_prob=hidden_dropout_prob)
self.intermediate = LukeIntermediate(
intermediate_size=intermediate_size,
hidden_act=hidden_act,
hidden_size=hidden_size)
self.output = LukeOutput(
intermediate_size=intermediate_size,
hidden_size=hidden_size,
hidden_dropout_prob=hidden_dropout_prob)
def forward(
self,
word_hidden_states,
entity_hidden_states,
attention_mask=None, ):
word_size = word_hidden_states.shape[1]
self_attention_outputs = self.attention(
word_hidden_states,
entity_hidden_states,
attention_mask, )
if entity_hidden_states is None:
concat_attention_output = self_attention_outputs[0]
else:
concat_attention_output = paddle.concat(
self_attention_outputs[:2], axis=1)
outputs = self_attention_outputs[
2:] # add self attentions if we output attention weights
layer_output = self.feed_forward_chunk(concat_attention_output)
word_layer_output = layer_output[:, :word_size, :]
if entity_hidden_states is None:
entity_layer_output = None
else:
entity_layer_output = layer_output[:, word_size:, :]
outputs = (word_layer_output, entity_layer_output) + outputs
return outputs
def feed_forward_chunk(self, attention_output):
intermediate_output = self.intermediate(attention_output)
layer_output = self.output(intermediate_output, attention_output)
return layer_output
class LukeEncoder(nn.Layer):
def __init__(self, num_attention_heads, hidden_size, hidden_act,
intermediate_size, num_hidden_layers,
attention_probs_dropout_prob, hidden_dropout_prob):
super(LukeEncoder, self).__init__()
self.layer = nn.LayerList([
LukeLayer(
num_attention_heads=num_attention_heads,
hidden_size=hidden_size,
hidden_act=hidden_act,
intermediate_size=intermediate_size,
attention_probs_dropout_prob=attention_probs_dropout_prob,
hidden_dropout_prob=hidden_dropout_prob)
for _ in range(num_hidden_layers)
])
def forward(
self,
word_hidden_states,
entity_hidden_states,
attention_mask=None, ):
for i, layer_module in enumerate(self.layer):
layer_outputs = layer_module(
word_hidden_states,
entity_hidden_states,
attention_mask, )
word_hidden_states = layer_outputs[0]
if entity_hidden_states is not None:
entity_hidden_states = layer_outputs[1]
return word_hidden_states, entity_hidden_states
@register_base_model
class LukeModel(LukePretrainedModel):
"""
The bare Luke Model transformer outputting raw hidden-states.
This model inherits from :class:`~paddlenlp.transformers.model_utils.PretrainedModel`.
Refer to the superclass documentation for the generic methods.
This model is also a Paddle `paddle.nn.Layer <https://www.paddlepaddle.org.cn/documentation
/docs/en/api/paddle/fluid/dygraph/layers/Layer_en.html>`__ subclass. Use it as a regular Paddle Layer
and refer to the Paddle documentation for all matter related to general usage and behavior.
Args:
vocab_size (int, optional):
Vocabulary size of `inputs_ids` in `LukeModel`. Also is the vocab size of token embedding matrix.
Defines the number of different tokens that can be represented by the `inputs_ids` passed when
calling `LukeModel`. Defaults to 50267.
hidden_size (int, optional):
Dimensionality of the embedding layer, encoder layer and pooler layer. Defaults to `768`.
num_hidden_layers (int, optional):
Number of hidden layers in the Transformer encoder. Defaults to `12`.
num_attention_heads (int, optional):
Number of attention heads for each attention layer in the Transformer encoder.
Defaults to `12`.
intermediate_size (int, optional):
Dimensionality of the feed-forward (ff) layer in the encoder. Input tensors
to ff layers are firstly projected from `hidden_size` to `intermediate_size`,
and then projected back to `hidden_size`. Typically `intermediate_size` is larger than `hidden_size`.
Defaults to `3072`.
hidden_act (str, optional):
The non-linear activation function in the feed-forward layer.
``"gelu"``, ``"relu"`` and any other paddle supported activation functions
are supported. Defaults to `"gelu"`.
hidden_dropout_prob (float, optional):
The dropout probability for all fully connected layers in the embeddings and encoder.
Defaults to `0.1`.
attention_probs_dropout_prob (float, optional):
The dropout probability used in MultiHeadAttention in all encoder layers to drop some attention target.
Defaults to `0.1`.
max_position_embeddings (int, optional):
The maximum value of the dimensionality of position encoding, which dictates the maximum supported length of an input
sequence. Defaults to `514`.
type_vocab_size (int, optional):
The vocabulary size of `token_type_ids`.
Defaults to `1`.
entity_vocab_size (int, optional):
Vocabulary size of `entity_ids` in `LukeModel`. Also is the vocab size of token entity embedding matrix.
            Defines the number of different entities that can be represented by the `entity_ids` passed when
calling `LukeModel`. Defaults to 500000.
entity_emb_size (int, optional):
            Dimensionality of the entity embedding layer. Defaults to `256`.
initializer_range (float, optional):
The standard deviation of the normal initializer.
Defaults to 0.02.
.. note::
A normal_initializer initializes weight matrices as normal distributions.
See :meth:`BertPretrainedModel.init_weights()` for how weights are initialized in `BertModel`.
pad_token_id (int, optional):
The index of padding token in the token vocabulary.
Defaults to `1`.
entity_pad_token_id (int, optional):
            The index of padding token in the entity vocabulary.
Defaults to `0`.
"""
def __init__(
self,
vocab_size=50267,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=514,
type_vocab_size=1,
entity_vocab_size=500000,
entity_emb_size=256,
initializer_range=0.02,
pad_token_id=1,
entity_pad_token_id=0, ):
super(LukeModel, self).__init__()
self.initializer_range = initializer_range
self.pad_token_id = pad_token_id
self.entity_pad_token_id = entity_pad_token_id
self.encoder = LukeEncoder(
hidden_act=hidden_act,
num_hidden_layers=num_hidden_layers,
hidden_size=hidden_size,
intermediate_size=intermediate_size,
hidden_dropout_prob=hidden_dropout_prob,
num_attention_heads=num_attention_heads,
attention_probs_dropout_prob=attention_probs_dropout_prob)
self.embeddings = LukeEmbeddings(
pad_token_id=pad_token_id,
vocab_size=vocab_size,
hidden_size=hidden_size,
max_position_embeddings=max_position_embeddings,
type_vocab_size=type_vocab_size,
hidden_dropout_prob=hidden_dropout_prob)
self.entity_embeddings = EntityEmbeddings(
entity_vocab_size=entity_vocab_size,
entity_emb_size=entity_emb_size,
hidden_size=hidden_size,
max_position_embeddings=max_position_embeddings,
type_vocab_size=type_vocab_size,
hidden_dropout_prob=hidden_dropout_prob)
self.pooler = LukePooler(hidden_size=hidden_size)
self.apply(self.init_weights)
def forward(
self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None, ):
r"""
The LukeModel forward method, overrides the `__call__()` special method.
Args:
input_ids (Tensor):
Indices of input sequence tokens in the vocabulary. They are
numerical representations of tokens that build the input sequence.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
token_type_ids (Tensor, optional):
Segment token indices to indicate different portions of the inputs.
Selected in the range ``[0, type_vocab_size - 1]``.
If `type_vocab_size` is 2, which means the inputs have two portions.
Indices can either be 0 or 1:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
Defaults to `None`, which means we don't add segment embeddings.
position_ids(Tensor, optional):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
max_position_embeddings - 1]``.
Shape as `(batch_size, num_tokens)` and dtype as int64. Defaults to `None`.
attention_mask (Tensor, optional):
Mask used in multi-head attention to avoid performing attention on to some unwanted positions,
usually the paddings or the subsequent positions.
Its data type can be int, float and bool.
When the data type is bool, the `masked` tokens have `False` values and the others have `True` values.
When the data type is int, the `masked` tokens have `0` values and the others have `1` values.
When the data type is float, the `masked` tokens have `-INF` values and the others have `0` values.
It is a tensor with shape broadcasted to `[batch_size, num_attention_heads, sequence_length, sequence_length]`.
Defaults to `None`, which means nothing needed to be prevented attention to.
entity_ids (Tensor, optional):
Indices of entity sequence tokens in the entity vocabulary. They are numerical
representations of entities that build the entity input sequence.
Its data type should be `int64` and it has a shape of [batch_size, entity_sequence_length].
entity_position_ids (Tensor, optional):
Indices of positions of each entity sequence tokens in the position embeddings. Selected in the range ``[0,
max_position_embeddings - 1]``.
Shape as `(batch_size, num_entity_tokens)` and dtype as int64. Defaults to `None`.
entity_token_type_ids (Tensor, optional):
Segment entity token indices to indicate different portions of the entity inputs.
Selected in the range ``[0, type_vocab_size - 1]``.
If `type_vocab_size` is 2, which means the inputs have two portions.
Indices can either be 0 or 1:
entity_attention_mask (Tensor, optional):
Mask used in multi-head attention to avoid performing attention on to some unwanted positions,
usually the paddings or the subsequent positions.
Its data type can be int, float and bool.
When the data type is bool, the `masked` tokens have `False` values and the others have `True` values.
When the data type is int, the `masked` tokens have `0` values and the others have `1` values.
When the data type is float, the `masked` tokens have `-INF` values and the others have `0` values.
It is a tensor will be concat with `attention_mask`.
Returns:
tuple: Returns tuple (`word_hidden_state, entity_hidden_state, pool_output`).
With the fields:
- `word_hidden_state` (Tensor):
Sequence of hidden-states at the last layer of the model.
                Its data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
- `entity_hidden_state` (Tensor):
Sequence of entity hidden-states at the last layer of the model.
                Its data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
- `pooled_output` (Tensor):
The output of first token (`<s>`) in sequence.
We "pool" the model by simply taking the hidden state corresponding to the first token.
Its data type should be float32 and its shape is [batch_size, hidden_size].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeModel, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeModel.from_pretrained('luke-base')
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
output = model(**inputs)
"""
input_shape = input_ids.shape
batch_size, seq_length = input_shape
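        # Build additive attention masks: masked positions get -1e4 so they are
        # suppressed after softmax. When entities are present, the word mask and
        # the entity mask are concatenated along the key dimension below.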
if attention_mask is None:
attention_mask = paddle.unsqueeze(
(input_ids == self.pad_token_id
).astype(self.pooler.dense.weight.dtype) * -1e4,
axis=[1, 2])
else:
if attention_mask.ndim == 2:
# attention_mask [batch_size, sequence_length] -> [batch_size, 1, 1, sequence_length]
attention_mask = attention_mask.unsqueeze(axis=[1, 2])
attention_mask = (1.0 - attention_mask) * -1e4
if entity_ids is not None:
entity_seq_length = entity_ids.shape[1]
if entity_attention_mask is None:
entity_attention_mask = paddle.unsqueeze(
(entity_ids == self.entity_pad_token_id
).astype(self.pooler.dense.weight.dtype) * -1e4,
axis=[1, 2])
else:
if entity_attention_mask.ndim == 2:
# attention_mask [batch_size, sequence_length] -> [batch_size, 1, 1, sequence_length]
entity_attention_mask = entity_attention_mask.unsqueeze(
axis=[1, 2])
entity_attention_mask = (1.0 - entity_attention_mask) * -1e4
if entity_token_type_ids is None:
entity_token_type_ids = paddle.zeros(
(batch_size, entity_seq_length), dtype='int64')
attention_mask = paddle.concat(
[attention_mask, entity_attention_mask], axis=-1)
word_embedding_output = self.embeddings(
input_ids=input_ids,
position_ids=position_ids,
token_type_ids=token_type_ids, )
if entity_ids is None:
entity_embedding_output = None
else:
entity_embedding_output = self.entity_embeddings(
entity_ids, entity_position_ids, entity_token_type_ids)
# Fourth, send embeddings through the model
encoder_outputs = self.encoder(
word_embedding_output,
entity_embedding_output,
attention_mask=attention_mask, )
sequence_output = encoder_outputs[0]
pooled_output = self.pooler(sequence_output)
return sequence_output, encoder_outputs[1], pooled_output
class LukeLMHead(nn.Layer):
"""Luke Head for masked language modeling."""
def __init__(self,
vocab_size,
hidden_size,
hidden_act,
embedding_weights=None):
super(LukeLMHead, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.activation = get_activation(hidden_act)
self.decoder_weight = self.create_parameter(
shape=[vocab_size, hidden_size],
            dtype=self.dense.weight.dtype,
is_bias=False) if embedding_weights is None else embedding_weights
self.decoder_bias = self.create_parameter(
shape=[vocab_size], dtype=self.decoder_weight.dtype, is_bias=True)
def forward(self, features, **kwargs):
hidden_state = self.dense(features)
hidden_state = self.activation(hidden_state)
hidden_state = self.layer_norm(hidden_state)
hidden_state = paddle.tensor.matmul(
hidden_state, self.decoder_weight,
transpose_y=True) + self.decoder_bias
return hidden_state
class EntityPredictionHeadTransform(nn.Layer):
def __init__(self, hidden_act, hidden_size, entity_emb_size):
super(EntityPredictionHeadTransform, self).__init__()
self.dense = nn.Linear(hidden_size, entity_emb_size)
self.transform_act_fn = get_activation(hidden_act)
self.layer_norm = nn.LayerNorm(entity_emb_size, epsilon=layer_norm_eps)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.transform_act_fn(hidden_states)
hidden_states = self.layer_norm(hidden_states)
return hidden_states
class EntityPredictionHead(nn.Layer):
def __init__(self, hidden_size, entity_vocab_size, entity_emb_size,
hidden_act):
super(EntityPredictionHead, self).__init__()
self.transform = EntityPredictionHeadTransform(
hidden_size=hidden_size,
hidden_act=hidden_act,
entity_emb_size=entity_emb_size)
self.decoder = nn.Linear(entity_emb_size, entity_vocab_size)
def forward(self, hidden_states):
hidden_states = self.transform(hidden_states)
hidden_states = self.decoder(hidden_states)
return hidden_states
class LukeForMaskedLM(LukePretrainedModel):
"""
Luke Model with a `masked language modeling` head on top.
Args:
luke (:class:`LukeModel`):
An instance of :class:`LukeModel`.
"""
def __init__(self, luke):
super(LukeForMaskedLM, self).__init__()
self.luke = luke
self.vocab_size = self.luke.config['vocab_size']
self.entity_vocab_size = self.luke.config['entity_vocab_size']
self.lm_head = LukeLMHead(
vocab_size=self.luke.config['vocab_size'],
hidden_size=self.luke.config['hidden_size'],
hidden_act=self.luke.config['hidden_act'],
embedding_weights=self.luke.embeddings.word_embeddings.weight)
self.entity_predictions = EntityPredictionHead(
hidden_size=self.luke.config['hidden_size'],
hidden_act=self.luke.config['hidden_act'],
entity_vocab_size=self.luke.config['entity_vocab_size'],
entity_emb_size=self.luke.config['entity_emb_size'])
self.apply(self.init_weights)
def forward(self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None):
r"""
The LukeForMaskedLM forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`LukeModel`.
token_type_ids (Tensor, optional):
See :class:`LukeModel`.
position_ids (Tensor, optional):
                See :class:`LukeModel`.
attention_mask (list, optional):
See :class:`LukeModel`.
entity_ids (Tensor, optional):
See :class:`LukeModel`.
entity_position_ids (Tensor, optional):
See :class:`LukeModel`.
entity_token_type_ids (Tensor, optional):
See :class:`LukeModel`.
entity_attention_mask (list, optional):
See :class:`LukeModel`.
Returns:
tuple: Returns tuple (``logits``, ``entity_logits``).
With the fields:
- `logits` (Tensor):
The scores of masked token prediction.
Its data type should be float32 and shape is [batch_size, sequence_length, vocab_size].
- `entity_logits` (Tensor):
The scores of masked entity prediction.
Its data type should be float32 and its shape is [batch_size, entity_length, entity_vocab_size].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeForMaskedLM, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeForMaskedLM.from_pretrained('luke-base')
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
logits, entity_logits = model(**inputs)
"""
outputs = self.luke(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_token_type_ids=entity_token_type_ids,
entity_attention_mask=entity_attention_mask)
logits = self.lm_head(outputs[0])
entity_logits = self.entity_predictions(outputs[1])
return logits, entity_logits
class LukeForEntityClassification(LukePretrainedModel):
"""
The LUKE model with a classification head on top (a linear layer on top of the hidden state of the first entity
token) for entity classification tasks, such as Open Entity.
Args:
luke (:class:`LukeModel`):
An instance of LukeModel.
num_classes (int):
The number of classes.
"""
def __init__(self, luke, num_classes):
super(LukeForEntityClassification, self).__init__()
self.luke = luke
self.num_classes = num_classes
self.dropout = nn.Dropout(self.luke.config['hidden_dropout_prob'])
self.classifier = nn.Linear(self.luke.config['hidden_size'],
num_classes)
self.apply(self.init_weights)
def forward(self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None):
r"""
The LukeForEntityClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`LukeModel`.
token_type_ids (Tensor, optional):
See :class:`LukeModel`.
position_ids (Tensor, optional):
                See :class:`LukeModel`.
attention_mask (list, optional):
See :class:`LukeModel`.
entity_ids (Tensor, optional):
See :class:`LukeModel`.
entity_position_ids (Tensor, optional):
See :class:`LukeModel`.
entity_token_type_ids (Tensor, optional):
See :class:`LukeModel`.
entity_attention_mask (list, optional):
See :class:`LukeModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the entity classification logits.
Shape as `[batch_size, num_classes]` and dtype as float32.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeForEntityClassification, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeForEntityClassification.from_pretrained('luke-base', num_classes=2)
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
logits = model(**inputs)
"""
outputs = self.luke(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_token_type_ids=entity_token_type_ids,
entity_attention_mask=entity_attention_mask)
feature_vector = outputs[1][:, 0, :]
feature_vector = self.dropout(feature_vector)
logits = self.classifier(feature_vector)
return logits
class LukeForEntityPairClassification(LukePretrainedModel):
"""
The LUKE model with a classification head on top (a linear layer on top of the hidden states of the two entity
tokens) for entity pair classification tasks, such as TACRED.
Args:
luke (:class:`LukeModel`):
An instance of LukeModel.
num_classes (int):
The number of classes.
"""
def __init__(self, luke, num_classes):
super(LukeForEntityPairClassification, self).__init__()
self.luke = luke
self.num_classes = num_classes
self.dropout = nn.Dropout(self.luke.config['hidden_dropout_prob'])
self.classifier = nn.Linear(
self.luke.config['hidden_size'] * 2, num_classes, bias_attr=False)
self.apply(self.init_weights)
def forward(
self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None, ):
r"""
The LukeForEntityPairClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`LukeModel`.
token_type_ids (Tensor, optional):
See :class:`LukeModel`.
position_ids (Tensor, optional):
                See :class:`LukeModel`.
attention_mask (list, optional):
See :class:`LukeModel`.
entity_ids (Tensor, optional):
See :class:`LukeModel`.
entity_position_ids (Tensor, optional):
See :class:`LukeModel`.
entity_token_type_ids (Tensor, optional):
See :class:`LukeModel`.
entity_attention_mask (list, optional):
See :class:`LukeModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the entity pair classification logits.
Shape as `[batch_size, num_classes]` and dtype as float32.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeForEntityPairClassification, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeForEntityPairClassification.from_pretrained('luke-base', num_classes=2)
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7), (17, 28)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
logits = model(**inputs)
"""
outputs = self.luke(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_token_type_ids=entity_token_type_ids,
entity_attention_mask=entity_attention_mask)
feature_vector = paddle.concat(
[outputs[1][:, 0, :], outputs[1][:, 1, :]], axis=1)
feature_vector = self.dropout(feature_vector)
logits = self.classifier(feature_vector)
return logits
class LukeForEntitySpanClassification(LukePretrainedModel):
"""
The LUKE model with a span classification head on top (a linear layer on top of the hidden states output) for tasks
such as named entity recognition.
Args:
luke (:class:`LukeModel`):
An instance of LukeModel.
num_classes (int):
The number of classes.
"""
def __init__(self, luke, num_classes):
super(LukeForEntitySpanClassification, self).__init__()
self.luke = luke
self.num_classes = num_classes
self.dropout = nn.Dropout(self.luke.config['hidden_dropout_prob'])
self.classifier = nn.Linear(self.luke.config['hidden_size'] * 3,
num_classes)
self.apply(self.init_weights)
def forward(self,
entity_start_positions,
entity_end_positions,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None):
r"""
The LukeForEntitySpanClassification forward method, overrides the __call__() special method.
Args:
entity_start_positions:
The start position of entities in sequence.
entity_end_positions:
                The end position of entities in sequence.
input_ids (Tensor):
See :class:`LukeModel`.
token_type_ids (Tensor, optional):
See :class:`LukeModel`.
position_ids (Tensor, optional):
                See :class:`LukeModel`.
attention_mask (list, optional):
See :class:`LukeModel`.
entity_ids (Tensor, optional):
See :class:`LukeModel`.
entity_position_ids (Tensor, optional):
See :class:`LukeModel`.
entity_token_type_ids (Tensor, optional):
See :class:`LukeModel`.
entity_attention_mask (list, optional):
See :class:`LukeModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the entity span classification logits.
Shape as `[batch_size, num_entities, num_classes]` and dtype as float32.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeForEntitySpanClassification, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeForEntitySpanClassification.from_pretrained('luke-base', num_classes=2)
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
inputs['entity_start_positions'] = paddle.to_tensor([[1]], dtype='int64')
inputs['entity_end_positions'] = paddle.to_tensor([[2]], dtype='int64')
logits = model(**inputs)
"""
outputs = self.luke(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_token_type_ids=entity_token_type_ids,
entity_attention_mask=entity_attention_mask)
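        # Build each span's feature vector as [start_state; end_state; entity_state]:
        # gather the word hidden states at the entity start/end token positions and
        # concatenate them with the corresponding entity hidden state.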
hidden_size = outputs[0].shape[-1]
entity_start_positions = entity_start_positions.unsqueeze(-1).expand(
(-1, -1, hidden_size))
start_states = paddle_gather(
x=outputs[0], index=entity_start_positions, dim=-2)
entity_end_positions = entity_end_positions.unsqueeze(-1).expand(
(-1, -1, hidden_size))
end_states = paddle_gather(
x=outputs[0], index=entity_end_positions, dim=-2)
feature_vector = paddle.concat(
[start_states, end_states, outputs[1]], axis=2)
feature_vector = self.dropout(feature_vector)
logits = self.classifier(feature_vector)
return logits
class LukeForQuestionAnswering(LukePretrainedModel):
"""
    Luke Model with a linear layer on top of the hidden-states output, computing
    start and end logits for extractive question answering tasks.
Args:
luke (:class:`LukeModel`):
An instance of :class:`LukeModel`.
"""
def __init__(self, luke):
super(LukeForQuestionAnswering, self).__init__()
self.luke = luke
self.qa_outputs = nn.Linear(self.luke.config['hidden_size'], 2)
self.apply(self.init_weights)
def forward(self,
input_ids=None,
token_type_ids=None,
position_ids=None,
attention_mask=None,
entity_ids=None,
entity_position_ids=None,
entity_token_type_ids=None,
entity_attention_mask=None):
r"""
The LukeForQuestionAnswering forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`LukeModel`.
token_type_ids (Tensor, optional):
See :class:`LukeModel`.
position_ids (Tensor, optional):
                See :class:`LukeModel`.
attention_mask (list, optional):
See :class:`LukeModel`.
entity_ids (Tensor, optional):
See :class:`LukeModel`.
entity_position_ids (Tensor, optional):
See :class:`LukeModel`.
entity_token_type_ids (Tensor, optional):
See :class:`LukeModel`.
entity_attention_mask (list, optional):
See :class:`LukeModel`.
Returns:
tuple: Returns tuple (`start_logits`, `end_logits`).
With the fields:
- `start_logits` (Tensor):
A tensor of the input token classification logits, indicates the start position of the labelled span.
Its data type should be float32 and its shape is [batch_size, sequence_length].
- `end_logits` (Tensor):
A tensor of the input token classification logits, indicates the end position of the labelled span.
Its data type should be float32 and its shape is [batch_size, sequence_length].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import LukeForQuestionAnswering, LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-base')
model = LukeForQuestionAnswering.from_pretrained('luke-base')
text = "Beyoncé lives in Los Angeles."
entity_spans = [(0, 7)]
inputs = tokenizer(text, entity_spans=entity_spans, add_prefix_space=True)
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
start_logits, end_logits = model(**inputs)
"""
encoder_outputs = self.luke(
input_ids=input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_token_type_ids=entity_token_type_ids,
entity_attention_mask=entity_attention_mask)
word_hidden_states = encoder_outputs[0][:, :input_ids.shape[1], :]
logits = self.qa_outputs(word_hidden_states)
start_logits, end_logits = paddle.split(logits, 2, -1)
start_logits = start_logits.squeeze(-1)
end_logits = end_logits.squeeze(-1)
return start_logits, end_logits
```
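The `paddle_gather` helper at the top of this file has no docstring; the sketch below, an assumed usage example importing it from the module path of the file above, illustrates that it mimics `torch.gather`: along `dim`, `out[i][j] = x[i][index[i][j]]`, which is exactly how the entity-span head selects hidden states at start and end token positions.
```python
# Assumed usage sketch for paddle_gather (torch.gather-style indexing).
import paddle
from paddlenlp.transformers.luke.modeling import paddle_gather

x = paddle.to_tensor([[10., 11., 12.],
                      [20., 21., 22.]])
index = paddle.to_tensor([[2, 0],
                          [1, 1]])
# out[i][j] = x[i][index[i][j]]  ->  [[12., 10.], [21., 21.]]
print(paddle_gather(x, dim=1, index=index))
```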
#### File: transformers/luke/tokenizer.py
```python
from typing import Optional, Union, List, Dict
try:
import regex as re
except ImportError:
import re
import sys
import json
import itertools
from .. import RobertaTokenizer
from itertools import repeat
import warnings
try:
from functools import lru_cache
except ImportError:
# Just a dummy decorator to get the checks to run on python2
# because honestly I don't want to support a byte-level unicode BPE tokenizer on python 2 right now.
def lru_cache():
return lambda func: func
__all__ = ['LukeTokenizer']
_add_prefix_space = False
def get_pairs(word):
"""Return set of symbol pairs in a word.
Word is represented as tuple of symbols (symbols being variable-length strings).
"""
pairs = set()
prev_char = word[0]
for char in word[1:]:
pairs.add((prev_char, char))
prev_char = char
return pairs
@lru_cache()
def bytes_to_unicode():
"""
    Returns a mapping from utf-8 bytes to unicode strings.
    We specifically avoid mapping to whitespace/control characters that the bpe code barfs on.
The reversible bpe codes work on unicode strings.
This means you need a large # of unicode characters in your vocab if you want to avoid UNKs.
When you're at something like a 10B token dataset you end up needing around 5K for decent coverage.
    This is a significant percentage of your normal, say, 32K bpe vocab.
To avoid that, we want lookup tables between utf-8 bytes and unicode strings.
"""
_chr = chr
bs = list(range(ord("!"), ord("~") + 1)) + list(
range(ord("¡"), ord("¬") + 1)) + list(range(ord("®"), ord("ÿ") + 1))
cs = bs[:]
n = 0
for b in range(2**8):
if b not in bs:
bs.append(b)
cs.append(2**8 + n)
n += 1
cs = [_chr(n) for n in cs]
return dict(zip(bs, cs))
class LukeTokenizer(RobertaTokenizer):
"""
Constructs a Luke tokenizer. It uses a basic tokenizer to do punctuation
splitting, lower casing and so on, and follows a WordPiece tokenizer to
tokenize as subwords.
This tokenizer inherits from :class:`~paddlenlp.transformers.tokenizer_utils.PretrainedTokenizer`
which contains most of the main methods. For more information regarding those methods,
please refer to this superclass.
Args:
vocab_file (str):
The vocabulary file path (ends with '.json') required to instantiate
a `WordpieceTokenizer`.
entity_file (str):
            The entity vocabulary file path (ends with '.json') required to instantiate
a `EntityTokenizer`.
do_lower_case (bool):
Whether or not to lowercase the input when tokenizing.
Defaults to `True`.
unk_token (str):
A special token representing the *unknown (out-of-vocabulary)* token.
An unknown token is set to be `unk_token` in order to be converted to an ID.
Defaults to "<unk>".
sep_token (str):
A special token separating two different sentences in the same input.
Defaults to "</s>".
pad_token (str):
A special token used to make arrays of tokens the same size for batching purposes.
Defaults to "<pad>".
cls_token (str):
A special token used for sequence classification. It is the first token
of the sequence when built with special tokens. Defaults to "<s>".
mask_token (str):
A special token representing a masked token. This is the token used
in the masked language modeling task, which the model tries to predict as the original unmasked token.
Defaults to "<mask>".
Examples:
.. code-block::
from paddlenlp.transformers import LukeTokenizer
tokenizer = LukeTokenizer.from_pretrained('luke-large')
tokens = tokenizer('Beyoncé lives in Los Angeles', entity_spans=[(0, 7), (17, 28)])
#{'input_ids': [0, 40401, 261, 12695, 1074, 11, 1287, 1422, 2], 'entity_ids': [1657, 32]}
"""
# resource_files_names = {"vocab_file": "vocab.txt"} # for save_pretrained
resource_files_names = {
"vocab_file": "vocab.json",
"merges_file": "merges.txt",
"entity_file": "entity_vocab.json"
}
pretrained_resource_files_map = {
"vocab_file": {
"luke-base":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-base/vocab.json",
"luke-large":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-large/vocab.json"
},
"merges_file": {
"luke-base":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-base/merges.txt",
"luke-large":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-large/merges.txt"
},
"entity_file": {
"luke-base":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-base/entity_vocab.json",
"luke-large":
"https://bj.bcebos.com/paddlenlp/models/transformers/luke/luke-large/entity_vocab.json"
},
}
pretrained_init_configuration = {
"luke-base": {
"do_lower_case": True
},
"luke-large": {
"do_lower_case": True
}
}
def __init__(self,
vocab_file,
entity_file,
merges_file,
do_lower_case=True,
unk_token="<unk>",
sep_token="</s>",
pad_token="<pad>",
cls_token="<s>",
mask_token="<mask>"):
with open(vocab_file, encoding="utf-8") as vocab_handle:
self.encoder = json.load(vocab_handle)
with open(entity_file, encoding='utf-8') as entity_vocab_handle:
self.entity_vocab = json.load(entity_vocab_handle)
self.sep_token, self.sep_token_id = sep_token, self.encoder[sep_token]
self.cls_token, self.cls_token_id = cls_token, self.encoder[cls_token]
self.pad_token, self.pad_token_id = pad_token, self.encoder[pad_token]
self.unk_token, self.unk_token_id = unk_token, self.encoder[unk_token]
self._all_special_tokens = [
unk_token, sep_token, pad_token, cls_token, mask_token
]
self.decoder = {v: k for k, v in self.encoder.items()}
self.errors = 'replace' # how to handle errors in decoding
self.byte_encoder = bytes_to_unicode()
self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
with open(merges_file, encoding='utf-8') as merges_handle:
bpe_merges = merges_handle.read().split('\n')[1:-1]
bpe_merges = [tuple(merge.split()) for merge in bpe_merges]
self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges))))
self.cache = {}
self.added_tokens_encoder = {}
self.added_tokens_decoder = {}
# Should have added re.IGNORECASE so BPE merges can happen for capitalized versions of contractions
self.pat = re.compile(
r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+"""
)
super(LukeTokenizer, self).__init__(
vocab_file,
merges_file,
do_lower_case=do_lower_case,
unk_token=unk_token,
sep_token=sep_token,
pad_token=pad_token,
cls_token=cls_token,
mask_token=mask_token)
def get_entity_vocab(self):
"""Get the entity vocab"""
return self.entity_vocab
def _convert_token_to_id(self, token):
""" Converts a token (str/unicode) in an id using the vocab. """
return self.encoder.get(token, self.encoder.get(self.unk_token))
def _convert_id_to_token(self, index):
"""Converts an index (integer) in a token (string/unicode) using the vocab."""
return self.decoder.get(index)
def _tokenize(self, text, add_prefix_space=False):
if add_prefix_space:
text = ' ' + text
bpe_tokens = []
for token in re.findall(self.pat, text):
if sys.version_info[0] == 2:
token = ''.join(
self.byte_encoder[ord(b)] for b in token
) # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
else:
token = ''.join(
self.byte_encoder[b] for b in token.encode('utf-8')
) # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
bpe_tokens.extend(
bpe_token for bpe_token in self.bpe(token).split(' '))
return bpe_tokens
def __call__(self,
text,
text_pair=None,
entity_spans=None,
entity_spans_pair=None,
entities=None,
entities_pair=None,
max_mention_length=30,
max_seq_len: Optional[int]=None,
stride=0,
add_prefix_space=False,
is_split_into_words=False,
pad_to_max_seq_len=False,
truncation_strategy="longest_first",
return_position_ids=True,
return_token_type_ids=False,
return_attention_mask=True,
return_length=False,
return_overflowing_tokens=False,
return_special_tokens_mask=False):
"""
Performs tokenization and uses the tokenized tokens to prepare model
inputs. It supports sequence or sequence pair as input, and batch input
is allowed. `self.encode()` or `self.batch_encode()` would be called
separately for single or batch input depending on input format and
`is_split_into_words` argument.
Args:
text (str, List[str] or List[List[str]]):
The sequence or batch of sequences to be processed. One sequence
is a string or a list of strings depending on whether it has been
pretokenized. If each sequence is provided as a list of strings
(pretokenized), you must set `is_split_into_words` as `True` to
disambiguate with a batch of sequences.
text_pair (str, List[str] or List[List[str]], optional):
Same as `text` argument, while it represents for the latter
sequence of the sequence pair.
entity_spans (`List[Tuple[int, int]]`, `List[List[Tuple[int, int]]]`, *optional*):
The sequence or batch of sequences of entity spans to be encoded. Each sequence consists of tuples each
with two integers denoting character-based (different from transformers LUKE) start and end positions
of entities. If you specify `"entity_classification"` or `"entity_pair_classification"` as the `task`
argument in the constructor, the length of each sequence must be 1 or 2, respectively. If you specify
`entities`, the length of each sequence must be equal to the length of each sequence of `entities`.
entity_spans_pair (`List[Tuple[int, int]]`, `List[List[Tuple[int, int]]]`, *optional*):
The sequence or batch of sequences of entity spans to be encoded. Each sequence consists of tuples each
with two integers denoting character-based start and end positions of entities. If you specify the
`task` argument in the constructor, this argument is ignored. If you specify `entities_pair`, the
length of each sequence must be equal to the length of each sequence of `entities_pair`.
entities (`List[str]`, `List[List[str]]`, *optional*):
The sequence or batch of sequences of entities to be encoded. Each sequence consists of strings
representing entities, i.e., special entities (e.g., [MASK]) or entity titles of Wikipedia (e.g., Los
Angeles). This argument is ignored if you specify the `task` argument in the constructor. The length of
each sequence must be equal to the length of each sequence of `entity_spans`. If you specify
`entity_spans` without specifying this argument, the entity sequence or the batch of entity sequences
is automatically constructed by filling it with the [MASK] entity.
entities_pair (`List[str]`, `List[List[str]]`, *optional*):
The sequence or batch of sequences of entities to be encoded. Each sequence consists of strings
representing entities, i.e., special entities (e.g., [MASK]) or entity titles of Wikipedia (e.g., Los
Angeles). This argument is ignored if you specify the `task` argument in the constructor. The length of
each sequence must be equal to the length of each sequence of `entity_spans_pair`. If you specify
`entity_spans_pair` without specifying this argument, the entity sequence or the batch of entity
sequences is automatically constructed by filling it with the [MASK] entity.
max_mention_length (`int`):
The maximum mention length; each row of `entity_position_ids` has this
length, with unused slots padded with -1.
max_seq_len (int, optional):
If set to a number, will limit the total sequence returned so
that it has a maximum length. If there are overflowing tokens,
those overflowing tokens will be added to the returned dictionary
when `return_overflowing_tokens` is `True`. Defaults to `None`.
stride (int, optional):
Only available for batch input of sequence pair and mainly for
question answering usage. When for QA, `text` represents questions
and `text_pair` represents contexts. If `stride` is set to a
positive number, the context will be split into multiple spans
where `stride` defines the number of (tokenized) tokens to skip
from the start of one span to get the next span, thus will produce
a bigger batch than inputs to include all spans. Moreover, 'overflow_to_sample'
and 'offset_mapping' preserving the original example and position
information will be added to the returned dictionary. Defaults to 0.
add_prefix_space (bool, optional):
The tokenizer will add a space at the beginning of the sentence when it is set to `True`.
Defaults to `False`.
pad_to_max_seq_len (bool, optional):
If set to `True`, the returned sequences would be padded up to
`max_seq_len` specified length according to padding side
(`self.padding_side`) and padding token id. Defaults to `False`.
truncation_strategy (str, optional):
String selected in the following options:
- 'longest_first' (default) Iteratively reduce the inputs sequence
until the input is under `max_seq_len` starting from the longest
one at each token (when there is a pair of input sequences).
- 'only_first': Only truncate the first sequence.
- 'only_second': Only truncate the second sequence.
- 'do_not_truncate': Do not truncate (raise an error if the input
sequence is longer than `max_seq_len`).
Defaults to 'longest_first'.
return_position_ids (bool, optional):
Whether to include tokens position ids in the returned dictionary.
Defaults to `True`.
return_token_type_ids (bool, optional):
Whether to include token type ids in the returned dictionary.
Defaults to `False`.
return_attention_mask (bool, optional):
Whether to include the attention mask in the returned dictionary.
Defaults to `True`.
return_length (bool, optional):
Whether to include the length of each encoded inputs in the
returned dictionary. Defaults to `False`.
return_overflowing_tokens (bool, optional):
Whether to include overflowing token information in the returned
dictionary. Defaults to `False`.
return_special_tokens_mask (bool, optional):
Whether to include special tokens mask information in the returned
dictionary. Defaults to `False`.
Returns:
dict or list[dict] (for batch input):
The dict has the following optional items:
- **input_ids** (list[int]): List of token ids to be fed to a model.
- **position_ids** (list[int], optional): List of token position ids to be
fed to a model. Included when `return_position_ids` is `True`
- **token_type_ids** (list[int], optional): List of token type ids to be
fed to a model. Included when `return_token_type_ids` is `True`.
- **attention_mask** (list[int], optional): List of integers valued 0 or 1,
where 0 specifies paddings and should not be attended to by the
model. Included when `return_attention_mask` is `True`.
- **entity_ids** (list[int]): List of token ids to be fed to a model. Included when
`entity_spans` is not `None`.
- **entity_position_ids** (list[int], optional): List of token position ids to be
fed to a model. Included when `entity_spans` is not `None`.
- **entity_segment_ids** (list[int], optional): List of token type ids to be
fed to a model. Included when `entity_spans` is not `None`.
- **entity_attention_mask** (list[int], optional): List of integers valued 0 or 1,
where 0 specifies paddings and should not be attended to by the
model. Included when `entity_spans` is not `None`.
- **seq_len** (int, optional): The input_ids length. Included when `return_length`
is `True`.
- **overflowing_tokens** (list[int], optional): List of overflowing tokens.
Included when `max_seq_len` is specified and `return_overflowing_tokens`
is `True`.
- **num_truncated_tokens** (int, optional): The number of overflowing tokens.
Included when `max_seq_len` is specified and `return_overflowing_tokens`
is `True`.
- **special_tokens_mask** (list[int], optional): List of integers valued 0 or 1,
with 0 specifying special added tokens and 1 specifying sequence tokens.
Included when `return_special_tokens_mask` is `True`.
- **offset_mapping** (list[int], optional): list of pair preserving the
index of start and end char in original input for each token.
For a special token, the index pair is `(0, 0)`. Included when
`stride` works.
- **overflow_to_sample** (int, optional): Index of example from which this
feature is generated. Included when `stride` works.
"""
global _add_prefix_space
if add_prefix_space:
_add_prefix_space = True
encode_output = super(LukeTokenizer, self).__call__(
text,
text_pair=text_pair,
max_seq_len=max_seq_len,
stride=stride,
is_split_into_words=is_split_into_words,
pad_to_max_seq_len=pad_to_max_seq_len,
truncation_strategy=truncation_strategy,
return_position_ids=return_position_ids,
return_token_type_ids=return_token_type_ids,
return_attention_mask=return_attention_mask,
return_length=return_length,
return_overflowing_tokens=return_overflowing_tokens,
return_special_tokens_mask=return_special_tokens_mask)
if not entity_spans:
return encode_output
is_batched = bool(
(not is_split_into_words and isinstance(text, (list, tuple))) or
(is_split_into_words and isinstance(text, (list, tuple)) and
text and isinstance(text[0], (list, tuple))))
if is_batched:
if entities is None:
entities = [None] * len(entity_spans)
for i, ent in enumerate(zip(entities, entity_spans, text)):
entity_encode = self.entity_encode(ent[2], ent[0],
max_mention_length, ent[1])
encode_output[i].update(entity_encode)
if entity_spans_pair:
if entities_pair is None:
entities_pair = [None] * len(entity_spans_pair)
for i, ent in enumerate(
zip(entities_pair, entity_spans_pair, text_pair)):
entity_encode = self.entity_encode(
ent[2], ent[0], max_mention_length, ent[1], 1,
encode_output[i]['input_ids'].index(self.sep_token_id) +
2)
for k in entity_encode.keys():
encode_output[i][k] = encode_output[i][
k] + entity_encode[k]
else:
entity_encode = self.entity_encode(text, entities,
max_mention_length, entity_spans)
encode_output.update(entity_encode)
if entity_spans_pair:
entity_encode = self.entity_encode(
text_pair, entities_pair, max_mention_length,
entity_spans_pair, 1,
encode_output['input_ids'].index(self.sep_token_id) + 2)
for k in entity_encode.keys():
encode_output[k] = encode_output[k] + entity_encode[k]
return encode_output
def tokenize(self, text, add_prefix_space=False):
"""
Tokenize a string.
Args:
text (str):
The sentence to be tokenized.
add_prefix_space (boolean, default False):
Begin the sentence with at least one space so that the first word is
tokenized the same way as it would be mid-sentence, since the GPT-2
(and Luke) byte-level BPE tokenizers are whitespace-sensitive.
"""
if _add_prefix_space:
add_prefix_space = True
def split_on_token(tok, text):
result = []
split_text = text.split(tok)
for i, sub_text in enumerate(split_text):
sub_text = sub_text.strip()
if i == 0 and not sub_text:
result += [tok]
elif i == len(split_text) - 1:
if sub_text:
result += [sub_text]
else:
pass
else:
if sub_text:
result += [sub_text]
result += [tok]
return result
def split_on_tokens(tok_list, text):
if not text.strip():
return []
if not tok_list:
return self._tokenize(text, add_prefix_space)
tokenized_text = []
text_list = [text]
for tok in tok_list:
tokenized_text = []
for sub_text in text_list:
if sub_text not in self.added_tokens_encoder \
and sub_text not in self._all_special_tokens:
tokenized_text += split_on_token(tok, sub_text)
else:
tokenized_text += [sub_text]
text_list = tokenized_text
return list(itertools.chain.from_iterable((self._tokenize(token, add_prefix_space) if token not \
in self.added_tokens_encoder and token not in self._all_special_tokens \
else [token] for token in tokenized_text)))
added_tokens = list(self.added_tokens_encoder.keys(
)) + self._all_special_tokens
tokenized_text = split_on_tokens(added_tokens, text)
return tokenized_text
def bpe(self, token):
if token in self.cache:
return self.cache[token]
word = tuple(token)
pairs = get_pairs(word)
if not pairs:
return token
while True:
bigram = min(
pairs, key=lambda pair: self.bpe_ranks.get(pair, float('inf')))
if bigram not in self.bpe_ranks:
break
first, second = bigram
new_word = []
i = 0
while i < len(word):
try:
j = word.index(first, i)
new_word.extend(word[i:j])
i = j
except ValueError:
new_word.extend(word[i:])
break
if word[i] == first and i < len(word) - 1 and word[i + 1] == second:
new_word.append(first + second)
i += 2
else:
new_word.append(word[i])
i += 1
new_word = tuple(new_word)
word = new_word
if len(word) == 1:
break
else:
pairs = get_pairs(word)
word = ' '.join(word)
self.cache[token] = word
return word
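# Usage sketch (the exact output depends on the merges.txt that was loaded, so
# the split shown is only an assumption): bpe() repeatedly merges the
# highest-ranked adjacent pair until none of the remaining pairs appears in the
# merge table, returning the subwords joined by spaces, e.g.
#   self.bpe('Ġlower') -> 'Ġlower' or 'Ġlow er', depending on the learned merges.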
def convert_tokens_to_string(self, tokens):
""" Converts a sequence of tokens (string) in a single string. """
text = ''.join(tokens)
text = bytearray([self.byte_decoder[c] for c in text]).decode(
'utf-8', errors=self.errors)
return text
def convert_tokens_to_ids(self, tokens):
if tokens is None:
return None
ids = []
for token in tokens:
ids.append(self._convert_token_to_id_with_added_voc(token))
return ids
def _convert_token_to_id_with_added_voc(self, token):
if token is None:
return None
if token in self.added_tokens_encoder:
return self.added_tokens_encoder[token]
return self._convert_token_to_id(token)
def add_special_tokens(self, token_list: Union[List[int], Dict]):
"""
Adding special tokens if you need.
Args:
token_list (List[int], Dict[List[int]]):
The special token list you provided. If you provide a Dict, the key of the Dict must
be "additional_special_tokens" and the value must be token list.
"""
if isinstance(token_list, dict):
token_list = token_list['additional_special_tokens']
encoder_dict = dict()
decoder_dict = dict()
for token in token_list:
encoder_dict[token] = len(self.encoder.keys())
decoder_dict[len(self.decoder.keys())] = token
self.added_tokens_encoder.update(encoder_dict)
self.added_tokens_decoder.update(decoder_dict)
def convert_entity_to_id(self, entity: str):
"""Convert the entity to id"""
if not self.entity_vocab.get(entity, None):
warnings.warn(f"{entity} not found in entity thesaurus")
return None
else:
return self.entity_vocab[entity]
def entity_encode(self,
text,
entities,
max_mention_length,
entity_spans,
ent_sep=0,
offset_a=1):
"""Convert the string entity to digital entity"""
def convert_tuple_to_list(x):
"""This function aim to convert tuple to list"""
if isinstance(x, tuple):
x = list(x)
for i, each_x in enumerate(x):
if isinstance(each_x, tuple):
x[i] = list(each_x)
return x
mentions = []
if entities:
for i, entity in enumerate(zip(entities, entity_spans)):
entity = convert_tuple_to_list(entity)
entity[1][0], entity[1][1] = self._convert_entity_pos(text,
entity[1])
if not self.entity_vocab.get(entity[0], None):
warnings.warn(f"{entity[0]} not found in entity thesaurus")
mentions.append((1, entity[1][0], entity[1][1]))
else:
mentions.append((self.entity_vocab[entity[0]], entity[1][0],
entity[1][1]))
else:
entities = [2] * len(entity_spans)
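# Note: the ids 1 (used above for entities missing from the vocabulary) and 2
# (used here as the fill value) are assumed to be the [UNK] and [MASK] entries
# of LUKE's entity vocabulary; when no entity names are given, every span is
# filled with the [MASK] entity, as described in the __call__ docstring.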
for i, entity in enumerate(zip(entities, entity_spans)):
entity = convert_tuple_to_list(entity)
entity[1][0], entity[1][1] = self._convert_entity_pos(text,
entity[1])
mentions.append((entity[0], entity[1][0], entity[1][1]))
entity_ids = [0] * len(mentions)
entity_segment_ids = [ent_sep] * len(mentions)
entity_attention_mask = [1] * len(mentions)
entity_position_ids = [[-1 for y in range(max_mention_length)]
for x in range(len(mentions))]
for i, (offset, (entity_id, start,
end)) in enumerate(zip(repeat(offset_a), mentions)):
entity_ids[i] = entity_id
entity_position_ids[i][:end - start] = range(start + offset,
end + offset)
return dict(
entity_ids=entity_ids,
entity_token_type_ids=entity_segment_ids,
entity_attention_mask=entity_attention_mask,
entity_position_ids=entity_position_ids)
def _convert_entity_pos(self, text, entity_span):
text_token = self.tokenize(text[0:entity_span[0]].strip())
entity_token = self.tokenize(text[entity_span[0]:entity_span[1]].strip(
))
return len(text_token), len(text_token) + len(entity_token)
def get_offset_mapping(self, text):
tokens = self._tokenize(text)
offset_mapping = []
offset = 0
for token in tokens:
if token[0] == 'Ġ':
offset_mapping.append((offset + 1, offset + len(token)))
else:
offset_mapping.append((offset, offset + len(token)))
offset += len(token)
return offset_mapping
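# Note: tokens prefixed with 'Ġ' encode a leading space, so the mapping above
# starts one character later for those tokens; the returned offsets therefore
# point at the visible characters of the original text rather than the space.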
def create_token_type_ids_from_sequences(self,
token_ids_0,
token_ids_1=None):
"""
Create a mask from the two sequences passed to be used in a sequence-pair classification task.
A Luke sequence pair mask has the following format:
::
0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1
| first sequence | second sequence |
If :obj:`token_ids_1` is :obj:`None`, this method only returns the first portion of the mask (0s).
Args:
token_ids_0 (List[int]):
A list of `inputs_ids` for the first sequence.
token_ids_1 (List[int], optional):
Optional second list of IDs for sequence pairs. Defaults to None.
Returns:
List[int]: List of token_type_id according to the given sequence(s).
"""
_sep = [self.sep_token_id]
_cls = [self.cls_token_id]
if token_ids_1 is None:
return len(_cls + token_ids_0 + _sep) * [0]
return len(_cls + token_ids_0 + _sep) * [0] + len(_sep + token_ids_1 +
_sep) * [1]
def num_special_tokens_to_add(self, pair=False):
"""
Returns the number of added tokens when encoding a sequence with special tokens.
Args:
pair(bool):
Whether the input is a sequence pair or a single sequence.
Defaults to `False` and the input is a single sequence.
Returns:
int: Number of tokens added to sequences.
"""
token_ids_0 = []
token_ids_1 = []
return len(
self.build_inputs_with_special_tokens(token_ids_0, token_ids_1
if pair else None))
def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
"""
Build model inputs from a sequence or a pair of sequence for sequence classification
tasks by concatenating and adding special tokens.
"""
_cls = [self.cls_token_id]
_sep = [self.sep_token_id]
if token_ids_1 is None:
return _cls + token_ids_0 + _sep
return _cls + token_ids_0 + _sep + _sep + token_ids_1 + _sep
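# Resulting layouts (RoBERTa-style, using the ids of '<s>' and '</s>'):
#   single sequence: <s> token_ids_0 </s>
#   sequence pair:   <s> token_ids_0 </s> </s> token_ids_1 </s>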
```
#### File: transformers/nystromformer/utils.py
```python
import paddle
def gelu_python(x):
return x * 0.5 * (1.0 + paddle.erf(x / paddle.sqrt(2.0)))
def gelu_new(x):
return 0.5 * x * (1.0 + paddle.tanh(
paddle.sqrt(2.0 / 3.141592653589793) *
(x + 0.044715 * paddle.pow(x, 3.0))))
def gelu_fast(x):
return 0.5 * x * (1.0 + paddle.tanh(x * 0.7978845608 *
(1.0 + 0.044715 * x * x)))
def quick_gelu(x):
return x * paddle.nn.functional.sigmoid(1.702 * x)
def linear_act(x):
return x
ACT2FN = {
"relu": paddle.nn.functional.relu,
"silu": paddle.nn.functional.silu,
"swish": paddle.nn.functional.silu,
"gelu": paddle.nn.functional.gelu,
"tanh": paddle.tanh,
"gelu_python": gelu_python,
"gelu_new": gelu_new,
"gelu_fast": gelu_fast,
"quick_gelu": quick_gelu,
"mish": paddle.nn.functional.mish,
"linear": linear_act,
"sigmoid": paddle.nn.functional.sigmoid,
}
def get_activation(activation_string):
if activation_string in ACT2FN:
return ACT2FN[activation_string]
else:
raise KeyError(
f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}"
)
def apply_chunking_to_forward(forward_fn, chunk_size, chunk_dim,
*input_tensors):
assert len(
input_tensors) > 0, f"{input_tensors} has to be a tuple/list of tensors"
if chunk_size > 0:
tensor_shape = input_tensors[0].shape[chunk_dim]
for input_tensor in input_tensors:
if input_tensor.shape[chunk_dim] != tensor_shape:
raise ValueError(
f"All input tenors have to be of the same shape: {tensor_shape}, "
f"found shape {input_tensor.shape[chunk_dim]}")
if input_tensors[0].shape[chunk_dim] % chunk_size != 0:
raise ValueError(
f"The dimension to be chunked {input_tensors[0].shape[chunk_dim]} has to be a multiple of the chunk "
f"size {chunk_size}")
num_chunks = input_tensors[0].shape[chunk_dim] // chunk_size
input_tensors_chunks = tuple(
input_tensor.chunk(
num_chunks, dim=chunk_dim) for input_tensor in input_tensors)
output_chunks = tuple(
forward_fn(*input_tensors_chunk)
for input_tensors_chunk in zip(*input_tensors_chunks))
return paddle.concat(output_chunks, axis=chunk_dim)
return forward_fn(*input_tensors)
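# Usage sketch (feed_forward is a hypothetical chunk-wise callable): split the
# inputs into chunks of size `chunk_size` along `chunk_dim`, run forward_fn on
# each chunk independently, and concatenate the results, e.g.
#   output = apply_chunking_to_forward(feed_forward, 128, 1, hidden_states)
# With chunk_size == 0 the function simply calls forward_fn on the full tensors.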
def get_extended_attention_mask(attention_mask, input_shape):
if attention_mask.dim() == 3:
extended_attention_mask = attention_mask[:, None, :, :]
elif attention_mask.dim() == 2:
extended_attention_mask = attention_mask[:, None, None, :]
else:
raise ValueError(
f"Wrong shape for input_ids (shape {input_shape}) or attention_mask (shape {attention_mask.shape})"
)
extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
return extended_attention_mask
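# Note: the returned mask is additive and broadcastable against the attention
# scores of shape [batch, num_heads, from, to]: positions to attend keep 0.0,
# masked positions become -10000.0, so it can be added directly before softmax.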
def trans_matrix(matrix):
dim = matrix.ndim
trans_list = [i for i in range(dim - 2)] + [dim - 1, dim - 2]
return matrix.transpose(trans_list)
def update_metrics(logits, labels, metrics):
for metric in metrics:
metric.update(logits.argmax(axis=1), labels)
def get_f1_score(precision, recall):
p, r = precision.accumulate(), recall.accumulate()
return 2 * p * r / (p + r)
```
#### File: transformers/semantic_indexing/modeling.py
```python
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from ..ernie.modeling import ErniePretrainedModel
__all__ = ['ErnieDualEncoder']
class ErnieEncoder(ErniePretrainedModel):
def __init__(self, ernie):
super(ErnieEncoder, self).__init__()
self.ernie = ernie # allow ernie to be config
self.apply(self.init_weights)
def init_weights(self, layer):
""" Initialization hook """
if isinstance(layer, nn.LayerNorm):
layer._epsilon = 1e-5
def forward(self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None):
sequence_output, _ = self.ernie(
input_ids,
token_type_ids=token_type_ids,
position_ids=position_ids,
attention_mask=attention_mask)
# Outputs pooled_embedding
pooled_output = sequence_output[:, 0]
return pooled_output
class ErnieDualEncoder(nn.Layer):
"""
This class encapsulates two ErnieEncoder models into one model, so query
embedding and title embedding could be obtained using one model. And this
class allows two ErnieEncoder models to be trained at the same time.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import ErnieDualEncoder, ErnieTokenizer
model = ErnieDualEncoder("rocketqa-zh-dureader-query-encoder", "rocketqa-zh-dureader-para-encoder")
tokenizer = ErnieTokenizer.from_pretrained("rocketqa-zh-dureader-query-encoder")
inputs = tokenizer("Welcome to use PaddlePaddle and PaddleNLP!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
# Get query embedding
query_embedding = model.get_pooled_embedding(**inputs)
# Get title embedding
title_embedding = model.get_pooled_embedding(**inputs, is_query=False)
"""
def __init__(self,
query_model_name_or_path,
title_model_name_or_path=None,
share_parameters=False,
dropout=None,
use_cross_batch=False):
super().__init__()
self.query_ernie, self.title_ernie = None, None
self.use_cross_batch = use_cross_batch
self.query_ernie = ErnieEncoder.from_pretrained(
query_model_name_or_path)
if share_parameters:
self.title_ernie = self.query_ernie
elif title_model_name_or_path is not None:
self.title_ernie = ErnieEncoder.from_pretrained(
title_model_name_or_path)
self.dropout = nn.Dropout(dropout if dropout is not None else 0.1)
def get_semantic_embedding(self, data_loader):
self.eval()
with paddle.no_grad():
for batch_data in data_loader:
input_ids, token_type_ids = batch_data
input_ids = paddle.to_tensor(input_ids)
token_type_ids = paddle.to_tensor(token_type_ids)
text_embeddings = self.get_pooled_embedding(
input_ids, token_type_ids=token_type_ids)
yield text_embeddings
def get_pooled_embedding(self,
input_ids,
token_type_ids=None,
position_ids=None,
attention_mask=None,
is_query=True):
assert (is_query and self.query_ernie is not None) or (not is_query and self.title_ernie), \
"Please check whether your `is_query` argument is consistent with the DualEncoder initialization."
if is_query:
pooled_embedding = self.query_ernie(input_ids, token_type_ids,
position_ids, attention_mask)
else:
pooled_embedding = self.title_ernie(input_ids, token_type_ids,
position_ids, attention_mask)
return pooled_embedding
def cosine_sim(self,
query_input_ids,
title_input_ids,
query_token_type_ids=None,
query_position_ids=None,
query_attention_mask=None,
title_token_type_ids=None,
title_position_ids=None,
title_attention_mask=None):
query_cls_embedding = self.get_pooled_embedding(
query_input_ids, query_token_type_ids, query_position_ids,
query_attention_mask)
title_cls_embedding = self.get_pooled_embedding(
title_input_ids,
title_token_type_ids,
title_position_ids,
title_attention_mask,
is_query=False)
cosine_sim = paddle.sum(query_cls_embedding * title_cls_embedding,
axis=-1)
return cosine_sim
def forward(self,
query_input_ids,
pos_title_input_ids,
neg_title_input_ids,
is_prediction=False,
query_token_type_ids=None,
query_position_ids=None,
query_attention_mask=None,
pos_title_token_type_ids=None,
pos_title_position_ids=None,
pos_title_attention_mask=None,
neg_title_token_type_ids=None,
neg_title_position_ids=None,
neg_title_attention_mask=None):
query_cls_embedding = self.get_pooled_embedding(
query_input_ids, query_token_type_ids, query_position_ids,
query_attention_mask)
pos_title_cls_embedding = self.get_pooled_embedding(
pos_title_input_ids, pos_title_token_type_ids,
pos_title_position_ids, pos_title_attention_mask)
neg_title_cls_embedding = self.get_pooled_embedding(
neg_title_input_ids, neg_title_token_type_ids,
neg_title_position_ids, neg_title_attention_mask)
all_title_cls_embedding = paddle.concat(
x=[pos_title_cls_embedding, neg_title_cls_embedding], axis=0)
if is_prediction:
logits = paddle.dot(query_cls_embedding, pos_title_cls_embedding)
outputs = {
"probs": logits,
"q_rep": query_cls_embedding,
"p_rep": pos_title_cls_embedding
}
return outputs
if self.use_cross_batch:
tensor_list = []
paddle.distributed.all_gather(tensor_list, all_title_cls_embedding)
all_title_cls_embedding = paddle.concat(x=tensor_list, axis=0)
# multiply
logits = paddle.matmul(
query_cls_embedding, all_title_cls_embedding, transpose_y=True)
batch_size = query_cls_embedding.shape[0]
labels = paddle.arange(
batch_size * self.rank * 2,
batch_size * (self.rank * 2 + 1),
dtype='int64')
labels = paddle.reshape(labels, shape=[-1, 1])
accuracy = paddle.metric.accuracy(input=logits, label=labels)
loss = F.cross_entropy(input=logits, label=labels)
outputs = {"loss": loss, "accuracy": accuracy}
return outputs
```
#### File: transformers/auto/modeling_test.py
```python
import paddle
from paddlenlp.transformers.auto.modeling import *
from paddlenlp.transformers import *
import warnings
def from_built_in_model():
print('From_built_in_models:-------------------------------')
# model test
model = AutoModel.from_pretrained('bert-base-uncased')
print(type(model))
#model = AutoModel.from_pretrained('unimo-text-1.0')
#model = AutoModel.from_pretrained('plato-mini')
#model = AutoModel.from_pretrained('unified_transformer-12L-cn')
# pretraining test
#model = AutoModelForPretraining.from_pretrained('roformer-chinese-small')
model = AutoModel.from_pretrained(
'roformer-chinese-small', task='ForPretraining')
print(type(model))
#model = AutoModelForPretraining.from_pretrained('tinybert-4l-312d')
# lm_head test
#model = AutoModelWithLMHead.from_pretrained('gpt-cpm-large-cn')
#print(type(model))
#model = AutoModelWithLMHead.from_pretrained('unified_transformer-12L-cn')
#model = AutoModelWithLMHead.from_pretrained('unimo-text-1.0')
# masked lm test
model = AutoModelForMaskedLM.from_pretrained('albert-base-v1')
print(type(model))
#model = AutoModelForMaskedLM.from_pretrained('bart-base')
#model = AutoModelForMaskedLM.from_pretrained('distilbert-base-uncased')
#model = AutoModelForMaskedLM.from_pretrained('mpnet-base')
# sequence_classification test
model = AutoModelForSequenceClassification.from_pretrained('rbt3')
print(type(model))
#model = AutoModelForSequenceClassification.from_pretrained('roberta-wwm-ext')
#model = AutoModelForSequenceClassification.from_pretrained('roformer-chinese-small')
# multiple choice test
model = AutoModelForMultipleChoice.from_pretrained('albert-base-v1')
print(type(model))
#model = AutoModelForMultipleChoice.from_pretrained('nezha-base-chinese')
# QA test
model = AutoModelForQuestionAnswering.from_pretrained('nezha-base-chinese')
print(type(model))
#model = AutoModelForQuestionAnswering.from_pretrained('ernie-1.0')
#model = AutoModelForQuestionAnswering.from_pretrained('ernie-gram-zh')
# token_classification test
model = AutoModelForTokenClassification.from_pretrained(
'electra-small', num_classes=2)
print(type(model))
#model = AutoModelForTokenClassification.from_pretrained('rbt3')
#model = AutoModelForTokenClassification.from_pretrained('skep_ernie_1.0_large_ch')
#model = AutoModelForTokenClassification.from_pretrained('plato-mini')
# encoder, decoder test
model = AutoDecoder.from_pretrained("bart-base", vocab_size=20000)
print(type(model))
model = AutoEncoder.from_pretrained("bart-base", vocab_size=20000)
print(type(model))
#model = AutoEncoder.from_pretrained("bart-base", vocab_size = 20000)
# discriminator, generator test
model = AutoGenerator.from_pretrained("convbert-base")
print(type(model))
model = AutoDiscriminator.from_pretrained("convbert-base")
print(type(model))
#model = AutoModelForPretraining.from_pretrained('roberta-wwm-ext')
#model = AutoGenerator.from_pretrained("electra-small")
#model = AutoDiscriminator.from_pretrained("convbert-base")
#model = AutoDiscriminator.from_pretrained("electra-small")
# CausalLM test
model = AutoModelForCausalLM.from_pretrained('blenderbot-3B')
print(type(model))
model = AutoModelForConditionalGeneration.from_pretrained(
'blenderbot_small-90M')
print(type(model))
model = AutoModelForCausalLM.from_pretrained('blenderbot_small-90M')
print(type(model))
def from_local_dir():
print('From_local_dir:-----------------------------------------')
model = AutoModel.from_pretrained('saved_model/my_bert_model')
print(type(model))
model = AutoModelForSequenceClassification.from_pretrained(
'saved_model/my_bert_model_for_pretraining')
print(type(model))
def from_community_model():
print('From_community_models:---------------------------------')
#model = AutoModelForSequenceClassification.from_pretrained(
# 'yingyibiao/bert-base-uncased-sst-2-finetuned')
#print(type(model))
model = AutoModelForSequenceClassification.from_pretrained(
'junnyu/ckiplab-bert-base-chinese-ner')
print(type(model))
if __name__ == '__main__':
from_built_in_model()
from_local_dir()
from_community_model()
```
|
{
"source": "Jeremy-zj/spider",
"score": 3
}
|
#### File: season_one/part_1/part_1_2.py
```python
import requests
import re
def get_one():
"""
Send a GET request
:return:
"""
r = requests.get('')
print(r.text)
def get_two():
"""
Send a GET request with additional query parameters
:return:
"""
data = dict(name='', age=0)
r = requests.get('', params=data)
print(r.text)
print(r.json())
print(type(r.json()))
def set_header():
"""
Set request headers
:return:
"""
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36'
headers = {
'User-Agent': USER_AGENT,
}
r = requests.get('https://www.zhihu.com/explore', headers=headers)
pattern = re.compile('explore-feed.*?question_link.*?>(.*?)</a>', re.S)
titles = re.findall(pattern, r.text)
print(titles)
def get_file_to_save():
"""
Fetch and save binary files: images, audio, video
:return:
"""
url = 'https://www.crummy.com/software/BeautifulSoup/bs4/doc/_images/6.1.jpg'
r = requests.get(url)
with open("D://x.jpg", 'wb') as f:
f.write(r.content)
print("success")
def post_one():
"""
Send a POST request
:return:
"""
data = {"name": '', 'age': 0}
r = requests.post('', data=data)
print(r.text)
```
|
{
"source": "JeremyZoss/industrial_training",
"score": 3
}
|
#### File: myworkcell_support/launch/workcell.launch.py
```python
import os
import yaml
import xacro
from launch import LaunchDescription
from launch_ros.actions import Node
from ament_index_python import get_package_share_directory
def get_package_file(package, file_path):
"""Get the location of a file installed in an ament package"""
package_path = get_package_share_directory(package)
absolute_file_path = os.path.join(package_path, file_path)
return absolute_file_path
def load_file(file_path):
"""Load the contents of a file into a string"""
try:
with open(file_path, 'r') as file:
return file.read()
except EnvironmentError: # parent of IOError, OSError *and* WindowsError where available
return None
def load_yaml(file_path):
"""Load a yaml file into a dictionary"""
try:
with open(file_path, 'r') as file:
return yaml.safe_load(file)
except EnvironmentError: # parent of IOError, OSError *and* WindowsError where available
return None
def run_xacro(xacro_file):
"""Run xacro and output a file in the same directory with the same name, w/o a .xacro suffix"""
urdf_file, ext = os.path.splitext(xacro_file)
if ext != '.xacro':
raise RuntimeError(f'Input file to xacro must have a .xacro extension, got {xacro_file}')
os.system(f'xacro {xacro_file} -o {urdf_file}')
return urdf_file
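# Usage sketch (the path is hypothetical): run_xacro('/tmp/workcell.urdf.xacro')
# shells out to the xacro CLI, writes '/tmp/workcell.urdf' next to the input,
# and returns that generated URDF path for the file loaders below.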
def generate_launch_description():
xacro_file = get_package_file('myworkcell_support', 'urdf/workcell.urdf.xacro')
urdf_file = run_xacro(xacro_file)
srdf_file = get_package_file('myworkcell_moveit_config', 'config/myworkcell.srdf')
kinematics_file = get_package_file('myworkcell_moveit_config', 'config/kinematics.yaml')
ompl_config_file = get_package_file('myworkcell_moveit_config', 'config/ompl_planning.yaml')
joint_limits_file = get_package_file('myworkcell_moveit_config','config/joint_limits.yaml')
moveit_controllers_file = get_package_file('myworkcell_moveit_config', 'config/controllers.yaml')
robot_description = load_file(urdf_file)
robot_description_semantic = load_file(srdf_file)
kinematics_config = load_yaml(kinematics_file)
ompl_config = load_yaml(ompl_config_file)
joint_limits_config = load_yaml(joint_limits_file)
# Setting up MoveitCpp configuration parameters
moveit_controllers = {
'moveit_simple_controller_manager' : load_yaml(moveit_controllers_file),
'moveit_controller_manager': 'moveit_simple_controller_manager/MoveItSimpleControllerManager'
}
trajectory_execution = {
'moveit_manage_controllers': True,
'trajectory_execution.allowed_execution_duration_scaling': 1.2,
'trajectory_execution.allowed_goal_duration_margin': 0.5,
'trajectory_execution.allowed_start_tolerance': 0.01
}
planning_scene_monitor_config = {
'publish_planning_scene': True,
'publish_geometry_updates': True,
'publish_state_updates': True,
'publish_transforms_updates': True
}
moveit_cpp_config = yaml.load("""
planning_scene_monitor_options:
name: "planning_scene_monitor"
robot_description: "robot_description"
joint_state_topic: "/joint_states"
attached_collision_object_topic: "/moveit_cpp/planning_scene_monitor"
publish_planning_scene_topic: "/moveit_cpp/publish_planning_scene"
monitored_planning_scene_topic: "/moveit_cpp/monitored_planning_scene"
wait_for_initial_state_timeout: 10.0
planning_pipelines:
#namespace: "moveit_cpp" # optional, default is ~
pipeline_names: ["ompl"]
plan_request_params:
planning_time: 10.0
planning_attempts: 3
planning_pipeline: ompl
max_velocity_scaling_factor: 0.5
max_acceleration_scaling_factor: 0.5
# octomap parameters (when used)
octomap_frame: world
octomap_resolution: 0.01
max_range: 5.0""")
return LaunchDescription([
Node(
name='myworkcell_node',
package='myworkcell_core',
executable='myworkcell_node',
output='screen',
parameters=[
{
'base_frame': 'world',
'robot_description': robot_description,
'robot_description_semantic': robot_description_semantic,
'robot_description_kinematics': kinematics_config,
'robot_description_planning' : joint_limits_config,
'planning_pipelines': ['ompl'],
'ompl': ompl_config
},
moveit_cpp_config,
moveit_controllers,
trajectory_execution,
planning_scene_monitor_config,
],
),
Node(
name='fake_ar_publisher_node',
package='fake_ar_publisher',
executable='fake_ar_publisher_node',
output='screen',
),
Node(
name='vision_node',
package='myworkcell_core',
executable='vision_node',
output='screen',
),
])
```
|
{
"source": "jerenner/stempy",
"score": 3
}
|
#### File: stempy/examples/create_images.py
```python
import sys
from pathlib import Path
import click
import stempy.io as stio
import stempy.image as stim
@click.command()
@click.option('-i', '--input-path', help='HDF5 file containing the electron counts',
type=click.Path(exists=True, dir_okay=False), default=None, show_default=True)
@click.argument('scan-num', required=False)
def main(input_path, scan_num):
if input_path is None:
if scan_num is None:
raise click.ClickException('Please provide scan number')
input_path = Path(f'/mnt/hdd1/data_scan{scan_num}_th4_electrons.h5')
# Load the electron counted data
ee = stio.load_electron_counts(str(input_path))
# Create STEM images with inner and outer radii
ims = stim.create_stem_images(ee, (0, 0, 110, 220),
(110, 220, 240, 288),
center=(307, 282))
# Calculate summed diffraction pattern
dp = stim.calculate_sum_sparse(ee.data, ee.frame_dimensions)
if __name__ == '__main__':
main()
```
#### File: stempy/tests/conftest.py
```python
import io
import pytest
import requests
DATA_URLS = {
'electron_small': 'https://data.kitware.com/api/v1/file/6065f00d2fa25629b93bdabe/download', # noqa
'electron_large': 'https://data.kitware.com/api/v1/file/6065f2792fa25629b93c0303/download', # noqa
}
DATA_RESPONSES = {}
def response(key):
if key not in DATA_RESPONSES:
r = requests.get(DATA_URLS[key])
r.raise_for_status()
DATA_RESPONSES[key] = r
return DATA_RESPONSES[key]
def io_object(key):
r = response(key)
return io.BytesIO(r.content)
@pytest.fixture
def electron_data_small():
return io_object('electron_small')
@pytest.fixture
def electron_data_large():
return io_object('electron_large')
```
|
{
"source": "JerEpoch/funquotesite",
"score": 2
}
|
#### File: JerEpoch/funquotesite/magic.py
```python
from datetime import datetime
import argparse
import HTMLParser
import json
import os
import sys
import urllib2
###############################################################################
# Options
###############################################################################
MAGIC_URL = 'http://magic.gae-init.appspot.com'
PARSER = argparse.ArgumentParser(description='Visit %s for more.' % MAGIC_URL)
PARSER.add_argument(
'-p', '--project', dest='project_id', action='store',
help='project ID of the project that you want to sync',
)
PARSER.add_argument(
'-r', '--remote', dest='remote_url', action='store', default=MAGIC_URL,
help="set the remote URL if it's not http://magic.gae-init.appspot.com",
)
ARGS = PARSER.parse_args()
###############################################################################
# Constants
###############################################################################
DIR_MAIN = 'main'
DIR_CONTROL = os.path.join(DIR_MAIN, 'control')
FILE_CONTROL_INIT = os.path.join(DIR_CONTROL, '__init__.py')
DIR_MODEL = os.path.join(DIR_MAIN, 'model')
FILE_MODEL_INIT = os.path.join(DIR_MODEL, '__init__.py')
DIR_API = os.path.join(DIR_MAIN, 'api', 'v1')
FILE_API_INIT = os.path.join(DIR_API, '__init__.py')
DIR_TEMPLATES = os.path.join(DIR_MAIN, 'templates')
FILE_HEADER = os.path.join(DIR_TEMPLATES, 'bit', 'header.html')
FILE_ADMIN = os.path.join(DIR_TEMPLATES, 'admin', 'admin.html')
###############################################################################
# Helpers
###############################################################################
def print_out(script, filename=''):
timestamp = datetime.now().strftime('%H:%M:%S')
if not filename:
filename = '-' * 46
script = script.rjust(12, '-')
print '[%s] %12s %s' % (timestamp, script, filename)
def make_dirs(directory):
directory = os.path.dirname(directory)
if not os.path.exists(directory):
os.makedirs(directory)
def append_to(project_url, destination):
url = ('%smagic/%s' % (project_url, destination)).replace('\\', '/')
response = urllib2.urlopen(url)
if response.getcode() == 200:
with open(destination, 'r') as dest:
lines = ''.join(dest.readlines())
content = response.read()
if content in lines:
print_out('IGNORED', destination)
return
with open(destination, 'a') as dest:
dest.write(content)
print_out('APPEND', destination)
def safe_text(text):
return (HTMLParser.HTMLParser().unescape(text.decode('utf8'))).encode('utf8')
def insert_to(project_url, destination, find_what, indent=0):
url = ('%smagic/%s' % (project_url, destination)).replace('\\', '/')
response = urllib2.urlopen(url)
if response.getcode() == 200:
with open(destination, 'r') as dest:
dest_contents = dest.readlines()
lines = ''.join(dest_contents)
content = safe_text(response.read())
if content.replace(' ', '') in lines.replace(' ', ''):
print_out('IGNORED', destination)
return
generated = []
for line in dest_contents:
generated.append(line)
if line.lower().find(find_what.lower()) >= 0:
spaces = len(line) - len(line.lstrip())
for l in content.split('\n'):
if l:
generated.append('%s%s\n' % (' ' * (spaces + indent), l))
with open(destination, 'w') as dest:
for line in generated:
dest.write(line)
print_out('INSERT', destination)
def create_file(project_url, destination):
make_dirs(destination)
url = ('%smagic/%s' % (project_url, destination)).replace('\\', '/')
response = urllib2.urlopen(url)
if response.getcode() == 200:
with open(destination, 'w') as dest:
dest.write(safe_text(response.read()))
dest.write('\n')
print_out('CREATE', destination)
def get_project_db():
url = '%s/api/v1/project/%s/' % (ARGS.remote_url, ARGS.project_id.split('/')[0])
response = urllib2.urlopen(url)
if response.getcode() == 200:
project_body = response.read()
project_db = json.loads(project_body)['result']
project_db['project_url'] = url
return project_db
return None
def sync_from_magic(project_db):
model_dbs = {}
project_url = project_db['project_url']
model_url = '%smodel/' % project_url
response = urllib2.urlopen(model_url)
if response.getcode() == 200:
models_body = response.read()
model_dbs = json.loads(models_body)['result']
print_out('UPDATING')
append_to(project_url, FILE_MODEL_INIT)
append_to(project_url, FILE_CONTROL_INIT)
append_to(project_url, FILE_API_INIT)
insert_to(project_url, FILE_HEADER, '<ul class="nav navbar-nav">', 2)
insert_to(project_url, FILE_ADMIN, "url_for('user_list'")
for index, model_db in enumerate(model_dbs):
print_out('%d of %d' % (index + 1, project_db['model_count']))
name = model_db['variable_name']
create_file(project_url, os.path.join(DIR_MODEL, '%s.py' % name))
create_file(project_url, os.path.join(DIR_CONTROL, '%s.py' % name))
create_file(project_url, os.path.join(DIR_API, '%s.py' % name))
root = os.path.join(DIR_TEMPLATES, name)
create_file(project_url, os.path.join(root, 'admin_%s_update.html' % name))
create_file(project_url, os.path.join(root, 'admin_%s_list.html' % name))
if model_db['has_view']:
create_file(project_url, os.path.join(root, '%s_view.html' % name))
create_file(project_url, os.path.join(root, '%s_list.html' % name))
if model_db['has_update']:
create_file(project_url, os.path.join(root, '%s_update.html' % name))
###############################################################################
# Main
###############################################################################
def magic():
if len(sys.argv) == 1:
PARSER.print_help()
sys.exit(1)
os.chdir(os.path.dirname(os.path.realpath(__file__)))
if ARGS.project_id:
project_db = get_project_db()
answer = raw_input(
'Are you sure you want to sync "%(name)s" with %(model_count)d '
'model(s) that was modified on %(modified)s? (Y/n): '
% {
'name': project_db['name'],
'model_count': project_db['model_count'],
'modified': project_db['modified'][:16].replace('T', ' at '),
}
)
if not answer or answer.lower() == 'y':
sync_from_magic(project_db)
else:
print 'Project ID is not provided.'
PARSER.print_help()
if __name__ == '__main__':
magic()
```
|
{
"source": "jereques/octodns",
"score": 2
}
|
#### File: jereques/octodns/setup.py
```python
from io import StringIO
from os import environ
from os.path import dirname, join
from subprocess import CalledProcessError, check_output
import octodns
try:
from setuptools import find_packages, setup
except ImportError:
from distutils.core import find_packages, setup
cmds = (
'compare',
'dump',
'report',
'sync',
'validate',
'versions',
)
cmds_dir = join(dirname(__file__), 'octodns', 'cmds')
console_scripts = {
'octodns-{name} = octodns.cmds.{name}:main'.format(name=name)
for name in cmds
}
def long_description():
buf = StringIO()
yaml_block = False
supported_providers = False
with open('README.md') as fh:
for line in fh:
if line == '```yaml\n':
yaml_block = True
continue
elif yaml_block and line == '---\n':
# skip the line
continue
elif yaml_block and line == '```\n':
yaml_block = False
continue
elif supported_providers:
if line.startswith('## '):
supported_providers = False
# write this line out, no continue
else:
# We're ignoring this one
continue
elif line == '## Supported providers\n':
supported_providers = True
continue
buf.write(line)
return buf.getvalue()
def version():
# pep440 style public & local version numbers
if environ.get('OCTODNS_RELEASE', False):
# public
return octodns.__VERSION__
try:
sha = check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8')[:8]
except (CalledProcessError, FileNotFoundError):
sha = 'unknown'
# local
return f'{octodns.__VERSION__}+{sha}'
tests_require = (
'pytest>=6.2.5',
'pytest-cov>=3.0.0',
'pytest-network>=0.0.1',
)
setup(
author='<NAME>',
author_email='<EMAIL>',
description=octodns.__doc__,
entry_points={
'console_scripts': console_scripts,
},
extras_require={
'dev': tests_require + (
'build>=0.7.0',
'pycodestyle>=2.6.0',
'pycountry>=19.8.18',
'pycountry-convert>=0.7.2',
'pyflakes>=2.2.0',
'readme_renderer[md]>=26.0',
'twine>=3.4.2',
),
},
install_requires=(
'PyYaml>=4.2b1',
'dnspython>=1.15.0',
'fqdn>=1.5.0',
'natsort>=5.5.0',
'python-dateutil>=2.8.1',
),
license='MIT',
long_description=long_description(),
long_description_content_type='text/markdown',
name='octodns',
packages=find_packages(),
python_requires='>=3.6',
tests_require=tests_require,
url='https://github.com/octodns/octodns',
version=version(),
)
```
|
{
"source": "JereVat/BudgetApp",
"score": 3
}
|
#### File: BudgetApp/app/calculations.py
```python
import numpy as np
from decimal import Decimal
def calculate(net_income, saving_time, target_wealth, beginning_wealth, target_interest):
inflation = 0.011
beginning_wealth = int(beginning_wealth)
target_wealth = int(target_wealth)
target_interest = 5 / 100 - inflation
saving_time_m = int(saving_time) * 12
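# Note: numpy.pmt was deprecated in NumPy 1.18 and removed in 1.20; on newer
# NumPy versions the equivalent call is numpy_financial.pmt with the same
# arguments (rate, nper, pv, fv, when).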
target_interest_m = float(target_interest) / 12
result = abs(np.pmt(target_interest_m, saving_time_m, beginning_wealth, target_wealth, 1))
# result = target_interest_m / ((1 + target_interest_m)**saving_time_m - 1) * (beginning_wealth * (1 + target_interest_m)**saving_time_m + target_wealth) / (1 + target_interest_m)
result = Decimal(result)
result = round(result, 2)
return result
```
|
{
"source": "Jerey/image-to-pdf-and-txt",
"score": 3
}
|
#### File: image-to-pdf-and-txt/imagetopdfandtxt/main.py
```python
import argparse
import os.path
import warnings
import imagetopdfandtxt.text_extractor as text_extractor
import imagetopdfandtxt.picture_rotator as picture_rotator
import imagetopdfandtxt.pdf_creator as pdf_creator
import imagetopdfandtxt.helper_utils as helper_utils
AP = argparse.ArgumentParser(description="""Takes one to many images, tries to extract text from the
images and stores it in a folder including the input image.""")
GROUP = AP.add_mutually_exclusive_group(required=True)
GROUP.add_argument("-i", "--image", action="append", dest="images", help="""Images, from
which text shall be extracted. Can be added multiple times.""")
GROUP.add_argument("-f", "--folder", dest="folder", help="""Add a directory,
where the pictures are stored to be parsed.""")
ARGS = vars(AP.parse_args())
def main():
list_of_pdfs = []
if ARGS["folder"] != None:
for path, _, files in os.walk(ARGS["folder"]):
currentpdf = []
for name in files:
currentpdf.append(os.path.join(path, name))
if len(currentpdf) != 0:
currentpdf.sort()
list_of_pdfs.append(currentpdf)
else:
list_of_pdfs = ARGS["images"]
for pdf in list_of_pdfs:
print("---------------------------")
filename = ""
if ARGS["images"] != None:
pdf = pdf.split(',')
filename = pdf[0]
else:
filename = os.path.basename(os.path.dirname(pdf[0]))
for idx, page in enumerate(pdf):
print(f"Working on '{page}'. That is page {idx+1}/{len(pdf)}.")
if os.path.isfile(page):
pdf[idx] = picture_rotator.rotate_image_based_on_text(page)
else:
warnings.warn(f"Not a file: '{page}'! Doing nothing with it.. ")
output_directory = "result/"
if not os.path.exists(output_directory):
os.makedirs(output_directory)
text_output = helper_utils.get_output_name_from_input(filename, output_directory, ".txt")
text_extractor.extract_text_from_file(pdf, text_output)
pdf_output = helper_utils.get_output_name_from_input(filename, output_directory, ".pdf")
pdf_creator.create_pdf_from_images(pdf, pdf_output)
print("---------------------------")
main()
```
#### File: image-to-pdf-and-txt/imagetopdfandtxt/pdf_creator.py
```python
import argparse
from argparse import RawTextHelpFormatter
import img2pdf
def create_pdf_from_images(list_of_images, output_dir_and_name="default.pdf"):
with open(output_dir_and_name, "wb") as output_file:
output_file.write(img2pdf.convert(list_of_images))
print(f"--> Created '{output_dir_and_name}' from '{list_of_images}'.")
def main():
for image in ARGS["images"]:
pages = image.split(',')
print(f"-> Trying to create a pdf from '{pages}'.")
create_pdf_from_images(pages)
if __name__ == '__main__':
AP = argparse.ArgumentParser(description="""Takes a list of images and creates one PDF of them.
Multi page pdf are created by delimiting with comma.
Sample:
\tpython3 pdf_creator.py -i DocOnePageOne.jpg -i DocTwoPageOne.jpg,DocTwoPageTwo.png
The output file is named after the first given picture.""",
formatter_class=RawTextHelpFormatter)
AP.add_argument("-i", "--image", action="append", dest="images", required=True,
help="""The images, which shall be converted to a pdf.
Creating multipage pdf is also possible by
delimiting the files with a ",".""")
ARGS = vars(AP.parse_args())
main()
```
#### File: image-to-pdf-and-txt/tests/test_picture_rotator.py
```python
import unittest
from imagetopdfandtxt import picture_rotator
class test_picture_rotator(unittest.TestCase):
def test_get_new_name(self):
filename = "my_image"
self.assertEqual(picture_rotator.get_new_name(f"/some/path/{filename}.jpg", "not_rotated"), f"{filename}_not_rotated.jpg")
def test_get_rotation_angle_almost_perfect(self):
result_angle = picture_rotator.get_rotation_angle(f"tests/test_images/001/image.jpg")
self.assertGreater(result_angle, -1)
self.assertLess(result_angle, 1)
def test_get_rotation_angle_skew_image(self):
result_angle = picture_rotator.get_rotation_angle(f"tests/test_images/002/skew_image.jpg")
self.assertGreater(result_angle, 4)
self.assertLess(result_angle, 5.6)
if __name__ == "__main__":
unittest.main()
```
|
{
"source": "jereywhite2011/s3objectLambda",
"score": 2
}
|
#### File: src/populate-tag/lambda_function.py
```python
import json
import urllib.parse
import boto3
print('Loading function')
s3 = boto3.client('s3')
tagValue = "REDACT_AND_TOKENIZE"
tagName = "PROTECTION"
def lambda_handler(event, context):
print("Recevied event: " + json.dumps(event, indent=2))
# Get the object from the event
bucket = event['Records'][0]['s3']['bucket']['name']
key = urllib.parse.unquote_plus(
event['Records'][0]['s3']['object']['key'], encoding='utf-8')
try:
response = s3.put_object_tagging(
Bucket=bucket,
Key=key,
Tagging={
'TagSet': [
{
'Key': tagName,
'Value': tagValue
},
]
}
)
except Exception as e:
print(e)
print('Error applying tag {} to {}.'.format(tagName, key))
raise e
```
#### File: s3objectLambda/test/generate_tokens.py
```python
import random
import math
# Start with a list of all possible values
starting_string = list('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.')
starting_integer = list('0123456789')
def generate_string(txt):
# Generate the key by pulling out one value at a time and print the result
# Void return
final = ''
while len(txt) > 0:
tmp = math.floor(random.random() * len(txt))
final += txt.pop(tmp)
print(final)
generate_string(starting_string)
generate_string(starting_integer)
```
|
{
"source": "Jerez05/BPP",
"score": 3
}
|
#### File: BPP/Practica2/test_gastos.py
```python
import pytest
import pandas as pd
import csv
import gastos
def test_abrirFichero():
file = 'meses.csv'
open(file, newline="")
resultado = "El fichero se ha abierto sin problemas"
assert resultado == gastos.abrirFichero(file)
def test_comprobarColumnas():
file = 'meses.csv'
df = pd.read_csv(file, delimiter="\t")
resultado = 12
assert resultado == gastos.comprobarColumnas(file)
def test_comprobarContenido():
file = 'años.csv'
df = pd.read_csv("finanzas2020.csv", delimiter="\t")
resultado = 223
    assert resultado == gastos.comprobarContenido(file)
def test_convertirDataset():
file = 'años.csv'
csvfile = open(file, newline="")
read_csv=csv.reader(csvfile, delimiter='\t')
dataset1=[]
for row in read_csv:
dataset1.append(row)
r_aux = gastos.convertirDataset(file)
resultado = '223'
assert resultado == r_aux[1]
return dataset1
dataset1 = test_convertirDataset()
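# NOTE: the test functions are also called at import time so that their return
# values (dataset1, dataset_temp1, df1, dftotales1) can be reused as shared
# fixtures by the tests defined further down.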
def test_tratarDatos():
dataset_temp1 = []
for i in range(1, len(dataset1)):
datos_temp =[]
for n in dataset1[i]:
try:
dato = float(n)
datos_temp.append(dato)
except:
dato = 0
datos_temp.append(dato)
dataset_temp1.append(datos_temp)
r_aux_temp = gastos.tratarDatos(dataset1)
resultado = -491.0
assert resultado == r_aux_temp[1]
return dataset_temp1
dataset_temp1 = test_tratarDatos()
def test_crearDataframe():
meses = ["Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto", "Septiembre", "Octubre", "Noviembre", "Diciembre"]
df1 = pd.DataFrame(dataset_temp1, columns=meses)
r_aux_df = gastos.crearDataframe(dataset_temp1)
resultado = 223.0
assert resultado == r_aux_df[1]
return df1
df1 = test_crearDataframe()
def test_Totales():
Total1 = df1['Enero'].sum()
Total2 = df1['Febrero'].sum()
Total3 = df1['Marzo'].sum()
Total4 = df1['Abril'].sum()
Total5 = df1['Mayo'].sum()
Total6 = df1['Junio'].sum()
Total7 = df1['Julio'].sum()
Total8 = df1['Agosto'].sum()
Total9 = df1['Septiembre'].sum()
Total10 = df1['Octubre'].sum()
Total11 = df1['Noviembre'].sum()
Total12 = df1['Diciembre'].sum()
totales = [Total1, Total2, Total3, Total4, Total5, Total6, Total7, Total8, Total9, Total10, Total11, Total12]
meses = ["Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto", "Septiembre", "Octubre", "Noviembre", "Diciembre"]
dftotales = pd.DataFrame(list(zip(meses,totales)), columns = ['Meses','Totales'])
r_aux_dftot = gastos.Totales(df1)
resultado = 12064.0
assert resultado == r_aux_dftot[1]
return dftotales
dftotales1 = test_Totales()
def test_ahorroMax():
max = dftotales1.loc[dftotales1['Totales'].idxmax()]
max = max.iloc[1]
resultado = 12064.0
assert resultado == gastos.ahorroMax(dftotales1)
def test_gastoMax():
min = dftotales1.loc[dftotales1['Totales'].idxmin()]
min = min.iloc[1]
resultado = -18933.0
assert resultado == gastos.gastoMax(dftotales1)
def test_gastosMed():
median = dftotales1['Totales'].astype(float)
median = dftotales1['Totales'].median(axis=0)
resultado = -719.0
assert resultado == gastos.gastosMed(dftotales1)
def test_gastosTotales():
    dfgastos = dftotales1['Totales'] < 0
    dffiltrado = dftotales1[dfgastos]
    total_gastos = dffiltrado['Totales'].sum()  # not named "gastos", to avoid shadowing the imported module
    resultado = -51106
    assert resultado == gastos.gastosTotales(dftotales1)
def test_ingresosTotales():
dfingresos = dftotales1['Totales'].mask(dftotales1['Totales'].lt(0),0)
ingresos = dfingresos.sum()
resultado = 35276.0
assert resultado == gastos.ingresosTotales(dftotales1)
```
#### File: ope/utils/abrirfichero.py
```python
import pandas as pd
class comprobarFichero:
'''
    Data handling for the expenses file.
    Attributes
    ----------------------------------------------------------------
    file:
        Name of the CSV file to process.
    Methods
    ----------------------------------------------------------------
    Check that the file exists, has the correct format and contains data.
    abrirFichero:
        Tries to open the file.
        If it can be opened, prints the message "El fichero se ha abierto sin problemas".
        Otherwise prints the message "No se ha encontrado el fichero. Error: (error type)".
    comprobarColumnas:
        Loads the CSV file into a dataframe and counts its columns.
        If the number of columns is 12, prints the message "Numero de columnas: 12".
        Otherwise prints the message "El fichero debe de tener 12 columnas".
    comprobarContenido:
        Loads the CSV file into a dataframe and tries to print its first row.
        If it succeeds, prints the first row of every column of the dataframe.
        Otherwise prints the message "El fichero no tiene contenido valido".
    Example
    -----------------------------------------------------------------
    >>> from abrirfichero import comprobarFichero
    >>> f = comprobarFichero('filename')
    >>> res_abrirfichero = f.abrirFichero()
    >>> res_comprobarcolumnas = f.comprobarColumnas()
'''
def __init__(self, file):
self.file = file
    def abrirFichero(self):
        '''
        Open-file method. Checks that the file exists.
        Inputs
        ------
        self.file
        Outputs
        -------
        res_OK: prints the message "El fichero se ha abierto sin problemas"
        res_NOK: prints the message "No se ha encontrado el fichero. Error: (error)"
        '''
        try:
            with open(self.file, newline=""):
                res = "El fichero se ha abierto sin problemas"
            print(res)
        except OSError as error:
            res = "No se ha encontrado el fichero. Error: {}".format(error)
            print(res)
        return res
    def comprobarColumnas(self):
        '''
        Check-columns method. Checks the number of columns in the file.
        Inputs
        ------
        self.file
        Outputs
        -------
        res_OK: prints the message "Numero de columnas: (n)"
        res_NOK: prints the message "El fichero debe de tener 12 columnas"
        '''
        df = pd.read_csv(self.file, delimiter='\t')
        res = len(df.columns)
        if res == 12:
            print("Numero de columnas: {}".format(res))
        else:
            print("El fichero debe de tener 12 columnas")
        return res
    def comprobarContenido(self):
        '''
        Check-content method. Checks whether the file has valid content.
        Inputs
        ------
        self.file
        Outputs
        -------
        res_OK: prints the second row of the dataframe
        res_NOK: prints the message "El fichero no tiene contenido valido"
        '''
        df = pd.read_csv(self.file, delimiter='\t')
        try:
            res = df.iloc[1, :]
            print(res)
        except (IndexError, ValueError):
            res = None
            print("El fichero no tiene contenido valido")
        return res
```
|
{
"source": "Jergb/Preparation",
"score": 3
}
|
#### File: Jergb/Preparation/funs.py
```python
import numpy as np
import pandas as pd
import mysql.connector
import seaborn as sns
import matplotlib.pyplot as plt
from ggplot import *
import scipy as sp
from IPython.display import display, Markdown
from scipy.spatial.distance import mahalanobis as MH
from scipy.spatial.distance import euclidean as EU
from matplotlib.ticker import FuncFormatter
from datetime import timedelta
def transcurrido_fechas(f2, f1):
    'Computes the elapsed time between two dates, in minutes'
tiempo_transcurrido = int((f2 - f1).seconds / 60)
tiempo_transcurrido += (f2 - f1).days * 24 * 60
return tiempo_transcurrido
def separa_datos(datos, edge):
'Divide los datos cuando faltan edge registros'
inicio, fin, p = [], [], 0
inicio.append(0)
for i in range(1, len(datos)):
if transcurrido_fechas(datos.index[i], datos.index[i-1]) > edge:
fin.append(i-1)
inicio.append(i)
fin.append(i)
data, separados = [], []
for fin_i in range(len(fin)):
dataset = datos[inicio[fin_i]:fin[fin_i]]
for dataset_i in range(len(dataset)):
data.append(dataset.iloc[dataset_i])
separar = pd.DataFrame(data)
separados.append(separar)
data = []
inicio = [datos.index[x] for x in inicio]
fin = [datos.index[x-1] for x in fin]
registros = [len(separados[x])+1 for x in range(len(separados))]
tiempo = [(fin[x] - inicio[x]) for x in range(len(fin))]
inicio.append(inicio[-1])
sregistros = [(inicio[x+1]-fin[x]) for x in range(len(fin))]
inicio.pop(-1)
sregistros[-1] = '-'
faltantes = [(transcurrido_fechas(fin[x], inicio[x]) + 1 - registros[x]) for x in range(len(fin))]
r_registros = [(registros[x] / (transcurrido_fechas(fin[x], inicio[x]) +1)*100) for x in range(len(fin))]
n_registros = [(faltantes[x] / (transcurrido_fechas(fin[x], inicio[x]) +1)*100) for x in range(len(fin))]
duracion = pd.DataFrame({'Registro Inicial': inicio, 'Registro Final': fin,
'Registrado': registros, 'No Registrado':faltantes,
'Registrado(%)':r_registros,'No Registrado(%)':n_registros,
'Duración': tiempo, 'Tiempo Hasta el Siguiente Registro':sregistros})
return separados, duracion
def info_periodo(dataset,inicio,fin):
    'Shows detailed information about a range of records of a dataset'
inicioi = pd.Timestamp(inicio)
fini = pd.Timestamp(fin)
data = dataset.loc[inicioi:fini,:]
inicio = data.index[0]
fin = data.index[-1]
tiempo = fin - inicio
registros = len(data)
faltantes = transcurrido_fechas(fin, inicio) + 1 - registros
r_registros = (registros / (transcurrido_fechas(fin, inicio) +1))*100
n_registros = (faltantes / (transcurrido_fechas(fin, inicio) +1))*100
info = pd.DataFrame({'Registro Inicial': [inicio], 'Registro Final': [fin],
'Registrado': [registros], 'No Registrado':[faltantes],
'Registrado(%)':[r_registros],'No Registrado(%)':[n_registros],
'Duración':[tiempo]})
return data, info
def info_relation(df,v1,v2):
    'Visualizes the relationship between two variables'
df = df.loc[:,[v1, v2]].copy()
corr = df.loc[:,[v1,v2]].corr().iloc[0,1]
mag = {'TEMPERATURA':'°C', 'HUMEDAD RELATIVA':'%RH',
'HUMEDAD DE LA TIERRA':'%', 'INTENSIDAD LUMÍNICA':'LX'}
a=1
fig = plt.figure(figsize=[17,8])
display(Markdown('<center>**GRÁFICOS DE \
%s Y %s**</center>'%(v1,v2)))
display(Markdown('<center>$\\rho = %.2f$\
</center>'%corr))
with plt.style.context(('seaborn')):
plt.subplot(331)
plt.text(0.26,-.05,'%s'%(pd.DataFrame(
raw[v1].describe())),fontsize=15)
plt.axis('off')
plt.subplot(332)
plt.boxplot(df[v1],vert=False,flierprops=dict(markerfacecolor='r'))
plt.yticks([])
plt.subplot(333)
plt.text(0.26,-.05,'%s'%(pd.DataFrame(
raw[v2].describe())),fontsize=15)
plt.axis('off')
plt.subplot(334)
plt.hist(df[v2],orientation='horizontal',alpha=.6)
plt.xlabel('FRECUENCIA')
plt.ylabel('%s %s'%(v2,mag[v2]))
plt.subplot(337)
plt.plot(df[v1],alpha=.6)
plt.xlabel('FECHA')
plt.ylabel('%s'%(mag[v1]))
plt.subplot(339)
plt.plot(df[v2],alpha=.6)
plt.xlabel('FECHA')
plt.ylabel('%s'%(mag[v2]))
plt.subplot(338)
plt.hist(df[v1],alpha=.6)
plt.xlabel('%s %s'%(v1,mag[v1]))
plt.ylabel('FRECUENCIA')
plt.subplot(335)
plt.scatter(df[v1],df[v2],linewidths=.8,alpha=.6)
for v in [v1,v2]:
li,ls = tukey(df[v])
try:
ind = [[x,df[v][x]] for x in list(df[v].index)
if df[v].loc[x]<=li or df[v].loc[x]>=ls]
df.loc[pd.DataFrame(ind).iloc[:,0],'o%d'%a] = pd.DataFrame(ind).iloc[:,1].values
a+=1
try:
plt.scatter(df['o1'],df[v2],color='r')
plt.subplot(337)
plt.plot(df['o1'],'ro')
plt.subplot(338)
plt.hist(df['o1'][~np.isnan(df['o1'])],
facecolor='r')
plt.subplot(335)
except KeyError:
pass
try:
plt.scatter(df[v1],df['o2'],color='r')
plt.subplot(339)
plt.plot(df['o2'],'ro')
plt.subplot(334)
plt.hist(df['o2'][~np.isnan(df['o2'])],
facecolor='r',orientation='horizontal')
plt.subplot(335)
except KeyError:
pass
except (IndexError,NameError) as e:
pass
plt.subplot(336)
plt.boxplot(df[v2],flierprops=dict(markerfacecolor='r'))
plt.xticks([])
plt.tight_layout()
def dist_variable(var):
plt.figure(figsize=[17,6])
variable = pd.DataFrame(var)
vari = variable.columns
mag = {'TEMPERATURA':'°C', 'HUMEDAD RELATIVA':'%RH',
'HUMEDAD DE LA TIERRA':'%', 'INTENSIDAD LUMÍNICA':'LX'}
plt.suptitle('DISTRIBUCIÓN DE VALORES DE LA VARIABLE %s'%vari[0])
plt.subplot(121)
plt.hist(var,orientation='horizontal',alpha=.6)
plt.title('HISTOGRAMA DE %s'%vari[0])
plt.xlabel('Frecuencia')
plt.ylabel('%s'%(mag[vari[0]]))
plt.subplot(122)
plt.boxplot(var,flierprops=dict(markerfacecolor='r'))
plt.text(1.08, var.quantile(.25), 'Q1', fontsize=12)
plt.text(1.08, var.quantile(.75), 'Q3', fontsize=12)
li,ls=tukey(var)
if var.max() > ls:
plt.text(1.04, ls, 'Límite superior', fontsize=12)
plt.text(0.85, np.mean([var.max(),ls]), 'OUTLIERS', fontsize=12,color='r')
else:
plt.text(1.04, np.mean([var.max()]), 'Valor máximo', fontsize=12)
if var.min() < li:
plt.text(1.04, li, 'Límite inferior', fontsize=12)
plt.text(0.85, np.mean([var.min(),li]), 'OUTLIERS', fontsize=12,color='r')
else:
plt.text(1.04, np.mean([var.min()]), 'Valor mínimo', fontsize=12)
plt.ylabel('%s'%(mag[vari[0]]))
plt.xlabel('%s'%vari[0])
    plt.title('DIAGRAMA DE CAJAS Y BIGOTES PARA %s'%vari[0])
display(variable.describe())
def info_var(v):
    'Plots all the measured values for variable v'
variable = raw.columns[v]
plt.figure(figsize=[17,5])
plt.plot(raw[variable],'orange',label='datos')
plt.title(variable)
plt.legend()
def tukey(variable):
    'Computes the upper and lower limits for v'
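    # Tukey's fences: anything below Q1 - 1.5*IQR or above Q3 + 1.5*IQR is
    # treated as an outlier by the rest of this module.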
RIQ = variable.quantile(q=.75) - variable.quantile(q=.25)
Li = variable.quantile(q=.25) - RIQ * 1.5
Ls = variable.quantile(q=.75) + RIQ * 1.5
return Li, Ls
def filtro(filtrar,variable,i,q):
    'Sets the value of a measurement to a given limit value'
try:
d=len(filtrar[variable].iloc[0:i])/(60*24)
val=[filtrar[variable][i-(60*24)*(1+j)] for j in range(int(d)) if d>1]
val.append(filtrar[variable][i-1])
filtrar[variable][i] = np.mean(val)
except KeyError:
filtrar[variable][i]= q
return filtrar
def filtrar_variables(variable,lim_i,lim_s):
    'Adjusts the outlier values in the measurements of variable v'
global filtrado
Li, Ls = tukey(filtrar[variable])
for i in range(len(filtrar[variable])):
if filtrar[variable][i] >= lim_s:
filtro(filtrado,variable,i,filtrado[variable].quantile(.75))
elif filtrar[variable][i] <= lim_i:
filtro(filtrado,variable,i,filtrado[variable].quantile(.25))
# plt.plot(filtrado.loc[:,variable],'blue',label='filtrar')
#plt.legend()
return
def filtro_var(variable,lim_i,lim_s):
    'Adjusts the outlier values in the measurements of variable v'
global raw
filtrar = raw.copy()
Li, Ls = tukey(filtrar[variable])
for i in range(len(filtrar[variable])):
if filtrar[variable][i] >= lim_s:
filtro(filtrar,variable,i,filtrar[variable].quantile(.75))
elif filtrar[variable][i] <= lim_i:
filtro(filtrar,variable,i,filtrar[variable].quantile(.25))
return filtrar.corr()
def up_ajuste(filtrar,variable):
    global ajustado, filtrado
ajustado[variable]=filtrado[variable].copy()
return
def ajuste_variacion(variable):
    'Adjusts the values whose variation exceeds the rolling std'
global ajustado,filtrado,raw
mag = {'TEMPERATURA':'°C', 'HUMEDAD RELATIVA':'%RH',
'HUMEDAD DE LA TIERRA':'%', 'INTENSIDAD LUMÍNICA':'LX'}
up_ajuste(filtrado,variable)
ventana=60
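    # Rolling std over a 60-sample window; the NaNs at the head of the forward
    # window are filled in below from a reverse (backward) rolling std.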
rolling_std = ajustado[variable].rolling(ventana).std()
rolling_std_inv=ajustado[variable].iloc[::-1].rolling(ventana).std().iloc[::-1]
indice=rolling_std[rolling_std.isnull().values].index
rolling_std.loc[indice]=rolling_std_inv.loc[indice]
suave=ajustado[variable].copy()
nvalores=suave[rolling_std>(rolling_std.quantile(.5))].index
nnvalores=[suave.index.get_loc(x) for x in nvalores]
#print('nvalores: ',nvalores)
rolling_median=suave.rolling(ventana).median()
    # Rolling window computed from the last row backwards
rolling_median_inv=suave.iloc[::-1].rolling(ventana).median().iloc[::-1]
indice=rolling_median[rolling_median.isnull().values].index
rolling_median.loc[indice]=rolling_median_inv.loc[indice]
    # shift the dataframe indices by half a window of rows
nsu = [x+int(ventana/2) for x in nnvalores if x+ventana < len(suave)]
#nsu = [x+ventana for x in nnvalores if x+ventana < len(suave)]
suave[nnvalores[0:len(nsu)]]=rolling_median[nsu]
plt.figure(figsize=[17,5])
plt.title(variable)
plt.plot(raw[variable],label='datos')
plt.plot(suave,label='suavizado')
plt.ylabel('%s'%mag[variable])
plt.xlabel('FECHA')
plt.legend()
ajustado[variable]=suave
return
def val_menor(L1,L2,lim_s,var,var_rel):
    'Computes the correlation column between two variables'
correl = [[filtro_var(var,rango,lim_s).loc[var,var_rel],rango] for rango in range(int(L1),int(L2))]
return correl
def filtro_menor(v,vr,lim_i,lim_s,c):
global filtrar
    # Apply the Tukey test
Li, Ls = tukey(filtrar[v])
if Li>lim_i:
        # val_menor computes a correlation column for different ranges
correl = val_menor(lim_i,int(Li),lim_s,v,vr)
L = lim_i
else:
correl = val_menor(int(Li),lim_i,lim_s,v,vr)
L = Li
corre={'corre':[],'lim':[]}
corre['corre']=[f0[0] for f0 in correl]
corre['lim']=[f1[1] for f1 in correl]
correl=pd.DataFrame(corre)
#correl=pd.DataFrame(correl)
    # Returns the lower limit closest to the Tukey limit with the lowest correlation
if c == 1:
menor = correl.index[correl['corre'] == correl.corre.max()].tolist()
else:
menor = correl.index[correl['corre'] == correl.corre.min()].tolist()
if len(menor) == abs(Li-lim_i):
menor_ = L
else:
menor_ = correl['lim'][menor[-1]]
#print(correl['lim'][menor],correl['corre'][menor])
return menor_
def val_mayor(L1,L2,lim_i,var,var_rel):
    'Computes the correlation column between two variables'
global filtrar
correl = [[filtro_var(var,lim_i,rango).loc[var,var_rel],rango] for rango in range(int(L1),int(L2))]
return correl
def filtro_mayor(v,vr,lim_i,lim_s,c):
global filtrar
    # Apply the Tukey test
Li, Ls = tukey(filtrar[v])
if Ls>lim_s:
        # val_mayor computes a correlation column for different ranges
correl = val_mayor(lim_s,int(Ls),lim_i,v,vr)
L=lim_s
else:
correl = val_mayor(int(Ls),lim_s,lim_i,v,vr)
L=Ls
#correl=pd.DataFrame(correl)
corre={'corre':[],'lim':[]}
corre['corre']=[f0[0] for f0 in correl]
corre['lim']=[f1[1] for f1 in correl]
corre=pd.DataFrame(corre)
    # Returns the upper limit closest to the Tukey limit with the lowest correlation
if c == 1:
mayor = corre.index[corre['corre'] == corre.corre.max()].tolist()
else:
mayor = corre.index[corre['corre'] == corre.corre.min()].tolist()
if len(mayor) == abs(Ls-lim_s):
mayor_ = L
else:
mayor_ = corre['lim'][mayor[0]]
return mayor_
```
|
{
"source": "jergeno/DadBotV2.0",
"score": 2
}
|
#### File: DadBotV2.0/noncommands/imchecker.py
```python
from collections import defaultdict
import re
import yaml
import sys
import os
import mysql.connector
import random
if "DadBot" not in str(os.getcwd()):
os.chdir("./DadBot")
with open("config.yaml") as file:
config = yaml.load(file, Loader=yaml.FullLoader)
class ImChecker:
def __init__(self):
self.imList = [" im ", " i'm ", " Im ", " I'm ", " IM ", " I'M ", " i am ", " I am ", " I AM ", " lm ", " l'm ", " lM ", " l'M ", " l am ", " l AM "]
self.confusables = Confusables('./resources/likeness.txt')
async def checkIm(self, message):
for string in self.imList:
cpattern = self.confusables.confusables_regex(string)
r = re.compile(cpattern)
fake_string = " " + message.content
res = r.match(fake_string)
rand = random.randint(0, 9)
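            # Only reply roughly 1 time in 10 so the bot does not answer every "I'm ..." message.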
if res and rand == 3:
typeIm = res.group().strip() + " "
await message.reply("Hi " + str(message.content).split(typeIm, 1)[1] + ", I'm Dad")
mydb = mysql.connector.connect(
host=config["dbhost"],
user=config["dbuser"],
password=config["<PASSWORD>"],
database=config["databasename"]
)
mycursor = mydb.cursor(buffered=True)
                mycursor.execute("SELECT * FROM caught WHERE user = %s", (str(message.author),))
hascolumn = False
for m in mycursor:
hascolumn = True
if not hascolumn:
                    mycursor.execute("INSERT INTO caught (user, count) VALUES (%s, 1)", (str(message.author),))
else:
                    mycursor.execute("UPDATE caught SET count = count + 1 WHERE user = %s", (str(message.author),))
mydb.commit()
mycursor.close()
mydb.close()
break
class Confusables:
def __init__(self, confusables_filename):
f = open(confusables_filename, 'r', encoding="utf-8")
confusables_dict = defaultdict(list)
pattern = re.compile(r'(.) → (.)')
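        # Each line of likeness.txt is expected to look like "<fake> → <real>",
        # mapping a look-alike (homoglyph) character to the character it imitates.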
for line in f:
r = pattern.search(line)
if r:
fake = r.group(1)
auth = r.group(2)
confusables_dict[auth].append(fake)
self.confusables_dict = confusables_dict
def expand_char_to_confusables(self, c):
if c in self.confusables_dict:
return '[{}{}]'.format(re.escape(c), re.escape("".join(self.confusables_dict[c])))
else:
return c
def confusables_regex(self, pattern, letter_test_function=None):
new = ""
for c in pattern:
if ((not letter_test_function) or
(letter_test_function and letter_test_function(c))):
new += self.expand_char_to_confusables(c)
else:
new += c
return new
```
|