Dataset schema (column: type, observed range of values):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 3 to 616)
- content_id: string (length 40)
- detected_licenses: list (length 0 to 112)
- license_type: string (2 classes)
- repo_name: string (length 5 to 115)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (777 classes)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 classes)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
- gha_language: string (149 classes)
- src_encoding: string (26 classes)
- language: string (1 class)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (3 to 10.2M)
- extension: string (188 classes)
- content: string (length 3 to 10.2M)
- authors: list (length 1)
- author_id: string (length 1 to 132)

Each record below lists these columns in order, separated by "|", with the content column holding the full source file.
901d73e1e1b9fbab700e456ee163cba1d0d65fe4 | 9fcc6ed9d6ddff6d183a891066f6e2be5c3875e8 | /pandasdmx/source/sgr.py | cb73a9a7c0c1aad9de3316676facf7c3269555fc | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"Python-2.0"
]
| permissive | daoluan/pandaSDMX | ea289db699d2516cf563194214d1e70adb61dca7 | 2efcb5a429a5306efd89bed4cd55946d1ad5067b | refs/heads/master | 2020-07-12T21:37:20.617115 | 2019-08-28T11:09:59 | 2019-08-28T11:09:59 | 204,912,582 | 0 | 0 | Apache-2.0 | 2019-08-28T19:08:08 | 2019-08-28T11:08:08 | null | UTF-8 | Python | false | false | 667 | py |
from . import Source as BaseSource
class Source(BaseSource):
_id = 'SGR'
def handle_response(self, response, content):
"""SGR responses do not specify content-type; set it directly."""
if response.headers.get('content-type', None) is None:
response.headers['content-type'] = 'application/xml'
return response, content
def modify_request_args(self, kwargs):
"""SGR is a data source but not a data provider.
        Default the ``provider`` argument to ``'all'`` so that data
        republished by SGR from all upstream providers is retrieved.
"""
kwargs.setdefault('provider', 'all')
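# Usage note (illustrative; the hook semantics are inferred from the base Source class):
# handle_response() is run on each HTTP response from this source, so when SGR omits
# the Content-Type header the code above forces 'application/xml' before parsing.
# Because modify_request_args() defaults ``provider`` to 'all', a query against SGR
# returns data republished from every upstream provider unless the caller passes an
# explicit provider.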
| [
"[email protected]"
]
| |
cf85859497e9262ab0792ec4e552abbecf6d8798 | 68b7e05830d2480e848b0d1ff49f455e3c2e3a3c | /manage.py | 70ae5959854c8281d4a31549726dba3ecf87c16d | []
| no_license | Zauberzunge/Umfragen | 24414567ad8dfeb89a5b7267841a08bf6d035625 | 3e57da7e87d2aebc596878800fd4fe8008f38944 | refs/heads/master | 2023-01-28T20:02:19.044334 | 2020-12-07T21:06:47 | 2020-12-07T21:06:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
from django.core.management.commands.runserver import Command as runserver
runserver.default_port = "8002"
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangoProject.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Could not import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
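# Usage sketch (illustrative; assumes the djangoProject settings module is importable):
#   python manage.py runserver               # serves on 127.0.0.1:8002 because of the default_port override above
#   python manage.py runserver 0.0.0.0:8080  # an explicit address:port still takes precedence
#   python manage.py migrate                 # other management commands are unaffected by the override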
| [
"="
]
| = |
646600322f93ff2c0453d17bf7823470b5dc6892 | 14421a12c4e80395567e676394d369fd9619bd32 | /Scripts/PythonMidLvl/84/84test.py | 06a05233ce41b852f56020436f8510d38948fc20 | []
| no_license | jawor92/Python-Udemy-Mobilo | 7b331e8197233c3116e43e0b3c1110b9b878762e | 8098508835121a1536c2753bc4eedbf17163c93d | refs/heads/master | 2020-12-09T21:39:09.366604 | 2020-01-12T19:31:09 | 2020-01-12T19:31:09 | 233,423,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py |
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 6 22:09:49 2020
@author: Mateusz.Jaworski
"""
class MailToSantaClaus:
def __init__(self, presents):
self.presents = presents.copy()
def show_presents(self):
print(self.presents)
mail = MailToSantaClaus(['Teddy Bear', 'Teddy Bear House'])
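# Illustrative check: because __init__ stores a copy of the list, mutating the
# caller's list afterwards does not change what the mail holds.
wishlist = ['Bike']
second_mail = MailToSantaClaus(wishlist)
wishlist.append('Pony')
second_mail.show_presents()  # still prints ['Bike']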
mail.show_presents()
| [
"[email protected]"
]
| |
dfd5f1ab44402a21ebfea238e5b70d78f4c08847 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-mrsp.0/mrsp_ut=3.5_rd=0.8_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=79/params.py | bbe63e8c0e1fb04e03a09bb6ca4fadafda488f37 | []
| no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py |
{'cpus': 4,
'duration': 30,
'final_util': '3.530310',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.8',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'RUN',
'trial': 79,
'utils': 'uni-medium-3'}
| [
"[email protected]"
]
| |
6c2bda0345755e152e1819fa282be7e05a97e988 | 15e85b4d9527e7a87aded5b3c99ad9c785bca915 | /data-storage-manager-sdk/python/simcore_dsm_sdk/configuration.py | 422f971c74adb284286a59de28d37d9be9f11594 | [
"MIT"
]
| permissive | mguidon/aiohttp-dsm | 4161f9977d3dffbb727aa26cce4e9fb347aa4e21 | 612e4c7f6f73df7d6752269965c428fda0276191 | refs/heads/master | 2020-03-30T09:03:49.791406 | 2018-10-02T07:05:35 | 2018-10-02T07:05:35 | 151,058,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,125 | py |
# coding: utf-8
"""
dsm-api
dsm api # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "http://{host}:{port}/{version}"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("simcore_dsm_sdk")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if (self.api_key.get(identifier) and
self.api_key_prefix.get(identifier)):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 2.0.0\n"\
"SDK Package Version: 1.0.0".\
format(env=sys.platform, pyversion=sys.version)
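# Usage sketch (illustrative, not a prescribed workflow): TypeWithDefault makes
# Configuration() return a copy of a shared default instance, so values pushed via
# set_default() appear in configurations created afterwards. The 'api_key' auth
# identifier below is hypothetical.
if __name__ == '__main__':
    conf = Configuration()
    conf.api_key['api_key'] = 'secret-token'
    conf.api_key_prefix['api_key'] = 'Bearer'
    Configuration.set_default(conf)

    another = Configuration()  # starts from the default registered above
    print(another.get_api_key_with_prefix('api_key'))  # -> 'Bearer secret-token'
    print(another.to_debug_report())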
| [
"[email protected]"
]
| |
11d574873016cebec8bc817967783c8384d642f8 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/opinkerfi-adagios/allPythonContent.py | 2c936d37e8f3eb56e3d4cf91652d8ed5eed39dfd | []
| no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462,213 | py |
__FILENAME__ = auth
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Authorization related stuff in Adagios
"""
import adagios.status.utils
import adagios.views
auditors = []
operators = []
administrators = []
# administrator belongs to all the other groups
administrators += operators + auditors
access_list = list()
# Explicitly grant configuration access only to admins
access_list.append(('adagios.objectbrowser', "administrators"))
access_list.append(('adagios.okconfig_', "administrators"))
access_list.append(('adagios.misc.helpers', "administrators"))
access_list.append(('adagios.misc.views.settings', "administrators"))
access_list.append(('adagios.misc.views.gitlog', "administrators"))
access_list.append(('adagios.misc.views.service', "administrators"))
access_list.append(('adagios.rest.status.edit', "administrators"))
access_list.append(('adagios.status.views.contact', "administrators"))
access_list.append(('adagios.status.views.state_history', "administrators"))
access_list.append(('adagios.status.views.log', "administrators"))
access_list.append(('adagios.status.views.servicegroup', "administrators"))
access_list.append(('adagios.rest.status.state_history', "administrators"))
access_list.append(('adagios.rest.status.top_alert_producers', "administrators"))
access_list.append(('adagios.rest.status.update_check_command', "administrators"))
access_list.append(('adagios.rest.status.log_entries', "administrators"))
# Access to rest interface
access_list.append(('adagios.rest.views', "everyone"))
access_list.append(('adagios.rest.status', "everyone"))
access_list.append(('adagios.misc.rest', "everyone"))
# These modules should more or less be considered "safe"
access_list.append(('django.views.static', "everyone"))
access_list.append(('django.views.i18n', "everyone"))
access_list.append(('adagios.views', "everyone"))
access_list.append(('adagios.status', "everyone"))
access_list.append(('adagios.pnp', "everyone"))
access_list.append(('adagios.contrib', "everyone"))
access_list.append(('adagios.bi.views.index', "everyone"))
access_list.append(('adagios.bi.views.view', "everyone"))
access_list.append(('adagios.bi.views.json', "everyone"))
access_list.append(('adagios.bi.views.graphs_json', "everyone"))
access_list.append(('adagios.misc.helpers.needs_reload', "everyone"))
# If no other rule matches, assume administrators have access
access_list.append(('', "administrators"))
def check_access_to_path(request, path):
""" Raises AccessDenied if user does not have access to path
path in this case is a full path to a python module name for example: "adagios.objectbrowser.views.index"
"""
for search_path, role in access_list:
if path.startswith(search_path):
if has_role(request, role):
return None
else:
user = request.META.get('REMOTE_USER', 'anonymous')
message = "You do not have permission to access %s" % (path, )
raise adagios.exceptions.AccessDenied(user, access_required=role, message=message, path=path)
else:
return None
def has_access_to_path(request, path):
""" Returns True/False if user in incoming request has access to path
Arguments:
path -- string describing a path to a method or module, example: "adagios.objectbrowser.views.index"
"""
for search_path, role in access_list:
if path.startswith(search_path):
return has_role(request, role)
else:
return False
def has_role(request, role):
""" Returns true if the username in current request has access to a specific role """
user = request.META.get('REMOTE_USER', "anonymous")
# Allow if everyone is allowed access
if role == 'everyone':
return True
# Deny if nobody is allowed access
if role == 'nobody':
return False
# Allow if role is "contacts" and user is in fact a valid contact
if role == 'contacts' and adagios.status.utils.get_contacts(None, name=user):
return True
# Allow if role is "users" and we are in fact logged in
if role == 'users' and user != "anonymous":
return True
users_and_groups = globals().get(role, None)
if hasattr(adagios.settings, role):
for i in str(getattr(adagios.settings, role)).split(','):
i = i.strip()
if i not in users_and_groups:
users_and_groups.append(i)
# Deny if no role exists with this name
if not users_and_groups:
return False
# Allow if user is mentioned in your role
if user in users_and_groups:
return True
# If it is specifically stated that "everyone" belongs to the group
if "everyone" in users_and_groups:
return True
# Check if user belongs to any contactgroup that has access
contactgroups = adagios.status.utils.get_contactgroups(None, 'Columns: name', 'Filter: members >= %s' % user)
# Allow if we find user belongs to one contactgroup that has this role
for contactgroup in contactgroups:
if contactgroup['name'] in users_and_groups:
return True
# If we get here, the user clearly did not have access
return False
def check_role(request, role):
""" Raises AccessDenied if user in request does not have access to role """
if not has_role(request, role):
user = request.META.get('REMOTE_USER', 'anonymous')
message = "User does not have the required role"
raise adagios.exceptions.AccessDenied(username=user, access_required=role, message=message)
class AuthorizationMiddleWare(object):
""" Django MiddleWare class. It's responsibility is to check if an adagios user has access
if user does not have access to a given view, it is given a 403 error.
"""
def process_request(self, request):
return None
def process_view(self, request, view_func, view_args, view_kwargs):
if not adagios.settings.enable_authorization:
return None
function_name = view_func.__name__
module_name = view_func.__module__
if module_name == "adagios.rest.views" and function_name == 'handle_request':
module_name = view_kwargs['module_path']
function_name = view_kwargs['attribute']
try:
path = module_name + '.' + function_name
check_access_to_path(request, path)
except adagios.exceptions.AccessDenied, e:
return adagios.views.http_403(request, exception=e)
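# Illustrative sketch (requires the adagios package and a configured Django settings
# module to be importable). It shows how the prefix rules in access_list resolve;
# the username and path below are made up.
if __name__ == '__main__':
    class _FakeRequest(object):
        META = {'REMOTE_USER': 'alice'}

    # 'adagios.status.views.status_index' first matches the ('adagios.status', "everyone")
    # rule, well before the catch-all ('', "administrators"), so any user is allowed.
    print(has_access_to_path(_FakeRequest(), 'adagios.status.views.status_index'))  # True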
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.translation import ugettext as _
import adagios.status.utils
import adagios.bi
class RemoveSubProcessForm(forms.Form):
""" Remove one specific sub process from a business process
"""
process_name = forms.CharField(max_length=100, required=True)
process_type = forms.CharField(max_length=100, required=True)
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(RemoveSubProcessForm, self).__init__(*args, **kwargs)
def save(self):
process_name = self.cleaned_data.get('process_name')
process_type = self.cleaned_data.get('process_type')
self.bp.remove_process(process_name, process_type)
self.bp.save()
status_method_choices = map(
lambda x: (x, x), adagios.bi.BusinessProcess.status_calculation_methods)
class BusinessProcessForm(forms.Form):
""" Use this form to edit a BusinessProcess """
name = forms.CharField(max_length=100, required=True,
help_text=_("Unique name for this business process."))
#processes = forms.CharField(max_length=100, required=False)
display_name = forms.CharField(max_length=100, required=False,
help_text=_("This is the name that will be displayed to users on this process. Usually it is the name of the system this business group represents."))
notes = forms.CharField(max_length=1000, required=False,
help_text=_("Here you can put in any description of the business process you are adding. Its a good idea to write down what the business process is about and who to contact in case of downtimes."))
status_method = forms.ChoiceField(
choices=status_method_choices, help_text=_("Here you can choose which method is used to calculate the global status of this business process"))
state_0 = forms.CharField(max_length=100, required=False,
help_text=_("Human friendly text for this respective state. You can type whatever you want but nagios style exit codes indicate that 0 should be 'ok'"))
state_1 = forms.CharField(max_length=100, required=False,
help_text=_("Typically used to represent warning or performance problems"))
state_2 = forms.CharField(max_length=100, required=False,
help_text=_("Typically used to represent critical status"))
state_3 = forms.CharField(
max_length=100, required=False, help_text=_("Use this when status is unknown"))
#graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
#graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(BusinessProcessForm, self).__init__(*args, **kwargs)
def save(self):
c = self.cleaned_data
self.bp.data.update(c)
self.bp.save()
def remove(self):
c = self.data
process_name = c.get('process_name')
process_type = c.get('process_type')
if process_type == 'None':
process_type = None
self.bp.remove_process(process_name, process_type)
self.bp.save()
def clean(self):
cleaned_data = super(BusinessProcessForm, self).clean()
# If name has changed, look if there is another business process with
# same name.
new_name = cleaned_data.get('name')
if new_name and new_name != self.bp.name:
if new_name in adagios.bi.get_all_process_names():
raise forms.ValidationError(
_("Cannot rename process to %s. Another process with that name already exists") % new_name
)
return cleaned_data
def delete(self):
""" Delete this business process """
self.bp.delete()
def add_process(self):
process_name = self.data.get('process_name')
hostgroup_name = self.data.get('hostgroup_name')
servicegroup_name = self.data.get('servicegroup_name')
service_name = self.data.get('service_name')
if process_name:
self.bp.add_process(process_name, None)
if hostgroup_name:
self.bp.add_process(hostgroup_name, None)
if servicegroup_name:
self.bp.add_process(servicegroup_name, None)
if service_name:
self.bp.add_process(service_name, None)
self.bp.save()
choices = 'businessprocess', 'hostgroup', 'servicegroup', 'service', 'host'
process_type_choices = map(lambda x: (x, x), choices)
class AddSubProcess(forms.Form):
process_type = forms.ChoiceField(choices=process_type_choices)
process_name = forms.CharField(
widget=forms.HiddenInput(attrs={'style': "width: 300px;"}), max_length=100)
display_name = forms.CharField(max_length=100, required=False)
tags = forms.CharField(
max_length=100, required=False, initial="not critical")
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(AddSubProcess, self).__init__(*args, **kwargs)
def save(self):
self.bp.add_process(**self.cleaned_data)
self.bp.save()
class AddHostgroupForm(forms.Form):
pass
class AddGraphForm(forms.Form):
host_name = forms.CharField(max_length=100,)
service_description = forms.CharField(max_length=100, required=False)
metric_name = forms.CharField(max_length=100, required=True)
notes = forms.CharField(max_length=100, required=False,
help_text=_("Put here a friendly description of the graph"))
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(AddGraphForm, self).__init__(*args, **kwargs)
def save(self):
self.bp.add_pnp_graph(**self.cleaned_data)
self.bp.save()
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
import tempfile
import os
import time
from django.test import TestCase
from django.test.client import Client
from django.utils.translation import ugettext as _
from adagios.bi import *
import adagios.utils
class TestBusinessProcess(TestCase):
def setUp(self):
fd, filename = tempfile.mkstemp()
BusinessProcess._default_filename = filename
def tearDown(self):
os.remove(BusinessProcess._default_filename)
def test_save_and_load(self):
""" This test will test load/save of a business process.
The procedure is as follows:
* Load a business process
* Save it
* Make changes
* Load it again, and verify changes were saved.
"""
bp_name = 'test_business_process'
b = BusinessProcess(bp_name)
b.load()
# Append a dot to the bp name and save
new_display_name = b.display_name or '' + "."
b.display_name = new_display_name
b.save()
# Load bp again
b = BusinessProcess(bp_name)
b.load()
self.assertEqual(b.display_name, new_display_name)
def test_add_process(self):
""" Test adding new processes to a current BP
"""
bp_name = 'test'
sub_process_name = 'sub_process'
sub_process_display_name = 'This is a subprocess of test'
b = BusinessProcess(bp_name)
b.add_process(sub_process_name, display_name=sub_process_display_name)
for i in b.get_processes():
if i.name == sub_process_name and i.display_name == sub_process_display_name:
return
else:
self.assertTrue(
False, 'We tried adding a business process but could not find it afterwards')
def test_hostgroup_bp(self):
bp_name = 'test'
hostgroup_name = 'acme-network'
b = BusinessProcess(bp_name)
b.add_process(hostgroup_name, 'hostgroup')
def test_remove_process(self):
""" Test removing a subprocess from a businessprocess
"""
bp_name = 'test'
sub_process_name = 'sub_process'
sub_process_display_name = 'This is a subprocess of test'
b = BusinessProcess(bp_name)
b.add_process(sub_process_name, display_name=sub_process_display_name)
self.assertNotEqual([], b.processes)
b.remove_process(sub_process_name)
self.assertEqual([], b.processes)
def test_get_all_processes(self):
get_all_processes()
def test_macros(self):
bp = get_business_process('uniq test case', status_method="use_worst_state")
macros_for_empty_process = {
'num_problems': 0,
'num_state_0': 0,
'num_state_1': 0,
'num_state_2': 0,
'num_state_3': 0,
'current_state': 3,
'friendly_state': 'unknown',
'percent_problems': 0,
'percent_state_3': 0,
'percent_state_2': 0,
'percent_state_1': 0,
'percent_state_0': 0
}
self.assertEqual(3, bp.get_status())
self.assertEqual(macros_for_empty_process, bp.resolve_all_macros())
bp.add_process("always_ok", status_method="always_ok")
bp.add_process("always_major", status_method="always_major")
macros_for_nonempty_process = {
'num_problems': 1,
'num_state_0': 1,
'num_state_1': 0,
'num_state_2': 1,
'num_state_3': 0,
'current_state': 2,
'friendly_state': 'major problems',
'percent_problems': 50.0,
'percent_state_3': 0.0,
'percent_state_2': 50.0,
'percent_state_1': 0.0,
'percent_state_0': 50.0
}
self.assertEqual(2, bp.get_status())
self.assertEqual(macros_for_nonempty_process, bp.resolve_all_macros())
def testPageLoad(self):
self.loadPage('/bi')
self.loadPage('/bi/add')
self.loadPage('/bi/add/subprocess')
self.loadPage('/bi/add/graph')
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, "Unhandled exception while loading %s: %s" % (url, e))
class TestBusinessProcessLogic(TestCase):
""" This class responsible for testing business classes logic """
def setUp(self):
self.environment = adagios.utils.FakeAdagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.configure_livestatus()
self.environment.update_adagios_global_variables()
self.environment.start()
self.livestatus = self.environment.get_livestatus()
self.livestatus.test()
fd, filename = tempfile.mkstemp()
BusinessProcess._default_filename = filename
def tearDown(self):
self.environment.terminate()
os.remove(BusinessProcess._default_filename)
def testBestAndWorstState(self):
s = BusinessProcess("example process")
s.status_method = 'use_worst_state'
self.assertEqual(3, s.get_status(), _("Empty bi process should have status unknown"))
s.add_process(process_name="always_ok", process_type="businessprocess", status_method='always_ok')
self.assertEqual(0, s.get_status(), _("BI process with one ok subitem, should have state OK"))
s.add_process("fail subprocess", status_method="always_major")
self.assertEqual(2, s.get_status(), _("BI process with one failed item should have a critical state"))
s.status_method = 'use_best_state'
self.assertEqual(0, s.get_status(), _("BI process using use_best_state should be returning OK"))
def testBusinessRules(self):
s = BusinessProcess("example process")
self.assertEqual(3, s.get_status(), _("Empty bi process should have status unknown"))
s.add_process(process_name="always_ok", process_type="businessprocess", status_method='always_ok')
self.assertEqual(0, s.get_status(), _("BI process with one ok subitem, should have state OK"))
s.add_process("untagged process", status_method="always_major")
self.assertEqual(0, s.get_status(), _("BI subprocess that is untagged should yield an ok state"))
s.add_process("not critical process", status_method="always_major", tags="not critical")
self.assertEqual(1, s.get_status(), _("A Non critical subprocess should yield 'minor problem'"))
s.add_process("critical process", status_method="always_major", tags="mission critical")
self.assertEqual(2, s.get_status(), _("A critical process in failed state should yield major problem"))
s.add_process("another noncritical process", status_method="always_major", tags="not critical")
self.assertEqual(2, s.get_status(), _("Adding another non critical subprocess should still yield a critical state"))
class TestDomainProcess(TestCase):
""" Test the Domain business process type
"""
def setUp(self):
self.environment = adagios.utils.FakeAdagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.configure_livestatus()
self.environment.update_adagios_global_variables()
self.environment.start()
self.livestatus = self.environment.get_livestatus()
self.livestatus.test()
def tearDown(self):
self.environment.terminate()
def testHost(self):
domain = get_business_process(process_name='ok.is', process_type='domain')
# We don't exactly know the status of the domain, but lets run it anyway
# for smoketesting
domain.get_status()
class TestServiceProcess(TestCase):
""" Test Service Business process type """
def setUp(self):
self.environment = adagios.utils.FakeAdagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.configure_livestatus()
self.environment.update_adagios_global_variables()
self.environment.start()
self.livestatus = self.environment.get_livestatus()
self.livestatus.test()
def tearDown(self):
self.environment.terminate()
def testService(self):
service = get_business_process('ok_host/ok service 1', process_type='service')
status = service.get_status()
self.assertFalse(service.errors)
self.assertEqual(0, status, "The service should always have status OK")
class TestHostProcess(TestCase):
""" Test the Host business process type
"""
def setUp(self):
self.environment = adagios.utils.FakeAdagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.configure_livestatus()
self.environment.update_adagios_global_variables()
self.environment.start()
self.livestatus = self.environment.get_livestatus()
self.livestatus.test()
def tearDown(self):
self.environment.terminate()
def testNonExistingHost(self):
host = get_business_process('non-existant host', process_type='host')
self.assertEqual(3, host.get_status(), _("non existant host processes should have unknown status"))
def testExistingHost(self):
#localhost = self.livestatus.get_hosts('Filter: host_name = ok_host')
host = get_business_process('ok_host', process_type='host')
self.assertEqual(0, host.get_status(), _("the host ok_host should always has status ok"))
def testDomainProcess(self):
domain = get_business_process(process_name='oksad.is', process_type='domain')
# We don't exactly know the status of the domain, but lets run it anyway
        # for smoketesting
        domain.get_status()
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
(r'^/?$', 'bi.views.index'),
(r'^/add/?$', 'bi.views.add'),
(r'^/add/subprocess/?$', 'bi.views.add_subprocess'),
(r'^/add/graph/?$', 'bi.views.add_graph'),
(r'^/(?P<process_name>.+)/edit/status_method$', 'bi.views.change_status_calculation_method'),
(r'^/edit/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.edit'),
(r'^/json/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.json'),
(r'^/graphs/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.graphs_json'),
(r'^/delete/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.delete'),
(r'^/view/(?P<process_type>.+?)/(?P<process_name>.+?)/?$', 'bi.views.view'),
#(r'^/view/(?P<process_name>.+)/?$', 'bi.views.view'),
)
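# Resolution examples (process names are hypothetical, and this urls.py is assumed
# to be included under a /bi prefix, as the ^/ patterns suggest):
# GET /bi/view/businessprocess/web-shop -> adagios.bi.views.view(request,
#     process_type='businessprocess', process_name='web-shop')
# GET /bi/json/hostgroup/web-servers -> adagios.bi.views.json(request,
#     process_type='hostgroup', process_name='web-servers')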
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import simplejson
from django.http import HttpResponse
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from adagios.pnp.functions import run_pnp
from adagios.views import adagios_decorator
import adagios.bi
import adagios.bi.forms
from adagios.views import adagios_decorator, error_page
@adagios_decorator
def edit(request, process_name, process_type):
""" Edit one specific business process
"""
messages = []
bp = adagios.bi.get_business_process(process_name)
errors = bp.errors or []
status = bp.get_status()
add_subprocess_form = adagios.bi.forms.AddSubProcess(instance=bp)
form = adagios.bi.forms.BusinessProcessForm(instance=bp, initial=bp.data)
add_graph_form = adagios.bi.forms.AddGraphForm(instance=bp)
if request.method == 'GET':
form = adagios.bi.forms.BusinessProcessForm(
instance=bp, initial=bp.data)
elif request.method == 'POST':
if 'save_process' in request.POST:
form = adagios.bi.forms.BusinessProcessForm(
instance=bp, data=request.POST)
if form.is_valid():
form.save()
elif 'remove_process' in request.POST:
removeform = adagios.bi.forms.RemoveSubProcessForm(
instance=bp, data=request.POST)
if removeform.is_valid():
removeform.save()
elif 'add_process' in request.POST:
if form.is_valid():
form.add_process()
elif 'add_graph_submit_button' in request.POST:
add_graph_form = adagios.bi.forms.AddGraphForm(
instance=bp, data=request.POST)
if add_graph_form.is_valid():
add_graph_form.save()
elif 'add_subprocess_submit_button' in request.POST:
add_subprocess_form = adagios.bi.forms.AddSubProcess(
instance=bp, data=request.POST)
if add_subprocess_form.is_valid():
add_subprocess_form.save()
else:
errors.append(_("failed to add subprocess"))
add_subprocess_failed = True
else:
errors.append(
_("I don't know what submit button was clicked. please file a bug."))
# Load the process again, since any of the above probably made changes
# to it.
bp = adagios.bi.get_business_process(process_name)
return render_to_response('business_process_edit.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def add_graph(request):
""" Add one or more graph to a single business process
"""
c = {}
c['errors'] = []
c.update(csrf(request))
if request.method == 'GET':
source = request.GET
else:
source = request.POST
name = source.get('name', None)
if name:
c['name'] = name
bp = adagios.bi.get_business_process(name)
c['graphs'] = []
# Convert every graph= in the querystring into
# host_name,service_description,metric attribute
graphs = source.getlist('graph')
for graph in graphs:
tmp = graph.split(',')
if len(tmp) != 3:
c['errors'].append(_("Invalid graph string: %s") % (tmp))
graph_dict = {}
graph_dict['host_name'] = tmp[0]
graph_dict['service_description'] = tmp[1]
graph_dict['metric_name'] = tmp[2]
graph_dict['notes'] = tmp[2]
c['graphs'].append(graph_dict)
#
# When we get here, we have parsed all the data from the client, if
# its a post, lets add the graphs to our business process
if request.method == 'POST':
if not name:
raise Exception(
_("Booh! you need to supply name= to the querystring"))
for graph in c['graphs']:
form = adagios.bi.forms.AddGraphForm(instance=bp, data=graph)
if form.is_valid():
form.save()
else:
e = form.errors
raise e
return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
return render_to_response('business_process_add_graph.html', c, context_instance=RequestContext(request))
@adagios_decorator
def view(request, process_name, process_type=None):
""" View one specific business process
"""
c = {}
c['messages'] = []
c['errors'] = []
bp = adagios.bi.get_business_process(
process_name, process_type=process_type)
graphs_url = reverse(
'adagios.bi.views.graphs_json', kwargs={"process_type":process_type, "process_name": process_name})
c['bp'] = bp
c['graphs_url'] = graphs_url
return render_to_response('business_process_view.html', c, context_instance=RequestContext(request))
@adagios_decorator
def json(request, process_name=None, process_type=None):
""" Returns a list of all processes in json format.
If process_name is specified, return all sub processes.
"""
if not process_name:
processes = adagios.bi.get_all_processes()
else:
process = adagios.bi.get_business_process(process_name, process_type)
processes = process.get_processes()
result = []
# Turn processes into nice json
for i in processes:
json = {}
json['state'] = i.get_status()
json['name'] = i.name
json['display_name'] = i.display_name
result.append(json)
json = simplejson.dumps(result)
return HttpResponse(json, content_type="application/json")
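# Example of the payload the json() view above produces (names are hypothetical):
# [{"state": 0, "name": "web-shop", "display_name": "Web shop"},
#  {"state": 2, "name": "mail", "display_name": "Mail cluster"}]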
@adagios_decorator
def graphs_json(request, process_name, process_type):
""" Get graphs for one specific business process
"""
c = {}
c['messages'] = []
c['errors'] = []
import adagios.businessprocess
bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
graphs = []
if not bp.graphs:
return HttpResponse('[]')
for graph in bp.graphs or []:
if graph.get('graph_type') == 'pnp':
host_name = graph.get('host_name')
service_description = graph.get('service_description')
metric_name = graph.get('metric_name')
pnp_result = run_pnp('json', host=graph.get(
'host_name'), srv=graph.get('service_description'))
json_data = simplejson.loads(pnp_result)
for i in json_data:
if i.get('ds_name') == graph.get('metric_name'):
notes = graph.get('notes')
last_value = bp.get_pnp_last_value(
host_name, service_description, metric_name)
i['last_value'] = last_value
i['notes'] = notes
graphs.append(i)
graph_json = simplejson.dumps(graphs)
return HttpResponse(graph_json)
@adagios_decorator
def add_subprocess(request):
""" Add subitems to one specific businessprocess
"""
c = {}
c['messages'] = []
c['errors'] = []
c.update(csrf(request))
process_list, parameters = _business_process_parse_querystring(request)
if request.method == 'POST':
if 'name' not in request.POST:
raise Exception(
_("You must specify which subprocess to add all these objects to"))
parameters.pop('name')
bp = adagios.bi.get_business_process(request.POST.get('name'))
# Find all subprocesses in the post, can for each one call add_process
# with all parmas as well
for i in process_list:
process_name = i.get('name')
process_type = i.get('process_type')
bp.add_process(process_name, process_type, **parameters)
c['messages'].append('%s: %s added to %s' %
(process_type, process_name, bp.name))
bp.save()
return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
c['subprocesses'] = process_list
c['parameters'] = parameters
return render_to_response('business_process_add_subprocess.html', c, context_instance=RequestContext(request))
@adagios_decorator
def add(request):
""" View one specific business process
"""
c = {}
c['messages'] = []
c['errors'] = []
import adagios.businessprocess
bp = adagios.bi.BusinessProcess(_("New Business Process"))
if request.method == 'GET':
form = adagios.bi.forms.BusinessProcessForm(
instance=bp, initial=bp.data)
elif request.method == 'POST':
form = adagios.bi.forms.BusinessProcessForm(
instance=bp, data=request.POST)
if form.is_valid():
form.save()
return redirect('adagios.bi.views.edit', bp.process_type, bp.name)
return render_to_response('business_process_edit.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def index(request):
""" List all configured business processes
"""
c = {}
c['messages'] = []
c['errors'] = []
processes = adagios.bi.get_all_processes()
return render_to_response('business_process_list.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def delete(request, process_name, process_type):
""" Delete one specific business process """
import adagios.businessprocess
bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
if request.method == 'POST':
form = adagios.bi.forms.BusinessProcessForm(
instance=bp, data=request.POST)
form.delete()
return redirect('adagios.bi.views.index')
return render_to_response('business_process_delete.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def change_status_calculation_method(request, process_name):
import adagios.businessprocess
bp = adagios.bi.get_business_process(process_name)
if request.method == 'POST':
for i in bp.status_calculation_methods:
if i in request.POST:
bp.status_method = i
bp.save()
return redirect('adagios.bi.views.index')
def _business_process_parse_querystring(request):
""" Parses querystring into process_list and parameters
Returns:
(parameters,processs_list) where:
-- process_list is a list of all business processes that were mentioned in the querystring
-- Parameters is a dict of all other querystrings that were not in process_list and not in exclude list
"""
ignored_querystring_parameters = ("csrfmiddlewaretoken")
import adagios.businessprocess
data = {}
if request.method == 'GET':
data = request.GET
elif request.method == 'POST':
data = request.POST
else:
raise Exception(_("Booh, use either get or POST"))
parameters = {}
process_list = []
for key in data:
for value in data.getlist(key):
if key in ignored_querystring_parameters:
continue
type_of_process = adagios.bi.get_class(key, None)
if type_of_process is None:
parameters[key] = value
else:
process_type = type_of_process.process_type
process = adagios.bi.get_business_process(
value, process_type=process_type)
process_list.append(process)
return process_list, parameters
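# Worked example for the helper above (hypothetical querystring; assumes
# adagios.bi.get_class() maps the key 'host' to the host process class):
#   ?host=web01&tags=not+critical  would yield
#   process_list = [<host process 'web01'>]
#   parameters = {'tags': 'not critical'}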
########NEW FILE########
__FILENAME__ = businessprocess
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from adagios.bi import *
########NEW FILE########
__FILENAME__ = context_processors
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pynag.Model
import os
import getpass
from adagios import notifications, settings, add_plugin
from adagios.misc.rest import add_notification, clear_notification
import pynag.Model.EventHandlers
import pynag.Parsers
from pynag.Parsers import Livestatus
import adagios
import adagios.status.utils
from pynag import Model
import time
import datetime
from adagios import __version__
from adagios import userdata
from django.utils.translation import ugettext as _
def on_page_load(request):
""" Collection of actions that take place every page load """
    results = {}
    # Each helper returns a dict of template context variables; merge them all,
    # in the same order as before, so later helpers can override earlier keys.
    context_helpers = [
        reload_configfile, get_httpuser, get_tagged_comments,
        check_nagios_running, get_notifications, get_unhandled_problems,
        resolve_urlname, check_selinux, activate_plugins,
        check_destination_directory, check_nagios_cfg, get_current_time,
        get_okconfig, get_nagios_url, get_local_user, get_current_settings,
        get_plugins, get_current_version, get_serverside_includes,
        get_user_preferences, get_all_backends, get_all_nonworking_backends,
    ]
    for helper in context_helpers:
        for k, v in helper(request).items():
            results[k] = v
return results
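# Wiring sketch (illustrative; the exact settings layout is an assumption): this
# function is meant to be listed as a Django template context processor so every
# render receives the dict it returns, e.g. in settings:
#
#   TEMPLATE_CONTEXT_PROCESSORS = (
#       "django.core.context_processors.request",
#       "adagios.context_processors.on_page_load",
#   )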
def get_current_time(request):
""" Make current timestamp available to templates
"""
result = {}
try:
now = datetime.datetime.now()
result['current_time'] = now.strftime("%b %d %H:%M")
result['current_timestamp'] = int(time.time())
except Exception:
return result
return result
def get_serverside_includes(request):
""" Returns a list of serverside includes to include on this page """
result = {}
try:
result['ssi_headers'] = []
result['ssi_footers'] = []
dirname = adagios.settings.serverside_includes
        current_url = (resolve_urlname(request) or {}).get('urlname', 'None')
if not dirname:
return {}
if not os.path.isdir(dirname):
return {}
files = os.listdir(dirname)
common_header_file = "common-header.ssi"
common_footer_file = "common-footer.ssi"
custom_header_file = "{urlname}-header.ssi".format(urlname=current_url)
custom_footer_file = "{urlname}-footer.ssi".format(urlname=current_url)
if common_header_file in files:
result['ssi_headers'].append(dirname + "/" + common_header_file)
if common_footer_file in files:
result['ssi_footers'].append(dirname + "/" + common_footer_file)
if custom_header_file in files:
result['ssi_headers'].append(dirname + "/" + custom_header_file)
if custom_footer_file in files:
result['ssi_footers'].append(dirname + "/" + custom_footer_file)
except Exception:
return {}
return result
def activate_plugins(request):
""" Activates any plugins specified in settings.plugins """
for k, v in settings.plugins.items():
add_plugin(name=k, modulepath=v)
return {'misc_menubar_items': adagios.misc_menubar_items, 'menubar_items': adagios.menubar_items}
def get_local_user(request):
""" Return user that is running the adagios process under apache
"""
user = getpass.getuser()
return {'local_user': user}
def get_current_version(request):
""" Returns current adagios version """
return {'adagios_version': __version__}
def get_current_settings(request):
""" Return a copy of adagios.settings
"""
return {'settings': adagios.settings}
def resolve_urlname(request):
"""Allows us to see what the matched urlname for this
request is within the template"""
from django.core.urlresolvers import resolve
try:
res = resolve(request.path)
if res:
return {'urlname': res.url_name}
except Exception:
return {'urlname': 'None'}
def get_httpuser(request):
""" Get the current user that is authenticating to us and update event handlers"""
try:
remote_user = request.META.get('REMOTE_USER', None)
except Exception:
remote_user = "anonymous"
return {'remote_user': remote_user or "anonymous"}
def get_nagios_url(request):
""" Get url to legasy nagios interface """
return {'nagios_url': settings.nagios_url}
def get_tagged_comments(request):
""" (for status view) returns number of comments that mention the remote_user"""
try:
remote_user = request.META.get('REMOTE_USER', 'anonymous')
livestatus = adagios.status.utils.livestatus(request)
tagged_comments = livestatus.query(
'GET comments', 'Stats: comment ~ %s' % remote_user, columns=False)[0]
if tagged_comments > 0:
return {'tagged_comments': tagged_comments}
else:
return {}
except Exception:
return {}
def get_unhandled_problems(request):
""" Get number of any unhandled problems via livestatus """
results = {}
try:
livestatus = adagios.status.utils.livestatus(request)
num_problems = livestatus.query('GET services',
'Filter: state != 0',
'Filter: acknowledged = 0',
'Filter: host_acknowledged = 0',
'Filter: scheduled_downtime_depth = 0',
'Filter: host_scheduled_downtime_depth = 0',
'Stats: state != 0',
'Stats: host_state != 0',
columns=False)
results['num_problems'] = num_problems[0] + num_problems[1]
results['num_unhandled_problems'] = num_problems[0] + num_problems[1]
result = livestatus.query('GET services',
'Stats: state != 0',
'Stats: state != 0',
'Stats: acknowledged = 0',
'Stats: scheduled_downtime_depth = 0',
'Stats: host_state = 0',
'StatsAnd: 4',
columns=False
)
num_service_problems_all = result[0]
num_service_problems_unhandled = result[1]
result = livestatus.query('GET hosts',
'Stats: state != 0',
'Stats: state != 0',
'Stats: acknowledged = 0',
'Stats: scheduled_downtime_depth = 0',
'Stats: host_state = 1',
'StatsAnd: 4',
columns=False
)
num_host_problems_all = result[0]
num_host_problems_unhandled = result[1]
num_problems_all = num_service_problems_all + num_host_problems_all
num_problems_unhandled = num_service_problems_unhandled + num_host_problems_unhandled
num_problems = num_problems_unhandled
results = locals()
del results['livestatus']
del results['result']
del results['request']
except Exception:
pass
return results
def check_nagios_cfg(request):
""" Check availability of nagios.cfg """
return {'nagios_cfg': pynag.Model.config.cfg_file}
def check_destination_directory(request):
""" Check that adagios has a place to store new objects """
dest = settings.destination_directory
dest_dir_was_found = False
# If there are problems with finding nagios.cfg, we don't
# need to display any errors here regarding destination_directories
try:
Model.config.parse_maincfg()
except Exception:
return {}
for k, v in Model.config.maincfg_values:
if k != 'cfg_dir':
continue
if os.path.normpath(v) == os.path.normpath(dest):
dest_dir_was_found = True
if not dest_dir_was_found:
add_notification(level="warning", notification_id="dest_dir",
message=_("Destination for new objects (%s) is not defined in nagios.cfg") % dest)
elif not os.path.isdir(dest):
add_notification(level="warning", notification_id="dest_dir",
message=_("Destination directory for new objects (%s) is not found. Please create it.") % dest)
else:
clear_notification(notification_id="dest_dir")
return {}
def check_nagios_running(request):
""" Notify user if nagios is not running """
try:
if pynag.Model.config is None:
pynag.Model.config = pynag.Parsers.config(
adagios.settings.nagios_config)
nagios_pid = pynag.Model.config._get_pid()
return {"nagios_running": (nagios_pid is not None)}
except Exception:
return {}
def check_selinux(request):
""" Check if selinux is enabled and notify user """
notification_id = "selinux_active"
if settings.warn_if_selinux_is_active:
try:
if open('/sys/fs/selinux/enforce', 'r').readline().strip() == "1":
add_notification(
level="warning",
message=_('SELinux is enabled, which is likely to give your monitoring engine problems., see <a href="https://access.redhat.com/knowledge/docs/en-US/Red_Hat_Enterprise_Linux/6/html-single/Security-Enhanced_Linux/index.html#sect-Security-Enhanced_Linux-Enabling_and_Disabling_SELinux-Disabling_SELinux">here</a> for information on how to disable it.'),
notification_id=notification_id,
)
except Exception:
pass
else:
clear_notification(notification_id)
return {}
def get_notifications(request):
""" Returns a hash map of adagios.notifications """
return {"notifications": notifications}
def get_okconfig(request):
""" Returns {"okconfig":True} if okconfig module is installed.
"""
try:
if "okconfig" in settings.plugins:
return {"okconfig": True}
return {}
except Exception:
return {}
def get_plugins(request):
"""
"""
return {'plugins': settings.plugins}
def reload_configfile(request):
""" Load the configfile from settings.adagios_configfile and put its content in adagios.settings. """
try:
clear_notification("configfile")
locals = {}
execfile(settings.adagios_configfile, globals(), locals)
for k, v in locals.items():
settings.__dict__[k] = v
except Exception, e:
add_notification(
level="warning", message=str(e), notification_id="configfile")
return {}
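# Rough sketch of what the adagios configfile loaded above might contain. It is plain
# python executed with execfile(), so ordinary assignments become attributes of
# adagios.settings. All values below are hypothetical examples, not defaults:
#
#     nagios_config = "/etc/nagios/nagios.cfg"
#     nagios_url = "/nagios"
#     enable_githandler = True
#     warn_if_selinux_is_active = False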
def get_user_preferences(request):
""" Loads the preferences for the logged-in user. """
def theme_to_themepath(theme):
return os.path.join(settings.THEMES_FOLDER,
theme,
settings.THEME_ENTRY_POINT)
try:
user = userdata.User(request)
user.trigger_hooks()
results = user.to_dict()
except Exception:
results = adagios.settings.PREFS_DEFAULT
theme = results.get('theme', 'default')
results['theme_path'] = theme_to_themepath(theme)
return {'user_data': results}
def get_all_backends(request):
backends = adagios.status.utils.get_all_backends()
return {'backends': backends}
def get_all_nonworking_backends(request):
""" Returns the backends which don't answer at the time. """
b = [x for x in get_all_backends(request)['backends']
if not Livestatus(x).test(raise_error=False)]
return {'nonworking_backends': b}
if __name__ == '__main__':
on_page_load(request=None)
########NEW FILE########
__FILENAME__ = forms
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils import unittest
from django.test.client import Client
import pynag.Parsers
import tempfile
import os
from adagios.contrib import get_template_name
import pynag.Utils
class ContribTests(unittest.TestCase):
def setUp(self):
base_path = tempfile.mkdtemp()
self.base_path = base_path
def tearDown(self):
command = ['rm', '-rf', self.base_path]
pynag.Utils.runCommand(command=command, shell=False)
def testGetTemplateFilename(self):
base_path = self.base_path
file1 = base_path + '/file1'
dir1 = base_path + '/dir1'
file2 = dir1 + '/file2'
open(file1, 'w').write('this is file1')
os.mkdir(dir1)
open(file2, 'w').write('this is file2')
self.assertEqual(file1, get_template_name(base_path, 'file1'))
self.assertEqual(file2, get_template_name(base_path, 'dir1', 'file2'))
self.assertEqual(file2, get_template_name(base_path, 'dir1', 'file2', 'unneeded_argument'))
# Try to return a filename that is outside base_path
exception1 = lambda: get_template_name(base_path, '/etc/passwd')
self.assertRaises(Exception, exception1)
# Try to return a filename that is outside base_path
exception2 = lambda: get_template_name(base_path, '/etc/', 'passwd')
self.assertRaises(Exception, exception2)
# Try to return a filename that is outside base_path
exception3 = lambda: get_template_name(base_path, '..', 'passwd')
self.assertRaises(Exception, exception3)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
(r'^/$', 'contrib.views.index'),
(r'^/(?P<arg1>.+)?$', 'contrib.views.contrib'),
(r'^/(?P<arg1>.+)/(?P<arg2>.+)/?$', 'contrib.views.contrib'),
                       (r'^/(?P<arg1>.+)/(?P<arg2>.+)/(?P<arg3>.+)/?$', 'contrib.views.contrib'),
                       (r'^/(?P<arg1>.+)/(?P<arg2>.+)/(?P<arg3>.+)/(?P<arg4>.+)/?$', 'contrib.views.contrib'),
)
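# Illustrative examples only (assuming this URLconf is included under a /contrib
# prefix): "/contrib/" resolves to contrib.views.index, while a hypothetical
# "/contrib/myview.html" resolves to contrib.views.contrib with arg1="myview.html".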
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
import adagios.settings
import adagios.status.utils
import os
from adagios.views import adagios_decorator, error_page
from django.template import RequestContext
from adagios.contrib import get_template_name
from django import template
from django.utils.translation import ugettext as _
@adagios_decorator
def index(request, contrib_dir=None):
""" List all available user contributed views in adagios.settings.contrib_dir """
messages = []
errors = []
if not contrib_dir:
contrib_dir = adagios.settings.contrib_dir
views = os.listdir(contrib_dir)
if not views:
errors.append(_("Directory '%s' is empty") % contrib_dir)
return render_to_response("contrib_index.html", locals(), context_instance=RequestContext(request))
@adagios_decorator
def contrib(request, arg1, arg2=None, arg3=None, arg4=None):
messages = []
errors = []
full_path = get_template_name(adagios.settings.contrib_dir, arg1, arg2, arg3, arg4)
if os.path.isdir(full_path):
return index(request, contrib_dir=full_path)
with open(full_path) as f:
content = f.read()
# Lets populate local namespace with convenient data
services = lambda: locals().get('services', adagios.status.utils.get_services(request))
hosts = lambda: locals().get('hosts', adagios.status.utils.get_hosts(request))
    service_problems = lambda: locals().get('service_problems', adagios.status.utils.get_services(request, state__isnot='0'))
host_problems = lambda: locals().get('host_problems', adagios.status.utils.get_hosts(request, state__isnot='0'))
statistics = lambda: locals().get('statistics', adagios.status.utils.get_statistics(request))
t = template.Template(content)
c = RequestContext(request, locals())
html = t.render(c)
return HttpResponse(html)
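# Rough sketch of a user contributed template this view could render. The file name is
# hypothetical and would live inside adagios.settings.contrib_dir; the variables used
# are the lambdas prepared above (django templates call callables automatically when
# resolving variables):
#
#     {# myview.html #}
#     <h1>Custom overview</h1>
#     <p>{{ hosts|length }} hosts / {{ services|length }} services</p>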
########NEW FILE########
__FILENAME__ = exceptions
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" Exceptions that Adagios uses and raises
"""
class AdagiosError(Exception):
""" Base Class for all Adagios Exceptions """
pass
class AccessDenied(AdagiosError):
""" This exception is raised whenever a user tries to access a page he does not have access to. """
def __init__(self, username, access_required, message, path=None, *args, **kwargs):
self.username = username
self.access_required = access_required
self.message = message
self.path = path
super(AccessDenied, self).__init__(message, *args, **kwargs)
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils.encoding import smart_str
from django import forms
class AdagiosForm(forms.Form):
""" Base class for all forms in this module. Forms that use pynag in any way should inherit from this one.
"""
def clean(self):
cleaned_data = {}
tmp = super(AdagiosForm, self).clean()
for k,v in tmp.items():
if isinstance(k, (unicode)):
k = smart_str(k)
if isinstance(v, (unicode)):
v = smart_str(v)
cleaned_data[k] = v
return cleaned_data
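# Minimal usage sketch (the form below is hypothetical and not shipped with adagios):
# forms that feed values into pynag inherit from AdagiosForm so that cleaned_data keys
# and values are byte strings rather than unicode objects:
#
#     class ExampleHostForm(AdagiosForm):
#         host_name = forms.CharField()
#         address = forms.CharField(required=False)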
########NEW FILE########
__FILENAME__ = manage
#!/usr/bin/python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.core.mail import send_mail
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
import os.path
from adagios import settings
import adagios.utils
from pynag import Model, Control
from django.core.mail import EmailMultiAlternatives
import pynag.Parsers
import pynag.Control.Command
TOPIC_CHOICES = (
('general', _('General Suggestion')),
    ('bug', _('I think I have found a bug')),
    ('suggestion', _('I have a particular task in mind that I would like to do with Adagios')),
    ('easier', _('I have an idea how to make a certain task easier to do')),
)
pnp_loglevel_choices = [
('0', _('0 - Only Errors')),
('1', _('1 - Little logging')),
('2', _('2 - Log Everything')),
    ('-1', _('-1 Debug mode (log all and slower processing)'))
]
pnp_log_type_choices = [('syslog', 'syslog'), ('file', 'file')]
COMMAND_CHOICES = [('reload', 'reload'), ('status', 'status'),
('restart', 'restart'), ('stop', 'stop'), ('start', 'start')]
initial_paste = """
define service {
host_name host01.example.com
service_description http://host01.example.com
use template-http
}
define service {
name template-http
check_command okc-check_http
}
"""
class ContactUsForm(forms.Form):
topic = forms.ChoiceField(choices=TOPIC_CHOICES)
sender = forms.CharField(
required=False,
help_text=_("Optional email address if you want feedback from us"),
)
message = forms.CharField(
widget=forms.widgets.Textarea(
attrs={'rows': 15, 'cols': 40}),
help_text=_("See below for examples of good suggestions"),
)
def save(self):
from_address = '[email protected]'
to_address = ["[email protected]"]
subject = _("Suggestion from Adagios")
sender = self.cleaned_data['sender']
topic = self.cleaned_data['topic']
message = self.cleaned_data['message']
msg = _("""
topic: %(topic)s
from: %(sender)s
%(message)s
""") % {'topic': topic, 'sender': sender, 'message': message}
send_mail(subject, msg, from_address, to_address, fail_silently=False)
class UserdataForm(forms.Form):
language = forms.ChoiceField(
choices=settings.LANGUAGES,
required=False
)
theme = forms.ChoiceField(
choices=[(x, x) for x in adagios.utils.get_available_themes()],
required=False
)
refresh_rate = forms.IntegerField(
help_text="For pages that auto-reload. Set the number of seconds to wait between page refreshes. "
"Set refresh rate to 0 to disable automatic refreshing.",
required=False,
)
class AdagiosSettingsForm(forms.Form):
nagios_config = forms.CharField(
required=False, initial=settings.nagios_config,
help_text=_("Path to nagios configuration file. i.e. /etc/nagios/nagios.cfg"))
destination_directory = forms.CharField(
required=False, initial=settings.destination_directory, help_text=_("Where to save new objects that adagios creates."))
nagios_url = forms.CharField(required=False, initial=settings.nagios_url,
help_text=_("URL (relative or absolute) to your nagios webcgi. Adagios will use this to make it simple to navigate from a configured host/service directly to the cgi."))
    nagios_init_script = forms.CharField(
        help_text=_("Path to your nagios init script. Adagios will use this when stopping/starting/reloading nagios"))
    nagios_binary = forms.CharField(
        help_text=_("Path to your nagios daemon binary. Adagios will use this to verify config with 'nagios -v nagios_config'"))
livestatus_path = forms.CharField(
help_text=_("Path to MK Livestatus socket. If left empty Adagios will try to autodiscover from your nagios.cfg"),
required=False,
)
enable_githandler = forms.BooleanField(
required=False, initial=settings.enable_githandler, help_text=_("If set. Adagios will commit any changes it makes to git repository."))
enable_loghandler = forms.BooleanField(
required=False, initial=settings.enable_loghandler, help_text=_("If set. Adagios will log any changes it makes to a file."))
enable_authorization = forms.BooleanField(
required=False, initial=settings.enable_authorization,
help_text=_("If set. Users in Status view will only see hosts/services they are a contact for. Unset means everyone will see everything."))
enable_status_view = forms.BooleanField(
required=False, initial=settings.enable_status_view,
help_text=_("If set. Enable status view which is an alternative to nagios legacy web interface. You will need to restart web server for the changes to take effect"))
auto_reload = forms.BooleanField(
required=False, initial=settings.auto_reload,
help_text=_("If set. Nagios is reloaded automatically after every change."))
warn_if_selinux_is_active = forms.BooleanField(
required=False, help_text=_("Adagios does not play well with SElinux. So lets issue a warning if it is active. Only disable this if you know what you are doing."))
pnp_filepath = forms.CharField(
help_text=_("Full path to your pnp4nagios/index.php file. Adagios will use this to generate graphs"))
pnp_url = forms.CharField(
help_text=_("Full or relative url to pnp4nagios web interface, adagios can use this to link directly to pnp"))
map_center = forms.CharField(
help_text=_("Default coordinates when opening up the world map. This should be in the form of longitude,latitude"))
map_zoom = forms.CharField(
help_text=_("Default Zoom level when opening up the world map. 10 is a good default value"))
language = forms.ChoiceField(choices=settings.LANGUAGES, required=False)
theme = forms.ChoiceField(required=False, choices=[(x,x) for x in adagios.utils.get_available_themes()])
refresh_rate = forms.IntegerField(
help_text="For pages that auto-reload. Set the number of seconds to wait between page refreshes. "
"Set refresh rate to 0 to disable automatic refreshing."
)
enable_graphite = forms.BooleanField(required=False, help_text="If set. Include graphite graphs in status views")
graphite_url = forms.CharField(help_text="Path to your graphite install.", required=False)
graphite_querystring = forms.CharField(help_text="Querystring that is passed into graphite's /render method. {host} is replaced with respective hostname while {host_} will apply common graphite escaping. i.e. example.com -> example_com", required=False)
graphite_title = forms.CharField(help_text="Use this title on all graphs coming from graphite", required=False)
include = forms.CharField(
required=False, help_text=_("Include configuration options from files matching this pattern"))
def save(self):
# First of all, if configfile does not exist, lets try to create it:
if not os.path.isfile(settings.adagios_configfile):
open(settings.adagios_configfile, 'w').write(
_("# Autocreated by adagios"))
for k, v in self.cleaned_data.items():
Model.config._edit_static_file(
attribute=k, new_value=v, filename=settings.adagios_configfile)
self.adagios_configfile = settings.adagios_configfile
#settings.__dict__[k] = v
def __init__(self, *args, **kwargs):
# Since this form is always bound, lets fetch current configfiles and
# prepare them as post:
if 'data' not in kwargs or kwargs['data'] == '':
kwargs['data'] = settings.__dict__
super(self.__class__, self).__init__(*args, **kwargs)
def clean_pnp_filepath(self):
filename = self.cleaned_data['pnp_filepath']
return self.check_file_exists(filename)
def clean_destination_directory(self):
filename = self.cleaned_data['destination_directory']
return self.check_file_exists(filename)
def clean_nagios_init_script(self):
filename = self.cleaned_data['nagios_init_script']
if filename.startswith('sudo'):
self.check_file_exists(filename.split()[1])
else:
self.check_file_exists(filename)
return filename
def clean_nagios_binary(self):
filename = self.cleaned_data['nagios_binary']
return self.check_file_exists(filename)
def clean_nagios_config(self):
filename = self.cleaned_data['nagios_config']
return self.check_file_exists(filename)
def check_file_exists(self, filename):
""" Raises validation error if filename does not exist """
if not os.path.exists(filename):
raise forms.ValidationError('No such file or directory')
return filename
def clean(self):
cleaned_data = super(self.__class__, self).clean()
for k, v in cleaned_data.items():
# Convert all unicode to quoted strings
if type(v) == type(u''):
cleaned_data[k] = str('''"%s"''' % v)
# Convert all booleans to True/False strings
elif type(v) == type(False):
cleaned_data[k] = str(v)
return cleaned_data
class EditAllForm(forms.Form):
""" This form intelligently modifies all attributes of a specific type.
"""
def __init__(self, object_type, attribute, new_value, *args, **kwargs):
self.object_type = object_type
self.attribute = attribute
self.new_value = new_value
        super(self.__class__, self).__init__(*args, **kwargs)
search_filter = {}
search_filter['object_type'] = object_type
search_filter['%s__isnot' % attribute] = new_value
items = Model.ObjectDefinition.objects.filter(**search_filter)
interesting_objects = []
for i in items:
if attribute in i._defined_attributes or i.use is None:
interesting_objects.append(i)
self.interesting_objects = interesting_objects
for i in interesting_objects:
self.fields['modify_%s' % i.get_id()] = forms.BooleanField(
required=False, initial=True)
class PNPActionUrlForm(forms.Form):
""" This form handles applying action_url to bunch of hosts and services """
#apply_action_url = forms.BooleanField(required=False,initial=True,help_text="If set, apply action_url to every service object in nagios")
action_url = forms.CharField(
required=False, initial="/pnp4nagios/graph?host=$HOSTNAME$&srv=$SERVICEDESC$",
help_text=_("Reset the action_url attribute of every service check in your nagios configuration with this one. "))
def save(self):
action_url = self.cleaned_data['action_url']
services = Model.Service.objects.filter(action_url__isnot=action_url)
self.total_services = len(services)
self.error_services = 0
for i in services:
if 'action_url' in i._defined_attributes or i.use is None:
i.action_url = action_url
try:
i.save()
except Exception:
self.error_services += 1
class PNPTemplatesForm(forms.Form):
""" This form manages your pnp4nagios templates """
def __init__(self, *args, **kwargs):
self.template_directories = []
self.templates = []
tmp = Model.config._load_static_file('/etc/pnp4nagios/config.php')
for k, v in tmp:
if k == "$conf['template_dirs'][]":
# strip all ' and " from directory
directory = v.strip(";").strip('"').strip("'")
self.template_directories.append(directory)
if os.path.isdir(directory):
for f in os.listdir(directory):
self.templates.append("%s/%s" % (directory, f))
super(self.__class__, self).__init__(*args, **kwargs)
class PNPConfigForm(forms.Form):
""" This form handles the npcd.cfg configuration file """
    user = forms.CharField(
        help_text=_("npcd service will have privileges of this user"))
    group = forms.CharField(
        help_text=_("npcd service will have privileges of this group"))
log_type = forms.ChoiceField(
widget=forms.RadioSelect, choices=pnp_log_type_choices, help_text=_("Define if you want to log to 'syslog' or 'file'"))
log_file = forms.CharField(
help_text=_("If log_type is set to file. Log to this file"))
max_logfile_size = forms.IntegerField(
help_text=_("Defines the maximum filesize (bytes) before logfile will rotate."))
log_level = forms.ChoiceField(
help_text=_("How much should we log?"), choices=pnp_loglevel_choices)
perfdata_spool_dir = forms.CharField(
help_text=_("where we can find the performance data files"))
perfdata_file_run_cmd = forms.CharField(
help_text=_("execute following command for each found file in perfdata_spool_dir"))
perfdata_file_run_cmd_args = forms.CharField(
required=False, help_text=_("optional arguments to perfdata_file_run_cmd"))
identify_npcd = forms.ChoiceField(widget=forms.RadioSelect, choices=(
('1', 'Yes'), ('0', 'No')), help_text=_("If yes, npcd will append -n to the perfdata_file_run_cmd"))
npcd_max_threads = forms.IntegerField(
help_text=_("Define how many parallel threads we should start"))
sleep_time = forms.IntegerField(
help_text=_("How many seconds npcd should wait between dirscans"))
load_threshold = forms.FloatField(
help_text=_("npcd won't start if load is above this threshold"))
pid_file = forms.CharField(help_text=_("Location of your pid file"))
perfdata_file = forms.CharField(
help_text=_("Where should npcdmod.o write the performance data. Must not be same directory as perfdata_spool_dir"))
perfdata_spool_filename = forms.CharField(
help_text=_("Filename for the spooled files"))
perfdata_file_processing_interval = forms.IntegerField(
help_text=_("Interval between file processing"))
def __init__(self, initial=None, *args, **kwargs):
if not initial:
initial = {}
my_initial = {}
# Lets use PNPBrokerModuleForm to find sensible path to npcd config
# file
broker_form = PNPBrokerModuleForm()
self.npcd_cfg = broker_form.initial.get('config_file')
npcd_values = Model.config._load_static_file(self.npcd_cfg)
for k, v in npcd_values:
my_initial[k] = v
super(self.__class__, self).__init__(
initial=my_initial, *args, **kwargs)
def save(self):
for i in self.changed_data:
Model.config._edit_static_file(
attribute=i, new_value=self.cleaned_data[i], filename=self.npcd_cfg)
class EditFileForm(forms.Form):
""" Manages editing of a single file """
filecontent = forms.CharField(widget=forms.Textarea(
attrs={'wrap': 'off', 'rows': '50', 'cols': '2000'}))
def __init__(self, filename, initial=None, *args, **kwargs):
if not initial:
initial = {}
self.filename = filename
my_initial = initial.copy()
if 'filecontent' not in my_initial:
my_initial['filecontent'] = open(filename).read()
super(self.__class__, self).__init__(
initial=my_initial, *args, **kwargs)
def save(self):
if 'filecontent' in self.changed_data:
data = self.cleaned_data['filecontent']
open(self.filename, 'w').write(data)
class PNPBrokerModuleForm(forms.Form):
""" This form is responsible for configuring PNP4Nagios. """
#enable_pnp= forms.BooleanField(required=False, initial=True,help_text="If set, PNP will be enabled and will graph Nagios Performance Data.")
broker_module = forms.CharField(
help_text=_("Full path to your npcdmod.o broker module that shipped with your pnp4nagios installation"))
config_file = forms.CharField(
help_text=_("Full path to your npcd.cfg that shipped with your pnp4nagios installation"))
event_broker_options = forms.IntegerField(
initial="-1", help_text=_("Nagios's default of -1 is recommended here. PNP Documentation says you will need at least bits 2 and 3. Only change this if you know what you are doing."))
process_performance_data = forms.BooleanField(
required=False, initial=True, help_text=_("PNP Needs the nagios option process_performance_data enabled to function. Make sure it is enabled."))
#apply_action_url = forms.BooleanField(required=False,initial=True,help_text="If set, apply action_url to every service object in nagios")
#action_url=forms.CharField(required=False,initial="/pnp4nagios/graph?host=$HOSTNAME$&srv=$SERVICEDESC$", help_text="Action url that your nagios objects can use to access perfdata")
def clean_broker_module(self):
""" Raises validation error if filename does not exist """
filename = self.cleaned_data['broker_module']
if not os.path.exists(filename):
raise forms.ValidationError('File not found')
return filename
def clean_config_file(self):
""" Raises validation error if filename does not exist """
filename = self.cleaned_data['config_file']
if not os.path.exists(filename):
raise forms.ValidationError('File not found')
return filename
def __init__(self, initial=None, *args, **kwargs):
if not initial:
initial = {}
my_initial = {}
Model.config.parse()
maincfg_values = Model.config.maincfg_values
self.nagios_configline = None
for k, v in Model.config.maincfg_values:
if k == 'broker_module' and v.find('npcdmod.o') > 0:
self.nagios_configline = v
v = v.split()
my_initial['broker_module'] = v.pop(0)
for i in v:
if i.find('config_file=') > -1:
my_initial['config_file'] = i.split('=', 1)[1]
elif k == "event_broker_options":
my_initial[k] = v
# If view specified any initial values, they overwrite ours
for k, v in initial.items():
my_initial[k] = v
if 'broker_module' not in my_initial:
my_initial['broker_module'] = self.get_suggested_npcdmod_path()
if 'config_file' not in my_initial:
my_initial['config_file'] = self.get_suggested_npcd_path()
super(self.__class__, self).__init__(
initial=my_initial, *args, **kwargs)
def get_suggested_npcdmod_path(self):
""" Returns best guess for full path to npcdmod.o file """
possible_locations = [
"/usr/lib/pnp4nagios/npcdmod.o",
"/usr/lib64/nagios/brokers/npcdmod.o",
]
for i in possible_locations:
if os.path.isfile(i):
return i
return possible_locations[-1]
def get_suggested_npcd_path(self):
""" Returns best guess for full path to npcd.cfg file """
possible_locations = [
"/etc/pnp4nagios/npcd.cfg"
]
for i in possible_locations:
if os.path.isfile(i):
return i
return possible_locations[-1]
def save(self):
if 'broker_module' in self.changed_data or 'config_file' in self.changed_data or self.nagios_configline is None:
v = "%s config_file=%s" % (
self.cleaned_data['broker_module'], self.cleaned_data['config_file'])
Model.config._edit_static_file(
attribute="broker_module", new_value=v, old_value=self.nagios_configline, append=True)
# We are supposed to handle process_performance_data attribute.. lets
# do that here
process_performance_data = "1" if self.cleaned_data[
'process_performance_data'] else "0"
Model.config._edit_static_file(
attribute="process_performance_data", new_value=process_performance_data)
# Update event broker only if it has changed
name = "event_broker_options"
if name in self.changed_data:
Model.config._edit_static_file(
attribute=name, new_value=self.cleaned_data[name])
class PluginOutputForm(forms.Form):
plugin_output = forms.CharField(
widget=forms.Textarea(attrs={'wrap': 'off', 'cols': '80'}))
def parse(self):
from pynag import Utils
plugin_output = self.cleaned_data['plugin_output']
output = Utils.PluginOutput(plugin_output)
self.results = output
class NagiosServiceForm(forms.Form):
""" Maintains control of the nagios service / reload / restart / etc """
#path_to_init_script = forms.CharField(help_text="Path to your nagios init script", initial=NAGIOS_INIT)
#nagios_binary = forms.CharField(help_text="Path to your nagios binary", initial=NAGIOS_BIN)
#command = forms.ChoiceField(choices=COMMAND_CHOICES)
def save(self):
#nagios_bin = self.cleaned_data['nagios_bin']
if "reload" in self.data:
command = "reload"
elif "restart" in self.data:
command = "restart"
elif "stop" in self.data:
command = "stop"
elif "start" in self.data:
command = "start"
elif "status" in self.data:
command = "status"
elif "verify" in self.data:
command = "verify"
else:
raise Exception(_("Unknown command"))
self.command = command
nagios_init = settings.nagios_init_script
nagios_binary = settings.nagios_binary
nagios_config = settings.nagios_config or pynag.Model.config.cfg_file
if command == "verify":
command = "%s -v '%s'" % (nagios_binary, nagios_config)
else:
command = "%s %s" % (nagios_init, command)
code, stdout, stderr = pynag.Utils.runCommand(command)
self.stdout = stdout or ""
self.stderr = stderr or ""
self.exit_code = code
    def verify(self):
        """ Run "nagios -v nagios.cfg" and collect any errors/warnings found.
        Returns:
            {
                'errors': [...],
                'warnings': [...]
            }
        """
        nagios_binary = settings.nagios_binary
        nagios_config = settings.nagios_config
        command = "%s -v '%s'" % (nagios_binary, nagios_config)
        code, stdout, stderr = pynag.Utils.runCommand(command)
        self.stdout = stdout or None
        self.stderr = stderr or None
        self.exit_code = code
        errors = []
        warnings = []
        for line in stdout.splitlines():
            line = line.strip()
            if line.lower().startswith('warning:'):
                warnings.append(line)
            elif line.lower().startswith('error:'):
                errors.append(line)
        return {'errors': errors, 'warnings': warnings}
class SendEmailForm(forms.Form):
""" Form used to send email to one or more contacts regarding particular services
"""
to = forms.CharField(
required=True,
help_text=_("E-mail address"),
)
message = forms.CharField(
widget=forms.widgets.Textarea(attrs={'rows': 15, 'cols': 40}),
required=False,
help_text=_("Message that is to be sent to recipients"),
)
add_myself_to_cc = forms.BooleanField(
required=False,
help_text=_("If checked, you will be added automatically to CC")
)
acknowledge_all_problems = forms.BooleanField(
required=False,
help_text=_("If checked, also acknowledge all problems as they are sent")
)
def __init__(self, remote_user, *args, **kwargs):
""" Create a new instance of SendEmailForm, contact name and email is used as from address.
"""
self.remote_user = remote_user
#self.contact_email = contact_email
        self.html_content = _("There is no HTML content with this message.")
self.services = []
self.hosts = []
self.status_objects = []
self._resolve_remote_user(self.remote_user)
super(self.__class__, self).__init__(*args, **kwargs)
def save(self):
subject = _("%s sent you a a message through adagios") % self.remote_user
cc_address = []
from_address = self._resolve_remote_user(self.remote_user)
# Check if _resolve_remote_user did in fact return an email address - avoid SMTPSenderRefused.
import re # re built in Py1.5+
if re.compile('([\w\-\.]+@(\w[\w\-]+\.)+[\w\-]+)').search(from_address) is None:
from_address = str(from_address) + '@no.domain'
to_address = self.cleaned_data['to']
to_address = to_address.split(',')
text_content = self.cleaned_data['message']
text_content = text_content.replace('\n','<br>')
# self.html_content is rendered in misc.views.mail()
html_content = text_content + "<p></p>" + self.html_content
if self.cleaned_data['add_myself_to_cc']:
cc_address.append(from_address)
if self.cleaned_data['acknowledge_all_problems']:
comment = _("Sent mail to %s") % self.cleaned_data['to']
self.acknowledge_all_services(comment)
self.acknowledge_all_hosts(comment)
# Here we actually send some email:
msg = EmailMultiAlternatives(
subject=subject, body=text_content, from_email=from_address, cc=cc_address, to=to_address)
msg.attach_alternative(html_content, "text/html")
msg.send()
def acknowledge_all_hosts(self, comment):
""" Acknowledge all problems in self.hosts
"""
for i in self.hosts:
host_name = i.get('host_name')
sticky = "1"
persistent = "0"
notify = "0"
author = self.remote_user
pynag.Control.Command.acknowledge_host_problem(host_name=host_name,
sticky=sticky,
persistent=persistent,
notify=notify,
author=author,
comment=comment)
def acknowledge_all_services(self, comment):
""" Acknowledge all problems in self.services
"""
for i in self.services:
host_name = i.get('host_name')
service_description = i.get('description')
sticky = "1"
persistent = "0"
notify = "0"
author = self.remote_user
pynag.Control.Command.acknowledge_svc_problem(host_name=host_name,
service_description=service_description,
sticky=sticky,
persistent=persistent,
notify=notify,
author=author,
comment=comment)
def _resolve_remote_user(self, username):
""" Returns a valid "Full Name <[email protected]>" for remote http authenticated user.
If Remote user is a nagios contact, then return: Contact_Alias <contact_email>"
Else if remote user is a valid email address, return that address
Else return None
"""
import adagios.status.utils
livestatus = adagios.status.utils.livestatus(request=None)
try:
contact = livestatus.get_contact(username)
return "%s <%s>" % (contact.get('alias'), contact.get('email'))
except IndexError:
# If we get here, then remote_user does not exist as a contact.
return username
class PasteForm(forms.Form):
paste = forms.CharField(initial=initial_paste, widget=forms.Textarea())
def parse(self):
c = pynag.Parsers.config()
self.config = c
c.reset()
paste = self.cleaned_data['paste']
# Also convert raw paste into a string so we can display errors at the
# right place:
self.pasted_string = paste.splitlines()
items = c.parse_string(paste)
c.pre_object_list = items
c._post_parse()
all_objects = []
for object_type, objects in c.data.items():
model = pynag.Model.string_to_class.get(
object_type, pynag.Model.ObjectDefinition)
for i in objects:
Class = pynag.Model.string_to_class.get(
i['meta']['object_type'])
my_object = Class(item=i)
all_objects.append(my_object)
self.objects = all_objects
########NEW FILE########
__FILENAME__ = helpers
#!/usr/bin/python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Convenient stateless functions for pynag. This module is used by the /rest/ interface of adagios.
"""
import platform
import re
from pynag import Model
from pynag import Parsers
from pynag import Control
from pynag import Utils
from pynag import __version__
from socket import gethostbyname_ex
import adagios.settings
from django.utils.translation import ugettext as _
#_config = Parsers.config(adagios.settings.nagios_config)
#_config.parse()
version = __version__
def _get_dict(x):
x.__delattr__('objects')
return x._original_attributes
def get_objects(object_type=None, with_fields="id,shortname,object_type", **kwargs):
""" Get any type of object definition in a dict-compatible fashion
Arguments:
object_type (optional) -- Return objects of this type
        with_fields (optional) -- comma separated list of attributes to show (default=id,shortname,object_type)
any other argument is passed on as a filter to pynag
Examples:
# show all active hosts and their ip address
get_objects(object_type="host", register="1", with_fields="host_name,address")
# show all attributes of all services
get_objects(object_type="service", with_fields='*')
Returns:
List of ObjectDefinition
"""
tmp = Model.ObjectDefinition.objects.filter(
object_type=object_type, **kwargs)
with_fields = with_fields.split(',')
# return map(lambda x: _get_dict(x), tmp)
return map(lambda x: object_to_dict(x, attributes=with_fields), tmp)
def servicestatus(with_fields="host_name,service_description,current_state,plugin_output"):
""" Returns a list of all active services and their current status """
s = Parsers.status()
s.parse()
fields = with_fields.split(',')
result_list = []
for serv in s.data['servicestatus']:
current_object = {}
for k, v in serv.items():
if fields == ['*'] or k in fields:
current_object[k] = v
result_list.append(current_object)
return result_list
def object_to_dict(object, attributes="id,shortname,object_type"):
""" Takes in a specific object definition, returns a hash maps with "attributes" as keys"""
result = {}
if not attributes or attributes == '*':
return object._original_attributes
elif isinstance(attributes, list):
pass
else:
attributes = attributes.split(',')
for k in attributes:
result[k] = object[k]
return result
def get_object(id, with_fields="id,shortname,object_type"):
"""Returns one specific ObjectDefinition"""
o = Model.ObjectDefinition.objects.get_by_id(id)
return object_to_dict(o, attributes=with_fields)
def delete_object(object_id, recursive=False, cleanup_related_items=True):
""" Delete one specific ObjectDefinition
Arguments:
object_id -- The pynag id of the definition you want to delete
cleanup_related_items -- If True, clean up references to this object in other definitions
recursive -- If True, also remove other objects that depend on this one.
For example, when deleting a host, also delete all its services
Returns:
True on success. Raises exception on failure.
"""
o = Model.ObjectDefinition.objects.get_by_id(object_id)
o.delete(recursive=recursive, cleanup_related_items=cleanup_related_items)
return True
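# Example call (the object id below is hypothetical): delete a host definition, its
# services, and any references to it in other definitions:
#
#     delete_object(object_id="some_pynag_object_id", recursive=True, cleanup_related_items=True)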
def get_host_names(invalidate_cache=False):
""" Returns a list of all hosts """
if invalidate_cache is True:
raise NotImplementedError()
all_hosts = Model.Host.objects.all
hostnames = []
for i in all_hosts:
if not i['host_name'] is None:
hostnames.append(i['host_name'])
return sorted(hostnames)
def change_attribute(id, attribute_name, new_value):
"""Changes object with the designated ID to file
Arguments:
id -- object_id of the definition to be saved
attribute_name -- name of the attribute (i.e. "host_name")
new_value -- new value (i.e. "host.example.com")
"""
o = Model.ObjectDefinition.objects.get_by_id(id)
o[attribute_name] = new_value
o.save()
def change_service_attribute(identifier, new_value):
"""
Change one service that is identified in the form of:
host_name::service_description::attribute_name
Examples:
>>> change_service_attribute("localhost::Ping::service_description", "Ping2")
Returns:
True on success,
Raises:
Exception on error
"""
tmp = identifier.split('::')
if len(tmp) != 3:
raise ValueError(
_("identifier must be in the form of host_name::service_description::attribute_name (got %s)") % identifier)
host_name, service_description, attribute_name = tmp
try:
service = Model.Service.objects.get_by_shortname(
"%s/%s" % (host_name, service_description))
except KeyError, e:
raise KeyError(_("Could not find service %s") % e)
service[attribute_name] = new_value
service.save()
return True
def copy_object(object_id, recursive=False, **kwargs):
""" Copy one objectdefinition.
Arguments:
object_id -- id of the object to be copied
recursive -- If True, also copy related child objects
**kwargs -- Any other argument will be treated as an attribute
-- to change on the new object
Returns:
"Object successfully copied"
Examples:
copy_object(1234567890, host_name=new_hostname)
"Object successfully copied to <filename>"
"""
o = Model.ObjectDefinition.objects.get_by_id(object_id)
new_object = o.copy(recursive=recursive, **kwargs)
return _("Object successfully copied to %s") % new_object.get_filename()
def run_check_command(object_id):
""" Runs the check_command for one specified object
Arguments:
object_id -- object_id of the definition (i.e. host or service)
Returns:
[return_code,stdout,stderr]
"""
if platform.node() == 'adagios.opensource.is':
        return 1, _('Running check commands is disabled in demo-environment'), ''
o = Model.ObjectDefinition.objects.get_by_id(object_id)
return o.run_check_command()
def set_maincfg_attribute(attribute, new_value, old_value='None', append=False):
""" Sets specific configuration values of nagios.cfg
Required Arguments:
attribute -- Attribute to change (i.e. process_performance_data)
new_value -- New value for the attribute (i.e. "1")
Optional Arguments:
old_value -- Specify this to change specific value
filename -- Configuration file to modify (i.e. /etc/nagios/nagios.cfg)
append -- Set to 'True' to append a new configuration attribute
Returns:
True -- If any changes were made
False -- If no changes were made
"""
filename = Model.config.cfg_file
if old_value.lower() == 'none':
old_value = None
if new_value.lower() == 'none':
new_value = None
if filename.lower() == 'none':
filename = None
    # append may arrive as a bool (the default) or as a string via the rest interface
    append = str(append)
    if append.lower() == 'false':
        append = False
    elif append.lower() == 'true':
        append = True
    elif append.lower() == 'none':
        append = None
return Model.config._edit_static_file(attribute=attribute, new_value=new_value, old_value=old_value, filename=filename, append=append)
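# Illustrative call (values are examples only): this helper is aimed at the rest
# interface, so arguments typically arrive as strings:
#
#     set_maincfg_attribute("process_performance_data", "1", old_value="None", append="False")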
def reload_nagios():
""" Reloads nagios. Returns "Success" on Success """
daemon = Control.daemon(
nagios_cfg=Model.config.cfg_file,
nagios_init=adagios.settings.nagios_init_script,
nagios_bin=adagios.settings.nagios_binary
)
result = {}
if daemon.reload() == 0:
result['status'] = _("success")
result['message'] = _('Nagios Successfully reloaded')
else:
result['status'] = _("error")
result['message'] = _("Failed to reload nagios (do you have enough permissions?)")
return result
def needs_reload():
""" Returns True if Nagios server needs to reload configuration """
return Model.config.needs_reload()
def dnslookup(host_name):
try:
(name, aliaslist, addresslist) = gethostbyname_ex(host_name)
return {'host': name, 'aliaslist': aliaslist, 'addresslist': addresslist}
except Exception, e:
return {'error': str(e)}
def contactgroup_hierarchy(**kwargs):
result = []
try:
groups = Model.Contactgroup.objects.all
for i in groups:
display = {}
display['v'] = i.contactgroup_name
display['f'] = '%s<div style="color:green; font-style:italic">%s contacts</div>' % (
i.contactgroup_name, 0)
arr = [display, i.contactgroup_members or '', str(i)]
result.append(arr)
return result
except Exception, e:
return {'error': str(e)}
def add_object(object_type, filename=None, **kwargs):
""" Create one specific object definition and store it in nagios.
Arguments:
object_type -- What kind of object to create (host, service,contactgroup, etc)
filename -- Which configuration file to store the object in. If filename=None pynag will decide
-- where to store the file
**kwargs -- Any other arguments will be treated as an attribute for the new object definition
Returns:
{'filename':XXX, 'raw_definition':XXX}
Examples:
        add_object(object_type="host", host_name="localhost.example", address="127.0.0.1", use="generic-host")
"""
my_object = Model.string_to_class.get(object_type)()
if filename is not None:
my_object.set_filename(filename)
for k, v in kwargs.items():
my_object[k] = v
my_object.save()
return {"filename": my_object.get_filename(), "raw_definition": str(my_object)}
def check_command(host_name, service_description, name=None, check_command=None, **kwargs):
""" Returns all macros of a given service/host
Arguments:
host_name -- Name of host
service_description -- Service description
check_command -- Name of check command
Any **kwargs will be treated as arguments or custom macros that will be changed on-the-fly before returning
Returns:
dict similar to the following:
{ 'host_name': ...,
'service_description': ...,
'check_command': ...,
'$ARG1$': ...,
'$SERVICE_MACROx$': ...,
}
"""
if host_name in ('None', None, ''):
my_object = Model.Service.objects.get_by_name(name)
elif service_description in ('None', None, '', u''):
my_object = Model.Host.objects.get_by_shortname(host_name)
else:
short_name = "%s/%s" % (host_name, service_description)
my_object = Model.Service.objects.get_by_shortname(short_name)
if check_command in (None, '', 'None'):
command = my_object.get_effective_check_command()
else:
command = Model.Command.objects.get_by_shortname(check_command)
# Lets put all our results in a nice little dict
macros = {}
cache = Model.ObjectFetcher._cache_only
try:
Model.ObjectFetcher._cache_only = True
macros['check_command'] = command.command_name
macros['original_command_line'] = command.command_line
macros['effective_command_line'] = my_object.get_effective_command_line()
# Lets get all macros that this check command defines:
regex = re.compile("(\$\w+\$)")
macronames = regex.findall(command.command_line)
for i in macronames:
macros[i] = my_object.get_macro(i) or ''
if not check_command:
# Argument macros are special (ARGX), lets display those as is, without resolving it to the fullest
ARGs = my_object.check_command.split('!')
for i, arg in enumerate(ARGs):
if i == 0:
continue
macronames = regex.findall(arg)
for m in macronames:
macros[m] = my_object.get_macro(m) or ''
macros['$ARG{i}$'.format(i=i)] = arg
finally:
Model.ObjectFetcher._cache_only = cache
return macros
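# Example usage (host and service names are hypothetical): inspect which macros a
# configured check resolves to:
#
#     macros = check_command("localhost", "Ping")
#     # macros['check_command'], macros['original_command_line'] and
#     # macros['effective_command_line'], plus one entry per $MACRO$ found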
def verify_configuration():
""" Verifies nagios configuration and returns the output of nagios -v nagios.cfg
"""
binary = adagios.settings.nagios_binary
config = adagios.settings.nagios_config
command = "%s -v '%s'" % (binary, config)
code, stdout, stderr = Utils.runCommand(command)
result = {}
result['return_code'] = code
result['output'] = stdout
result['errors'] = stderr
return result
def get_object_statistics():
""" Returns a list of all object_types with total number of configured objects
Example result:
[
{"object_type":"host", "total":50},
{"object_type":"service", "total":50},
]
"""
object_types = []
Model.ObjectDefinition.objects.reload_cache()
for k, v in Model.ObjectFetcher._cached_object_type.items():
total = len(v)
object_types.append({"object_type": k, "total": total})
return object_types
def autocomplete(q):
""" Returns a list of {'hosts':[], 'hostgroups':[],'services':[]} matching search query q
"""
if q is None:
q = ''
result = {}
hosts = Model.Host.objects.filter(host_name__contains=q)
services = Model.Service.objects.filter(service_description__contains=q)
hostgroups = Model.Hostgroup.objects.filter(hostgroup_name__contains=q)
result['hosts'] = sorted(set(map(lambda x: x.host_name, hosts)))
result['hostgroups'] = sorted(set(map(lambda x: x.hostgroup_name, hostgroups)))
result['services'] = sorted(set(map(lambda x: x.service_description, services)))
return result
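# Example (query string and results are hypothetical):
#
#     autocomplete("web")
#     # -> {'hosts': ['web01.example.com'], 'hostgroups': ['webservers'], 'services': ['HTTP']}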
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
class TestModel(models.Model):
testField = models.CharField(max_length=100)
testField2 = models.CharField(max_length=100)
class BusinessProcess(models.Model):
processes = models.ManyToManyField("self", unique=False, blank=True)
name = models.CharField(max_length=100, unique=True)
display_name = models.CharField(max_length=100, blank=True)
notes = models.CharField(max_length=1000, blank=True)
    #graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
class Graph(models.Model):
host_name = models.CharField(max_length=100)
service_description = models.CharField(max_length=100)
metric_name = models.CharField(max_length=100)
########NEW FILE########
__FILENAME__ = rest
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2012, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This is a rest interface used by the "/rest/" module that affects adagios directly.
"""
from adagios import __version__, notifications, tasks
from adagios.settings import plugins
from adagios import userdata
from django.utils.translation import ugettext as _
version = __version__
def add_notification(level="info", message="message", notification_id=None, notification_type=None, user=None):
""" Add a new notification to adagios notification bar.
Arguments:
level -- pick "info" "success" "error" "danger"
        message -- Arbitrary text message,
        notification_id (optional) -- Use this if you want to be able to
        -- remove this notification later via clear_notification()
notification_type -- Valid options: "generic" and "show_once"
user -- If specified, only display notification for this specific user.
Returns:
None
Examples:
>>> add_notification(level="warning", message="Nagios needs to reload")
"""
if not notification_id:
notification_id = str(message.__hash__())
if not notification_type:
notification_type = "generic"
notification = locals()
notifications[notification_id] = notification
def clear_notification(notification_id):
""" Clear one notification from adagios notification panel """
if notification_id in notifications:
del notifications[notification_id]
return "success"
return "not found"
def get_notifications(request):
""" Shows all current notifications """
result = []
for k in notifications.keys():
i = notifications[k]
if i.get('user') and i.get('user') != request.META.get('remote_user'):
            continue  # Skip this message if it is meant for someone else
elif i.get('notification_type') == 'show_once':
del notifications[k]
pass
result.append(i)
return result
def clear_all_notifications():
""" Removes all notifications from adagios notification panel """
notifications.clear()
return "all notifications cleared"
def list_tasks():
"""
"""
result = []
for task in tasks:
current_task = {
'task_id': task.get_id(),
'task_status': task.status()
}
result.append(current_task)
return result
def get_task(task_id="someid"):
""" Return information about one specific background task """
for task in tasks:
        if str(task.get_id()) == str(task_id):
current_task = {
'task_id': task.get_id(),
'task_status': task.status()
}
return current_task
raise KeyError(_("Task not '%s' Found") % task_id)
def get_user_preferences(request):
try:
user = userdata.User(request)
except Exception as e:
raise e
return user.to_dict()
def set_user_preference(request, **kwargs):
try:
user = userdata.User(request)
except Exception as e:
raise e
for (k, v) in kwargs.iteritems():
if not k.startswith('_'):
user.set_pref(k, v)
user.save()
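# Usage sketch (assumes an authenticated django request object; the preference names
# are the same ones used by UserdataForm in adagios.misc.forms):
#
#     set_user_preference(request, theme="default", language="en")
#     current = get_user_preferences(request)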
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
import adagios.utils
import os
from django.utils.translation import ugettext as _
class FakeAdagiosEnvironment(unittest.TestCase):
""" Test the features of adagios.utils.FakeAdagiosEnvironment
"""
@classmethod
def setUpClass(cls):
cls.fake_adagios = adagios.utils.FakeAdagiosEnvironment()
@classmethod
def tearDownClass(cls):
cls.fake_adagios.terminate()
def testFakeAdagiosEnvironment(self):
fake_adagios = self.fake_adagios
# Make sure temporary environment gets created
fake_adagios.create_minimal_environment()
self.assertTrue(os.path.exists(fake_adagios.adagios_config_file))
# Make sure adagios.settings is updated
global_config_file = adagios.settings.adagios_configfile
fake_adagios.update_adagios_global_variables()
# Make sure adagios_config_file changed
self.assertTrue(adagios.settings.adagios_configfile != global_config_file)
# Make sure the new test is in the tempdir
self.assertTrue(adagios.settings.adagios_configfile.startswith(fake_adagios.tempdir))
        # Make sure global variables are properly restored
fake_adagios.restore_adagios_global_variables()
self.assertTrue(adagios.settings.adagios_configfile == global_config_file)
class MiscTestCase(unittest.TestCase):
def setUp(self):
self.environment = adagios.utils.FakeAdagiosEnvironment()
self.environment.create_minimal_environment()
self.environment.update_adagios_global_variables()
def tearDown(self):
self.environment.terminate()
def _testPageLoad(self, url):
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200)
    def test_page_loads(self):
""" Smoke test views in /misc/
"""
self.loadPage("/misc/settings")
self.loadPage("/misc/preferences")
self.loadPage("/misc/nagios")
self.loadPage("/misc/settings")
self.loadPage("/misc/service")
self.loadPage("/misc/pnp4nagios")
self.loadPage("/misc/mail")
self.loadPage("/misc/images")
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(e)s") % {'url': url, 'e': e})
def test_user_preferences(self):
c = Client()
response = c.post('/misc/preferences/',
{'theme': 'spacelab', 'language': 'fr'})
assert(response.status_code == 200)
assert('spacelab/style.css' in response.content)
assert('(fr)' in response.content)
def load_get(self, url):
c = Client()
response = c.get(url)
return response
def test_topmenu_highlight(self):
r = self.load_get('/status/')
assert '<li class="active">\n <a href="/status">' in r.content
def test_leftmenu_highlight(self):
r = self.load_get('/status/problems')
assert '<li class="active">\n <a href="/status/problems">' in r.content
def test_app_name(self):
from adagios import settings
settings.TOPMENU_HOME = 'Free beer'
r = self.load_get('/status')
assert 'Free beer' in r.content
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('',
(r'^/test/?', 'adagios.misc.views.test'),
(r'^/paste/?', 'adagios.misc.views.paste'),
(r'^/?$', 'adagios.misc.views.index'),
(r'^/settings/?', 'adagios.misc.views.settings'),
(r'^/preferences/?', 'adagios.misc.views.preferences'),
(r'^/nagios/?', 'adagios.misc.views.nagios'),
(r'^/iframe/?', 'adagios.misc.views.iframe'),
(r'^/gitlog/?', 'adagios.misc.views.gitlog'),
(r'^/service/?', 'adagios.misc.views.nagios_service'),
(r'^/pnp4nagios/?$', 'adagios.misc.views.pnp4nagios'),
(r'^/pnp4nagios/edit(?P<filename>.+)$', 'adagios.misc.views.pnp4nagios_edit_template'),
(r'^/mail', 'adagios.misc.views.mail'),
url(r'^/images/(?P<path>.+)$', 'django.views.static.serve', {'document_root': '/usr/share/nagios3/htdocs/images/logos/'}, name="logo"),
(r'^/images/?$', 'adagios.misc.views.icons'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.forms.formsets import BaseFormSet
from django.shortcuts import render_to_response
from django.shortcuts import render
from django.utils.translation import ugettext as _
from django.shortcuts import HttpResponse
from django.template import RequestContext
from adagios.misc import forms
import os
import mimetypes
import pynag.Model
import pynag.Utils
import pynag.Control
import pynag.Model.EventHandlers
import os.path
from time import mktime, sleep
from datetime import datetime
from os.path import dirname
from subprocess import Popen, PIPE
import adagios.settings
import adagios.objectbrowser
from adagios import __version__
import adagios.status.utils
from adagios import userdata
from collections import defaultdict
from adagios.views import adagios_decorator, error_page
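# Map Nagios plugin exit codes to human readable state names; anything else maps to "unknown".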
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
@adagios_decorator
def index(request):
c = {}
c['nagios_cfg'] = pynag.Model.config.cfg_file
c['version'] = __version__
return render_to_response('frontpage.html', c, context_instance=RequestContext(request))
@adagios_decorator
def settings(request):
c = {}
c.update(csrf(request))
c['messages'] = m = []
c['errors'] = e = []
if request.method == 'GET':
form = forms.AdagiosSettingsForm(initial=request.GET)
form.is_valid()
elif request.method == 'POST':
form = forms.AdagiosSettingsForm(data=request.POST)
if form.is_valid():
try:
form.save()
m.append(_("%s successfully saved.") % form.adagios_configfile)
except IOError, exc:
e.append(exc)
else:
raise Exception(_("We only support methods GET or POST"))
c['form'] = form
return render_to_response('settings.html', c, context_instance=RequestContext(request))
@adagios_decorator
def nagios(request):
return iframe(request, adagios.settings.nagios_url)
@adagios_decorator
def iframe(request, url=None):
if not url:
url = request.GET.get('url', None)
return render_to_response('iframe.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def gitlog(request):
""" View that displays a nice log of previous git commits in dirname(config.cfg_file) """
c = {}
c.update(csrf(request))
c['messages'] = m = []
c['errors'] = []
# Get information about the committer
author_name = request.META.get('REMOTE_USER', 'anonymous')
try:
contact = pynag.Model.Contact.objects.get_by_shortname(author_name)
author_email = contact.email or None
except Exception:
author_email = None
nagiosdir = dirname(pynag.Model.config.cfg_file or None)
git = pynag.Utils.GitRepo(
directory=nagiosdir, author_name=author_name, author_email=author_email)
c['nagiosdir'] = nagiosdir
c['commits'] = []
if request.method == 'POST':
try:
if 'git_init' in request.POST:
git.init()
elif 'git_commit' in request.POST:
filelist = []
commit_message = request.POST.get(
'git_commit_message', _("bulk commit by adagios"))
for i in request.POST:
if i.startswith('commit_'):
filename = i[len('commit_'):]
git.add(filename)
filelist.append(filename)
if len(filelist) == 0:
raise Exception(_("No files selected."))
git.commit(message=commit_message, filelist=filelist)
m.append(_("%s files successfully commited.") % len(filelist))
except Exception, e:
c['errors'].append(e)
# Check if nagiosdir has a git repo or not
try:
c['uncommited_files'] = git.get_uncommited_files()
except pynag.Model.EventHandlers.EventHandlerError, e:
if e.errorcode == 128:
c['no_git_repo_found'] = True
# Show git history
try:
c['commits'] = git.log()
commit = request.GET.get('show', False)
if commit != False:
c['diff'] = git.show(commit)
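            # Tag each diff line with a css class so the template can color
            # additions, removals and hunk headers differently.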
difflines = []
for i in c['diff'].splitlines():
if i.startswith('---'):
tag = 'hide'
elif i.startswith('+++'):
tag = 'hide'
elif i.startswith('index'):
tag = 'hide'
elif i.startswith('-'):
tag = "alert-danger"
elif i.startswith('+'):
tag = "alert-success"
elif i.startswith('@@'):
tag = 'alert-unknown'
elif i.startswith('diff'):
tag = "filename"
else:
continue
difflines.append({'tag': tag, 'line': i})
c['difflines'] = difflines
c['commit_id'] = commit
except Exception, e:
c['errors'].append(e)
return render_to_response('gitlog.html', c, context_instance=RequestContext(request))
@adagios_decorator
def nagios_service(request):
""" View to restart / reload nagios service """
c = {}
c['errors'] = []
c['messages'] = []
nagios_bin = adagios.settings.nagios_binary
nagios_init = adagios.settings.nagios_init_script
nagios_cfg = adagios.settings.nagios_config
if request.method == 'GET':
form = forms.NagiosServiceForm(initial=request.GET)
else:
form = forms.NagiosServiceForm(data=request.POST)
if form.is_valid():
form.save()
c['stdout'] = form.stdout
c['stderr'] = form.stderr
c['command'] = form.command
for i in form.stdout.splitlines():
if i.strip().startswith('Error:'):
c['errors'].append(i)
c['form'] = form
service = pynag.Control.daemon(
nagios_bin=nagios_bin, nagios_cfg=nagios_cfg, nagios_init=nagios_init)
c['status'] = s = service.status()
if s == 0:
c['friendly_status'] = "running"
elif s == 1:
c['friendly_status'] = "not running"
else:
c['friendly_status'] = 'unknown (exit status %s)' % (s, )
needs_reload = pynag.Model.config.needs_reload()
c['needs_reload'] = needs_reload
return render_to_response('nagios_service.html', c, context_instance=RequestContext(request))
@adagios_decorator
def pnp4nagios(request):
""" View to handle integration with pnp4nagios """
c = {}
c['errors'] = e = []
c['messages'] = m = []
c['broker_module'] = forms.PNPBrokerModuleForm(initial=request.GET)
c['templates_form'] = forms.PNPTemplatesForm(initial=request.GET)
c['action_url'] = forms.PNPActionUrlForm(initial=request.GET)
c['pnp_templates'] = forms.PNPTemplatesForm(initial=request.GET)
try:
c['npcd_config'] = forms.PNPConfigForm(initial=request.GET)
    except Exception, error:
        c['errors'].append(error)
#c['interesting_objects'] = form.interesting_objects
if request.method == 'POST' and 'save_broker_module' in request.POST:
c['broker_module'] = broker_form = forms.PNPBrokerModuleForm(
data=request.POST)
if broker_form.is_valid():
broker_form.save()
m.append(_("Broker Module updated in nagios.cfg"))
elif request.method == 'POST' and 'save_action_url' in request.POST:
c['action_url'] = forms.PNPActionUrlForm(data=request.POST)
if c['action_url'].is_valid():
c['action_url'].save()
m.append(_('Action_url updated for %s services') %
c['action_url'].total_services)
if c['action_url'].error_services > 0:
e.append(
_("%s services could not be updated (check permissions?)") %
c['action_url'].error_services)
elif request.method == 'POST' and 'save_npcd_config' in request.POST:
c['npcd_config'] = forms.PNPConfigForm(data=request.POST)
if c['npcd_config'].is_valid():
c['npcd_config'].save()
m.append(_("npcd.cfg updated"))
return render_to_response('pnp4nagios.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_file(request, filename):
""" This view gives raw read/write access to a given filename.
Please be so kind as not to give direct url access to this function, because it will allow
    editing of any file the webserver has access to.
"""
c = {}
c['messages'] = []
c['errors'] = []
try:
c['form'] = forms.EditFileForm(filename=filename, initial=request.GET)
c['filename'] = filename
if request.method == 'POST':
c['form'] = forms.EditFileForm(
filename=filename, data=request.POST)
if c['form'].is_valid():
c['form'].save()
except Exception, e:
c['errors'].append(e)
return render_to_response('editfile.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_nagios_cfg(request):
""" Allows raw editing of nagios.cfg configfile
"""
return edit_file(request, filename=adagios.settings.nagios_config)
@adagios_decorator
def pnp4nagios_edit_template(request, filename):
""" Allows raw editing of a pnp4nagios template.
Will throw security exception if filename is not a pnp4nagios template
"""
form = forms.PNPTemplatesForm(initial=request.GET)
if filename in form.templates:
return edit_file(request, filename=filename)
else:
raise Exception(
_("Security violation. You are not allowed to edit %s") % filename)
@adagios_decorator
def icons(request, image_name=None):
""" Use this view to see nagios icons/logos
"""
c = {}
c['messages'] = []
c['errors'] = []
image_path = '/usr/share/nagios3/htdocs/images/logos/'
filenames = []
for root, subfolders, files in os.walk(image_path):
for filename in files:
filenames.append(os.path.join(root, filename))
# Cut image_path out of every filename
filenames = map(lambda x: x[len(image_path):], filenames)
# Filter out those silly .gd2 files that don't display inside a browser
filenames = filter(lambda x: not x.lower().endswith('.gd2'), filenames)
filenames.sort()
if not image_name:
# Return a list of images
c['images'] = filenames
return render_to_response('icons.html', c, context_instance=RequestContext(request))
else:
if image_name in filenames:
file_extension = image_name.split('.').pop()
            mime_type = mimetypes.types_map.get('.%s' % file_extension)
fsock = open("%s/%s" % (image_path, image_name,))
return HttpResponse(fsock, mimetype=mime_type)
else:
raise Exception(_("Not allowed to see this image"))
@adagios_decorator
def mail(request):
""" Send a notification email to one or more contacts regarding hosts or services """
c = {}
c['messages'] = []
c['errors'] = []
c.update(csrf(request))
c['http_referer'] = request.META.get("HTTP_REFERER")
c['http_origin'] = request.META.get("HTTP_ORIGIN")
remote_user = request.META.get('REMOTE_USER', 'anonymous adagios user')
hosts = []
services = []
if request.method == 'GET':
c['form'] = forms.SendEmailForm(remote_user, initial=request.GET)
hosts = request.GET.getlist('host') or request.GET.getlist('host[]')
services = request.GET.getlist(
'service') or request.GET.getlist('service[]')
if not services and not hosts:
c['form'].services = adagios.status.utils.get_services(
request, host_name='localhost')
elif request.method == 'POST':
c['form'] = forms.SendEmailForm(remote_user, data=request.POST)
services = request.POST.getlist('service') or request.POST.getlist('service[]')
hosts = request.POST.getlist('host') or request.POST.getlist('host[]')
c['acknowledged_or_not'] = request.POST.get('acknowledge_all_problems') == 'true'
for host_name in hosts:
host_object = adagios.status.utils.get_hosts(request, host_name=host_name)
if not host_object:
c['errors'].append(
_("Host %s not found. Maybe a typo or you do not have access to it.") % host_name
)
continue
for item in host_object:
item['host_name'] = item['name']
item['description'] = _("Host Status")
c['form'].status_objects.append(item)
c['form'].hosts.append(item)
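    # Each requested service arrives as a single "host_name/service_description" string.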
for i in services:
try:
host_name, service_description = i.split('/', 1)
service = adagios.status.utils.get_services(request,
host_name=host_name,
service_description=service_description
)
if not service:
c['errors'].append(
_('Service "%s"" not found. Maybe a typo or you do not have access to it ?') % i)
for x in service:
c['form'].status_objects.append(x)
c['form'].services.append(x)
except AttributeError, e:
c['errors'].append(_("AttributeError for '%(i)s': %(e)s") % {'i': i, 'e': e})
except KeyError, e:
c['errors'].append(_("Error adding service '%(i)s': %(e)s") % {'i': i, 'e': e})
c['services'] = c['form'].services
c['hosts'] = c['form'].hosts
c['status_objects'] = c['form'].status_objects
c['form'].html_content = render(
request, "snippets/misc_mail_objectlist.html", c).content
if request.method == 'POST' and c['form'].is_valid():
c['form'].save()
return render_to_response('misc_mail.html', c, context_instance=RequestContext(request))
@adagios_decorator
def test(request):
""" Generic test view, use this as a sandbox if you like
"""
c = {}
c['messages'] = []
c.update(csrf(request))
# Get some test data
if request.method == 'POST':
c['form'] = forms.PluginOutputForm(data=request.POST)
if c['form'].is_valid():
c['form'].parse()
else:
c['form'] = forms.PluginOutputForm(initial=request.GET)
return render_to_response('test.html', c, context_instance=RequestContext(request))
@adagios_decorator
def paste(request):
""" Generic test view, use this as a sandbox if you like
"""
c = {}
c['messages'] = []
c.update(csrf(request))
# Get some test data
if request.method == 'POST':
c['form'] = forms.PasteForm(data=request.POST)
if c['form'].is_valid():
c['form'].parse()
else:
c['form'] = forms.PasteForm(initial=request.GET)
return render_to_response('test2.html', c, context_instance=RequestContext(request))
@adagios_decorator
def preferences(request):
c = {}
c['messages'] = []
c.update(csrf(request))
user = userdata.User(request)
if request.method == 'POST':
c['form'] = forms.UserdataForm(data=request.POST)
if c['form'].is_valid():
for k, v in c['form'].cleaned_data.iteritems():
user.set_pref(k, v)
user.save() # will save in json and trigger the hooks
c['messages'].append(_('Preferences have been saved.'))
else:
c['form'] = forms.UserdataForm(initial=user.to_dict())
return render_to_response('userdata.html', c, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
urlpatterns = patterns('adagios',
(r'^/?$', 'myapp.views.hello_world'),
(r'^/url1?$', 'myapp.views.hello_world'),
(r'^/url2?$', 'myapp.views.hello_world'),
)
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Create your views here.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
from django.shortcuts import RequestContext
def hello_world(request):
""" This is an example view. """
c = {}
return render_to_response("myapp_helloworld.html", c, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.safestring import mark_safe
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from pynag import Model
from pynag.Utils import AttributeList
from adagios.objectbrowser.help_text import object_definitions
from pynag.Model import ObjectDefinition
from adagios.forms import AdagiosForm
import adagios.misc.rest
# These fields are special, they are a comma separated list, and may or
# may not have +/- in front of them.
MULTICHOICE_FIELDS = ('servicegroups', 'hostgroups', 'contacts',
'contact_groups', 'contactgroups', 'use', 'notification_options')
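# Example: contact_groups may hold a value like "+admins,operators", where the
# leading "+" means "append to the inherited value" instead of replacing it.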
SERVICE_NOTIFICATION_OPTIONS = (
('w', 'warning'),
('c', 'critical'),
('r', 'recovery'),
    ('u', 'unknown'),
    ('s', 'downtime'),
('f', 'flapping'),
)
HOST_NOTIFICATION_OPTIONS = (
('d', 'down'),
('u', 'unreachable'),
('r', 'recovery'),
('f', 'flapping'),
('s', 'scheduled_downtime')
)
BOOLEAN_CHOICES = (('', 'not set'), ('1', '1'), ('0', '0'))
class PynagChoiceField(forms.MultipleChoiceField):
""" multichoicefields that accepts comma seperated input as values """
def __init__(self, inline_help_text=_("Select some options"), *args, **kwargs):
self.__prefix = ''
self.data = kwargs.get('data')
super(PynagChoiceField, self).__init__(*args, **kwargs)
self.widget.attrs['data-placeholder'] = inline_help_text
def clean(self, value):
"""
Changes list into a comma separated string. Removes duplicates.
"""
if not value:
return "null"
tmp = []
for i in value:
if i not in tmp:
tmp.append(i)
value = self.__prefix + ','.join(tmp)
return value
def prepare_value(self, value):
"""
        Takes a comma separated string, strips any leading +/- operator, and returns a list of values.
"""
if isinstance(value, str):
self.attributelist = AttributeList(value)
self.__prefix = self.attributelist.operator
return self.attributelist.fields
return value
class PynagRadioWidget(forms.widgets.HiddenInput):
""" Special Widget designed to make Nagios attributes with 0/1 values look like on/off buttons """
def render(self, name, value, attrs=None):
output = super(PynagRadioWidget, self).render(name, value, attrs)
one, zero, unset = "", "", ""
if value == "1":
one = "active"
elif value == "0":
zero = "active"
else:
unset = "active"
prefix = """
<div class="btn-group" data-toggle-name="%s" data-toggle="buttons-radio">
<button type="button" value="1" class="btn btn %s">On</button>
<button type="button" value="0" class="btn btn %s">Off</button>
<button type="button" value="" class="btn %s">Not set</button>
</div>
""" % (name, one, zero, unset)
output += prefix
return mark_safe(output)
class PynagForm(AdagiosForm):
def clean(self):
cleaned_data = super(PynagForm, self).clean()
for k, v in cleaned_data.items():
# change from unicode to str
v = cleaned_data[k] = smart_str(v)
# Empty string, or the string None, means remove the field
if v in ('', 'None'):
cleaned_data[k] = v = None
# Maintain operator (+,-, !) for multichoice fields
if k in MULTICHOICE_FIELDS and v and v != "null":
operator = AttributeList(self.pynag_object.get(k, '')).operator or ''
cleaned_data[k] = "%s%s" % (operator, v)
return cleaned_data
def save(self):
changed_keys = map(lambda x: smart_str(x), self.changed_data)
for k in changed_keys:
# Ignore fields that did not appear in the POST at all EXCEPT
# If it it a pynagchoicefield. That is because multichoicefield that
# does not appear in the post, means that the user removed every attribute
# in the multichoice field
if k not in self.data and not isinstance(self.fields.get(k, None), PynagChoiceField):
continue
value = self.cleaned_data[k]
# Sometimes attributes slide in changed_data without having
# been modified, lets ignore those
if self.pynag_object[k] == value:
continue
# Multichoice fields have a special restriction, sometimes they contain
# the same values as before but in a different order.
if k in MULTICHOICE_FIELDS:
original = AttributeList(self.pynag_object[k])
new = AttributeList(value)
if sorted(original.fields) == sorted(new.fields):
                    continue
            # If we reach here, it is safe to modify our pynag object.
# Here we actually make a change to our pynag object
self.pynag_object[k] = value
# Additionally, update the field for the return form
self.fields[k] = self.get_pynagField(k, css_tag="defined")
self.fields[k].value = value
self.pynag_object.save()
adagios.misc.rest.add_notification(message=_("Object successfully saved"), level="success", notification_type="show_once")
def __init__(self, pynag_object, *args, **kwargs):
self.pynag_object = pynag_object
super(PynagForm, self).__init__(*args, **kwargs)
# Lets find out what attributes to create
object_type = pynag_object['object_type']
defined_attributes = sorted(
self.pynag_object._defined_attributes.keys())
inherited_attributes = sorted(
self.pynag_object._inherited_attributes.keys())
all_attributes = sorted(object_definitions.get(object_type).keys())
all_attributes += ['name', 'use', 'register']
# Special hack for macros
# If this is a post and any post data looks like a nagios macro
# We will generate a field for it on the fly
macros = filter(lambda x: x.startswith('$') and x.endswith('$'), self.data.keys())
for field_name in macros:
# if field_name.startswith('$ARG'):
# self.fields[field_name] = self.get_pynagField(field_name, css_tag='defined')
if object_type == 'service' and field_name.startswith('$_SERVICE'):
self.fields[field_name] = self.get_pynagField(
field_name, css_tag='defined')
elif object_type == 'host' and field_name.startswith('$_HOST'):
self.fields[field_name] = self.get_pynagField(
field_name, css_tag='defined')
# Calculate what attributes are "undefined"
self.undefined_attributes = []
for i in all_attributes:
if i in defined_attributes:
continue
if i in inherited_attributes:
continue
self.undefined_attributes.append(i)
# Find out which attributes to show
for field_name in defined_attributes:
self.fields[field_name] = self.get_pynagField(
field_name, css_tag='defined')
for field_name in inherited_attributes:
self.fields[field_name] = self.get_pynagField(
field_name, css_tag="inherited")
for field_name in self.undefined_attributes:
self.fields[field_name] = self.get_pynagField(
field_name, css_tag='undefined')
return
def get_pynagField(self, field_name, css_tag="", required=None):
""" Takes a given field_name and returns a forms.Field that is appropriate for this field
Arguments:
field_name -- Name of the field to add, example "host_name"
css_tag -- String will make its way as a css attribute in the resulting html
required -- If True, make field required. If None, let pynag decide
"""
# Lets figure out what type of field this is, default to charfield
object_type = self.pynag_object['object_type']
definitions = object_definitions.get(object_type) or {}
options = definitions.get(field_name) or {}
# Find out what type of field to create from the field_name.
# Lets assume charfield in the beginning
field = forms.CharField()
        if field_name in ('contact_groups', 'contactgroups', 'contactgroup_members'):
all_groups = Model.Contactgroup.objects.filter(
contactgroup_name__contains="")
choices = sorted(
map(lambda x: (x.contactgroup_name, x.contactgroup_name), all_groups))
field = PynagChoiceField(
choices=choices, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name == 'use':
all_objects = self.pynag_object.objects.filter(name__contains='')
choices = map(lambda x: (x.name, x.name), all_objects)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %s selected") % {'field_name': field_name})
elif field_name in ('servicegroups', 'servicegroup_members'):
all_groups = Model.Servicegroup.objects.filter(
servicegroup_name__contains='')
choices = map(
lambda x: (x.servicegroup_name, x.servicegroup_name), all_groups)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name in ('hostgroups', 'hostgroup_members', 'hostgroup_name') and object_type != 'hostgroup':
all_groups = Model.Hostgroup.objects.filter(
hostgroup_name__contains='')
choices = map(
lambda x: (x.hostgroup_name, x.hostgroup_name), all_groups)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name == 'members' and object_type == 'hostgroup':
all_groups = Model.Host.objects.filter(host_name__contains='')
choices = map(lambda x: (x.host_name, x.host_name), all_groups)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name == 'host_name' and object_type == 'service':
all_groups = Model.Host.objects.filter(host_name__contains='')
choices = map(lambda x: (x.host_name, x.host_name), all_groups)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name in ('contacts', 'members'):
all_objects = Model.Contact.objects.filter(
contact_name__contains='')
choices = map(
lambda x: (x.contact_name, x.contact_name), all_objects)
field = PynagChoiceField(
choices=sorted(choices), inline_help_text=_("No %s selected") % {'field_name': field_name})
elif field_name.endswith('_period'):
all_objects = Model.Timeperiod.objects.filter(
timeperiod_name__contains='')
choices = [('', '')] + map(
lambda x: (x.timeperiod_name, x.timeperiod_name), all_objects)
field = forms.ChoiceField(choices=sorted(choices))
elif field_name.endswith('notification_commands'):
all_objects = Model.Command.objects.filter(
command_name__contains='')
choices = [('', '')] + map(
lambda x: (x.command_name, x.command_name), all_objects)
field = PynagChoiceField(choices=sorted(choices))
# elif field_name == 'check_command':
# all_objects = Model.Command.objects.all
# choices = [('','')] + map(lambda x: (x.command_name, x.command_name), all_objects)
# field = forms.ChoiceField(choices=sorted(choices))
elif field_name.endswith('notification_options') and self.pynag_object.object_type == 'host':
field = PynagChoiceField(
choices=HOST_NOTIFICATION_OPTIONS, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif field_name.endswith('notification_options') and self.pynag_object.object_type == 'service':
field = PynagChoiceField(
choices=SERVICE_NOTIFICATION_OPTIONS, inline_help_text=_("No %(field_name)s selected") % {'field_name': field_name})
elif options.get('value') == '[0/1]':
field = forms.CharField(widget=PynagRadioWidget)
# Lets see if there is any help text available for our field
if field_name in object_definitions[object_type]:
help_text = object_definitions[object_type][field_name].get(
'help_text', _("No help available for this item"))
field.help_text = help_text
# No prettyprint for macros
if field_name.startswith('_'):
field.label = field_name
# If any CSS tag was given, add it to the widget
self.add_css_tag(field=field, css_tag=css_tag)
if 'required' in options:
self.add_css_tag(field=field, css_tag=options['required'])
field.required = options['required'] == 'required'
else:
field.required = False
# At the moment, our database of required objects is incorrect
# So if caller did not specify if field is required, we will not
# make it required
if required is None:
field.required = False
else:
field.required = required
# Put inherited value in the placeholder
inherited_value = self.pynag_object._inherited_attributes.get(
field_name)
if inherited_value is not None:
self.add_placeholder(
field, _('%(inherited_value)s (inherited from template)') % {'inherited_value': inherited_value})
if field_name in MULTICHOICE_FIELDS:
self.add_css_tag(field=field, css_tag="multichoice")
return field
def add_css_tag(self, field, css_tag):
""" Add a CSS tag to the widget of a specific field """
if not 'class' in field.widget.attrs:
field.widget.attrs['class'] = ''
field.css_tag = ''
field.widget.attrs['class'] += " " + css_tag
field.css_tag += " " + css_tag
def add_placeholder(self, field, placeholder=_("Insert some value here")):
field.widget.attrs['placeholder'] = placeholder
field.placeholder = placeholder
class AdvancedEditForm(AdagiosForm):
""" A form for pynag.Model.Objectdefinition
This form will display a charfield for every attribute of the objectdefinition
"Every" attribute means:
* Every defined attribute
* Every inherited attribute
* Every attribute that is defined in nagios object definition html
"""
register = forms.CharField(
required=False, help_text=_("Set to 1 if you want this object enabled."))
name = forms.CharField(required=False, label=_("Generic Name"),
help_text=_("This name is used if you want other objects to inherit (with the use attribute) what you have defined here."))
use = forms.CharField(required=False, label=_("Use"),
help_text=_("Inherit all settings from another object"))
__prefix = "advanced" # This prefix will go on every field
def save(self):
for k in self.changed_data:
# change from unicode to str
value = smart_str(self.cleaned_data[k])
# same as original, lets ignore that
if self.pynag_object[k] == value:
continue
if value == '':
value = None
            # If we reach here, it is safe to modify our pynag object.
self.pynag_object[k] = value
self.pynag_object.save()
def clean(self):
cleaned_data = super(AdvancedEditForm, self).clean()
for k, v in cleaned_data.items():
# change from unicode to str
cleaned_data[k] = smart_str(v)
return cleaned_data
def __init__(self, pynag_object, *args, **kwargs):
self.pynag_object = pynag_object
super(AdvancedEditForm, self).__init__(
*args, prefix=self.__prefix, **kwargs)
# Lets find out what attributes to create
object_type = pynag_object['object_type']
all_attributes = sorted(object_definitions.get(object_type).keys())
for field_name in self.pynag_object.keys() + all_attributes:
if field_name == 'meta':
continue
help_text = ""
if field_name in object_definitions[object_type]:
help_text = object_definitions[object_type][field_name].get(
'help_text', _("No help available for this item"))
self.fields[field_name] = forms.CharField(
required=False, label=field_name, help_text=help_text)
self.fields.keyOrder = sorted(self.fields.keys())
class GeekEditObjectForm(AdagiosForm):
definition = forms.CharField(
widget=forms.Textarea(attrs={'wrap': 'off', 'cols': '80'}))
def __init__(self, pynag_object=None, *args, **kwargs):
self.pynag_object = pynag_object
super(GeekEditObjectForm, self).__init__(*args, **kwargs)
def clean_definition(self, value=None):
definition = smart_str(self.cleaned_data['definition'])
definition = definition.replace('\r\n', '\n')
definition = definition.replace('\r', '\n')
if not definition.endswith('\n'):
definition += '\n'
return definition
def save(self):
definition = self.cleaned_data['definition']
self.pynag_object.rewrite(str_new_definition=definition)
class DeleteObjectForm(AdagiosForm):
""" Form used to handle deletion of one single object """
def __init__(self, pynag_object, *args, **kwargs):
self.pynag_object = pynag_object
super(DeleteObjectForm, self).__init__(*args, **kwargs)
if self.pynag_object.object_type == 'host':
recursive = forms.BooleanField(
required=False, initial=True, label=_("Delete Services"),
help_text=_("Check this box if you also want to delete all services of this host"))
self.fields['recursive'] = recursive
def delete(self):
""" Deletes self.pynag_object. """
recursive = False
if 'recursive' in self.cleaned_data and self.cleaned_data['recursive'] is True:
recursive = True
self.pynag_object.delete(recursive)
class CopyObjectForm(AdagiosForm):
""" Form to assist a user to copy a single object definition
"""
def __init__(self, pynag_object, *args, **kwargs):
self.pynag_object = pynag_object
super(CopyObjectForm, self).__init__(*args, **kwargs)
object_type = pynag_object['object_type']
# For templates we assume the new copy will have its generic name changed
        # otherwise we display different fields depending on what type of
        # object it is
if pynag_object['register'] == '0':
if pynag_object.name is None:
new_generic_name = "%s-copy" % pynag_object.get_description()
else:
new_generic_name = '%s-copy' % pynag_object.name
self.fields['name'] = forms.CharField(
initial=new_generic_name, help_text=_("Select a new generic name for this %(object_type)s") % {'object_type': object_type})
elif object_type == 'host':
new_host_name = "%s-copy" % pynag_object.get_description()
self.fields['host_name'] = forms.CharField(
help_text=_("Select a new host name for this host"), initial=new_host_name)
self.fields['address'] = forms.CharField(
help_text=_("Select a new ip address for this host"))
self.fields['recursive'] = forms.BooleanField(
required=False, label="Copy Services", help_text=_("Check this box if you also want to copy all services of this host."))
elif object_type == 'service':
service_description = "%s-copy" % pynag_object.service_description
self.fields['host_name'] = forms.CharField(
help_text=_("Select a new host name for this service"), initial=pynag_object.host_name)
self.fields['service_description'] = forms.CharField(
help_text=_("Select new service description for this service"), initial=service_description)
else:
field_name = "%s_name" % object_type
initial = "%s-copy" % pynag_object[field_name]
help_text = object_definitions[
object_type][field_name].get('help_text')
if help_text == '':
help_text = _("Please specify a new %(field_name)s") % {'field_name': field_name}
self.fields[field_name] = forms.CharField(
initial=initial, help_text=help_text)
def save(self):
# If copy() returns a single object, lets transform it into a list
tmp = self.pynag_object.copy(**self.cleaned_data)
        if not isinstance(tmp, list):
tmp = [tmp]
self.copied_objects = tmp
def _clean_shortname(self):
""" Make sure shortname of a particular object does not exist.
Raise validation error if shortname is found
"""
object_type = self.pynag_object.object_type
field_name = "%s_name" % object_type
value = smart_str(self.cleaned_data[field_name])
try:
self.pynag_object.objects.get_by_shortname(value)
raise forms.ValidationError(
_("A %(object_type)s with %(field_name)s='%(value)s' already exists.") % {'object_type': object_type,
'field_name': field_name,
'value': value,
})
except KeyError:
return value
def clean_host_name(self):
if self.pynag_object.object_type == 'service':
return smart_str(self.cleaned_data['host_name'])
return self._clean_shortname()
def clean_timeperiod_name(self):
return self._clean_shortname()
def clean_command_name(self):
return self._clean_shortname()
def clean_contactgroup_name(self):
return self._clean_shortname()
def clean_hostgroup_name(self):
return self._clean_shortname()
def clean_servicegroup_name(self):
return self._clean_shortname()
def clean_contact_name(self):
return self._clean_shortname()
class BaseBulkForm(AdagiosForm):
""" To make changes to multiple objects at once
* any POST data that has the name change_<OBJECTID> will be fetched
and the ObjectDefinition saved in self.changed_objects
* any POST data that has the name hidden_<OBJECTID> will be fetched
and the ObjectDefinition saved in self.all_objects
"""
def __init__(self, objects=None, *args, **kwargs):
self.objects = []
self.all_objects = []
self.changed_objects = []
if not objects:
objects = []
forms.Form.__init__(self, *args, **kwargs)
for k, v in self.data.items():
if k.startswith('hidden_'):
obj = Model.ObjectDefinition.objects.get_by_id(v)
if obj not in self.all_objects:
self.all_objects.append(obj)
if k.startswith('change_'):
object_id = k[len("change_"):]
obj = Model.ObjectDefinition.objects.get_by_id(object_id)
if obj not in self.changed_objects:
self.changed_objects.append(obj)
if obj not in self.all_objects:
self.all_objects.append(obj)
def clean(self):
#self.cleaned_data = {}
for k, v in self.data.items():
if k.startswith('hidden_'):
self.cleaned_data[k] = v
obj = Model.ObjectDefinition.objects.get_by_id(v)
if obj not in self.all_objects:
self.all_objects.append(obj)
if k.startswith('change_'):
self.cleaned_data[k] = v
object_id = k[len("change_"):]
obj = Model.ObjectDefinition.objects.get_by_id(object_id)
if obj not in self.changed_objects:
self.changed_objects.append(obj)
for k, v in self.cleaned_data.items():
self.cleaned_data[k] = smart_str(self.cleaned_data[k])
return self.cleaned_data
class BulkEditForm(BaseBulkForm):
attribute_name = forms.CharField()
new_value = forms.CharField()
def save(self):
for i in self.changed_objects:
key = self.cleaned_data['attribute_name']
value = self.cleaned_data['new_value']
i[key] = value
i.save()
class BulkCopyForm(BaseBulkForm):
attribute_name = forms.CharField()
new_value = forms.CharField()
def __init__(self, *args, **kwargs):
BaseBulkForm.__init__(self, *args, **kwargs)
self.fields['attribute_name'].value = "test 2"
# Lets take a look at the first item to be copied and suggest a field
# name to change
def save(self):
for i in self.changed_objects:
key = self.cleaned_data['attribute_name']
value = self.cleaned_data['new_value']
kwargs = {key: value}
i.copy(**kwargs)
class BulkDeleteForm(BaseBulkForm):
""" Form used to delete multiple objects at once """
yes_i_am_sure = forms.BooleanField(label=_("Yes, I am sure"))
def delete(self):
""" Deletes every object in the form """
for i in self.changed_objects:
if i.object_type == 'host':
recursive = True
else:
recursive = False
i.delete(recursive=recursive)
class CheckCommandForm(PynagForm):
def __init__(self, *args, **kwargs):
super(AdagiosForm, self).__init__(*args, **kwargs)
self.pynag_object = Model.Service()
self.fields['host_name'] = self.get_pynagField('host_name')
self.fields['service_description'] = self.get_pynagField(
'service_description')
self.fields['check_command'] = self.get_pynagField('check_command')
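# (object_type, object_type) choice tuples for every object type pynag knows about,
# used by the object_type dropdown in AddTemplateForm below.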
choices_for_all_types = sorted(
map(lambda x: (x, x), Model.string_to_class.keys()))
class AddTemplateForm(PynagForm):
""" Use this form to add one template """
object_type = forms.ChoiceField(choices=choices_for_all_types)
name = forms.CharField(max_length=100)
def __init__(self, *args, **kwargs):
super(PynagForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super(AddTemplateForm, self).clean()
if "object_type" not in cleaned_data:
raise forms.ValidationError(_('Object type is required'))
object_type = cleaned_data['object_type']
name = cleaned_data['name']
if object_type not in Model.string_to_class:
raise forms.ValidationError(
_("We dont know nothing about how to add a '%(object_type)s'") % {'object_type': object_type})
objectdefinition = Model.string_to_class.get(object_type)
# Check if name already exists
try:
objectdefinition.objects.get_by_name(name)
raise forms.ValidationError(
_("A %(object_type)s with name='%(name)s' already exists.") % {'object_type': object_type,
'name': name,
})
except KeyError:
pass
self.pynag_object = objectdefinition()
self.pynag_object['register'] = "0"
return cleaned_data
class AddObjectForm(PynagForm):
def __init__(self, object_type, initial=None, *args, **kwargs):
self.pynag_object = Model.string_to_class.get(object_type)()
super(AdagiosForm, self).__init__(*args, **kwargs)
# Some object types we will suggest a template:
if object_type in ('host', 'contact', 'service'):
self.fields['use'] = self.get_pynagField('use')
self.fields['use'].initial = str('generic-%s' % object_type)
self.fields['use'].help_text = _("Inherit attributes from this template")
if object_type == 'host':
self.fields['host_name'] = self.get_pynagField('host_name', required=True)
self.fields['address'] = self.get_pynagField('address', required=True)
self.fields['alias'] = self.get_pynagField('alias', required=False)
elif object_type == 'service':
self.fields['service_description'] = self.get_pynagField('service_description', required=True)
self.fields['host_name'] = self.get_pynagField('host_name', required=False)
self.fields['host_name'].help_text = _('Tell us which host this service check will be applied to')
self.fields['hostgroup_name'] = self.get_pynagField('hostgroup_name', required=False)
self.fields['hostgroup_name'].help_text = _("If you specify any hostgroups, this service will be applied to all hosts in that hostgroup")
else:
field_name = "%s_name" % object_type
self.fields[field_name] = self.get_pynagField(
field_name, required=True)
        # For some reason calling super().__init__() with initial as a parameter
# will not work on PynagChoiceFields. This forces initial value to be set:
initial = initial or {}
for field_name, field in self.fields.items():
initial_value = initial.get(field_name, None)
if initial_value:
field.initial = str(initial_value)
def clean(self):
cleaned_data = super(AddObjectForm, self).clean()
if self.pynag_object.object_type == 'service':
host_name = cleaned_data.get('host_name')
hostgroup_name = cleaned_data.get('hostgroup_name')
if host_name in (None, 'None', '') and hostgroup_name in (None, 'None', ''):
raise forms.ValidationError(_("Please specify either hostgroup_name or host_name"))
return cleaned_data
def clean_timeperiod_name(self):
return self._clean_shortname()
def clean_command_name(self):
return self._clean_shortname()
def clean_contactgroup_name(self):
return self._clean_shortname()
def clean_servicegroup_name(self):
return self._clean_shortname()
def clean_contact_name(self):
return self._clean_shortname()
def clean_host_name(self):
if self.pynag_object.object_type == 'service':
value = self.cleaned_data['host_name']
if not value or value == 'null':
return None
hosts = value.split(',')
for i in hosts:
existing_hosts = Model.Host.objects.filter(host_name=i)
if not existing_hosts:
raise forms.ValidationError(
_("Could not find host called '%(i)s'") % {'i': i})
return smart_str(self.cleaned_data['host_name'])
return self._clean_shortname()
def clean_hostgroup_name(self):
if self.pynag_object.object_type == 'service':
value = self.cleaned_data['hostgroup_name']
if value in (None, '', 'null'):
return None
groups = value.split(',')
for i in groups:
existing_hostgroups = Model.Hostgroup.objects.filter(hostgroup_name=i)
if not existing_hostgroups:
raise forms.ValidationError(
_("Could not find hostgroup called '%(i)s'") % {'i': i})
return smart_str(self.cleaned_data['hostgroup_name'])
return self._clean_shortname()
def _clean_shortname(self):
""" Make sure shortname of a particular object does not exist.
Raise validation error if shortname is found
"""
object_type = self.pynag_object.object_type
field_name = "%s_name" % object_type
value = smart_str(self.cleaned_data[field_name])
try:
self.pynag_object.objects.get_by_shortname(value)
raise forms.ValidationError(
_("A %(object_type)s with %(field_name)s='%(value)s' already exists.") % {'object_type': object_type,
'field_name': field_name,
'value': value,
})
except KeyError:
return value
########NEW FILE########
__FILENAME__ = help_text
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" objectbrowser/all_attributes.py
This extends pynag's all_attributes with friendly help messages for all attributes.
"""
from pynag.Model.all_attributes import object_definitions
from django.utils.translation import ugettext as _
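# Each assignment below fills in object_definitions[<object_type>][<attribute>]["help_text"],
# which the objectbrowser forms (e.g. PynagForm in objectbrowser/forms.py) display
# next to the corresponding field.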
object_definitions["any"]["use"][
"help_text"] = _("Specifies which object to inherit settings from")
object_definitions["any"]["register"][
"help_text"] = _("Specifies if object is active (registered) or not")
object_definitions["any"]["name"][
"help_text"] = _("Generic name of this objects. Only used for templates.")
object_definitions["host"]["host_name"]["help_text"] = _("e.g. web01.example.com")
object_definitions["host"]["alias"]["help_text"] = _("e.g. My Monitored Host")
object_definitions["host"]["display_name"]["help_text"] = _(" ")
object_definitions["host"]["address"]["help_text"] = _("e.g. 127.0.0.1")
object_definitions["host"]["parents"][
"help_text"] = _("Network parents of this host. No notification will be sent if parent is down.")
object_definitions["host"]["hostgroups"][
"help_text"] = _("Which hostgroups this host belongs to")
object_definitions["host"]["check_command"][
"help_text"] = _("Command to execute when this object is checked")
object_definitions["host"]["initial_state"][
"help_text"] = _('By default Nagios will assume that all hosts are in UP states when it starts. You can override the initial state for a host by using this directive. Valid options are: o = UP, d = DOWN, and u = UNREACHABLE.')
object_definitions["host"]["max_check_attempts"][
"help_text"] = _("How many failures do occur before notifications will be sent")
object_definitions["host"]["check_interval"][
"help_text"] = _("How many minutes to wait between checks")
object_definitions["host"]["retry_interval"][
"help_text"] = _("How many minutes to wait between checks when object goes to warning or critical state")
object_definitions["host"]["active_checks_enabled"][
"help_text"] = _("Whether Nagios actively checks this host")
object_definitions["host"]["passive_checks_enabled"][
"help_text"] = _("Whether Nagios passively accepts check results from an external source")
object_definitions["host"]["check_period"][
"help_text"] = _("When nagios checks for this host")
object_definitions["host"]["obsess_over_host"][
"help_text"] = _('This directive determines whether or not checks for the host will be "obsessed" over using the ochp_command.')
object_definitions["host"]["check_freshness"]["help_text"] = _(" ")
object_definitions["host"]["freshness_threshold"]["help_text"] = _(" ")
object_definitions["host"]["event_handler"]["help_text"] = _(" ")
object_definitions["host"]["event_handler_enabled"]["help_text"] = _(" ")
object_definitions["host"]["low_flap_threshold"]["help_text"] = _(" ")
object_definitions["host"]["high_flap_threshold"]["help_text"] = _(" ")
object_definitions["host"]["flap_detection_enabled"]["help_text"] = _(" ")
object_definitions["host"]["flap_detection_options"]["help_text"] = _(" ")
object_definitions["host"]["process_perf_data"]["help_text"] = _(" ")
object_definitions["host"]["retain_status_information"]["help_text"] = _(" ")
object_definitions["host"]["retain_nonstatus_information"]["help_text"] = _(" ")
object_definitions["host"]["contacts"]["help_text"] = _(" ")
object_definitions["host"]["contact_groups"]["help_text"] = _(" ")
object_definitions["host"]["notification_interval"]["help_text"] = _(" ")
object_definitions["host"]["first_notification_delay"]["help_text"] = _(" ")
object_definitions["host"]["notification_period"]["help_text"] = _(" ")
object_definitions["host"]["notification_options"]["help_text"] = _(" ")
object_definitions["host"]["notifications_enabled"]["help_text"] = _(" ")
object_definitions["host"]["stalking_options"]["help_text"] = _(" ")
object_definitions["host"]["notes"]["help_text"] = _(" ")
object_definitions["host"]["notes_url"]["help_text"] = _(" ")
object_definitions["host"]["action_url"]["help_text"] = _(" ")
object_definitions["host"]["icon_image"]["help_text"] = _(" ")
object_definitions["host"]["icon_image_alt"]["help_text"] = _(" ")
object_definitions["host"]["vrml_image"]["help_text"] = _(" ")
object_definitions["host"]["statusmap_image"]["help_text"] = _(" ")
object_definitions["host"]["2d_coords"]["help_text"] = _(" ")
object_definitions["host"]["3d_coords"]["help_text"] = _(" ")
object_definitions["hostgroup"]["hostgroup_name"][
"help_text"] = _("Unique name for this hostgroup (e.g. webservers)")
object_definitions["hostgroup"]["alias"][
"help_text"] = _("Human friendly name (e.g. My Web Servers)")
object_definitions["hostgroup"]["members"][
"help_text"] = _("List of hosts that belong to this group")
object_definitions["hostgroup"]["hostgroup_members"][
"help_text"] = _("List of hostgroups that belong to this group")
object_definitions["hostgroup"]["notes"][
"help_text"] = _("You can put your custom notes here for your hostgroup")
object_definitions["hostgroup"]["notes_url"][
"help_text"] = _("Type in an url for example to a documentation site for this hostgroup")
object_definitions["hostgroup"]["action_url"]["help_text"] = _(" ")
object_definitions["service"]["host_name"][
"help_text"] = _("e.g. web01.example.com")
object_definitions["service"]["hostgroup_name"][
"help_text"] = _("Hostgroup this service belongs to")
object_definitions["service"]["service_description"][
"help_text"] = _("e.g. 'Disk Status'")
object_definitions["service"]["display_name"]["help_text"] = _(" ")
object_definitions["service"]["servicegroups"][
"help_text"] = _("Servicegroups that this service belongs to")
object_definitions["service"]["is_volatile"]["help_text"] = _(" ")
object_definitions["service"]["check_command"][
"help_text"] = _("Command that is executed when this service is checked")
object_definitions["service"]["initial_state"]["help_text"] = _(" ")
object_definitions["service"]["max_check_attempts"][
"help_text"] = _("How many times to try before failure notifications are sent out")
object_definitions["service"]["check_interval"][
"help_text"] = _("How many minutes to wait between checks")
object_definitions["service"]["retry_interval"][
"help_text"] = _("How many minutes to wait between checks when failure occurs")
object_definitions["service"]["active_checks_enabled"][
"help_text"] = _("Enable if you want nagios to actively check this service")
object_definitions["service"]["passive_checks_enabled"][
"help_text"] = _("Enable if you want nagios to passively accept check results from an external source")
object_definitions["service"]["check_period"][
"help_text"] = _("Period which this service is checked.")
object_definitions["service"]["obsess_over_service"]["help_text"] = _(" ")
object_definitions["service"]["check_freshness"]["help_text"] = _(" ")
object_definitions["service"]["freshness_threshold"]["help_text"] = _(" ")
object_definitions["service"]["event_handler"]["help_text"] = _(" ")
object_definitions["service"]["event_handler_enabled"]["help_text"] = _(" ")
object_definitions["service"]["low_flap_threshold"]["help_text"] = _(" ")
object_definitions["service"]["high_flap_threshold"]["help_text"] = _(" ")
object_definitions["service"]["flap_detection_enabled"]["help_text"] = _(" ")
object_definitions["service"]["flap_detection_options"]["help_text"] = _(" ")
object_definitions["service"]["process_perf_data"]["help_text"] = _(" ")
object_definitions["service"]["retain_status_information"]["help_text"] = _(" ")
object_definitions["service"]["retain_nonstatus_information"]["help_text"] = _(" ")
object_definitions["service"]["notification_interval"]["help_text"] = _(" ")
object_definitions["service"]["first_notification_delay"]["help_text"] = _(" ")
object_definitions["service"]["notification_period"][
"help_text"] = _("Period which notifications are sent out for this service")
object_definitions["service"]["notification_options"]["help_text"] = _(" ")
object_definitions["service"]["notifications_enabled"]["help_text"] = _(" ")
object_definitions["service"]["contacts"][
"help_text"] = _("Which contacts to notify if service fails")
object_definitions["service"]["contact_groups"][
"help_text"] = _("Which contactgroups to send notifications to if service fails")
object_definitions["service"]["stalking_options"]["help_text"] = _(" ")
object_definitions["service"]["notes"]["help_text"] = _(" ")
object_definitions["service"]["notes_url"]["help_text"] = _(" ")
object_definitions["service"]["action_url"]["help_text"] = _(" ")
object_definitions["service"]["icon_image"]["help_text"] = _(" ")
object_definitions["service"]["icon_image_alt"]["help_text"] = _(" ")
object_definitions["servicegroup"]["servicegroup_name"][
"help_text"] = _("Unique name for this service group")
object_definitions["servicegroup"]["alias"][
"help_text"] = _("Human friendly name for this servicegroup")
object_definitions["servicegroup"]["members"][
"help_text"] = _("List of services that belong to this group (Example: localhost,CPU Utilization,localhost,Disk Usage)")
object_definitions["servicegroup"]["servicegroup_members"][
"help_text"] = _("Servicegroups that are members of this servicegroup")
object_definitions["servicegroup"]["notes"][
"help_text"] = _("Arbitrary notes or description of this servicegroup")
object_definitions["servicegroup"]["notes_url"][
"help_text"] = _("Arbitrary url to a site of your choice")
object_definitions["servicegroup"]["action_url"][
"help_text"] = _("Arbitrary url to a site of your choice")
object_definitions["contact"]["contact_name"][
"help_text"] = _("Unique name for this contact (e.g. [email protected])")
object_definitions["contact"]["alias"][
"help_text"] = _("Human Friendly Name for this contact (e.g. Full Name)")
object_definitions["contact"]["contactgroups"][
"help_text"] = _("List of groups that this contact is a member of.")
object_definitions["contact"]["host_notifications_enabled"][
"help_text"] = _("If this contact will receive host notifications.")
object_definitions["contact"]["service_notifications_enabled"][
"help_text"] = _("If this contact will receive service notifications.")
object_definitions["contact"]["host_notification_period"][
"help_text"] = _("When will this contact receive host notifications")
object_definitions["contact"]["service_notification_period"][
"help_text"] = _("When will this contact receive service notifications")
object_definitions["contact"]["host_notification_options"][
"help_text"] = _("Which host notifications this contact will receive")
object_definitions["contact"]["service_notification_options"][
"help_text"] = _("Which service notifications this contact will receive")
object_definitions["contact"]["host_notification_commands"][
"help_text"] = _("What command will be used to send host notifications to this contact")
object_definitions["contact"]["service_notification_commands"][
"help_text"] = _("What command will be used to send service notifications to this contact")
object_definitions["contact"]["email"][
"help_text"] = _("E-mail address of this contact")
object_definitions["contact"]["pager"][
"help_text"] = _("Pager number of this contact")
object_definitions["contact"]["address"][
"help_text"] = _("Address of this contact")
object_definitions["contact"]["can_submit_commands"][
"help_text"] = _("If this contact is able to submit commands to nagios command pipe")
object_definitions["contact"]["retain_status_information"]["help_text"] = _(" ")
object_definitions["contact"]["retain_nonstatus_information"]["help_text"] = _(" ")
object_definitions["contactgroup"]["contactgroup_name"][
"help_text"] = _("Unique name for this contact group (e.g. 'webservers')")
object_definitions["contactgroup"]["alias"][
"help_text"] = _("Human Friendly Name (e.g. 'My Web Servers')")
object_definitions["contactgroup"]["members"][
"help_text"] = _("Every Contact listed here will be a member of this contactgroup")
object_definitions["contactgroup"]["contactgroup_members"][
"help_text"] = _("Every Contactgroup listed here will be a member of this contactgroup")
object_definitions["timeperiod"]["timeperiod_name"][
"help_text"] = _("Unique name for this timeperiod (.e.g. 'workhours')")
object_definitions["timeperiod"]["alias"][
"help_text"] = _("Human Friendly name for this timeperiod")
object_definitions["timeperiod"]["[weekday]"]["help_text"] = _(" ")
object_definitions["timeperiod"]["[exception]"]["help_text"] = _(" ")
object_definitions["timeperiod"]["exclude"]["help_text"] = _(" ")
object_definitions["command"]["command_name"][
"help_text"] = _("Unique name for this command")
object_definitions["command"]["command_line"][
"help_text"] = _("Command line of the command that will be executed")
object_definitions["servicedependency"][
"dependent_host_name"]["help_text"] = _(" ")
object_definitions["servicedependency"][
"dependent_hostgroup_name"]["help_text"] = _(" ")
object_definitions["servicedependency"][
"dependent_service_description"]["help_text"] = _(" ")
object_definitions["servicedependency"]["host_name"]["help_text"] = _(" ")
object_definitions["servicedependency"]["hostgroup_name"]["help_text"] = _(" ")
object_definitions["servicedependency"][
"service_description"]["help_text"] = _(" ")
object_definitions["servicedependency"]["inherits_parent"]["help_text"] = _(" ")
object_definitions["servicedependency"][
"execution_failure_criteria"]["help_text"] = _(" ")
object_definitions["servicedependency"][
"notification_failure_criteria"]["help_text"] = _(" ")
object_definitions["servicedependency"]["dependency_period"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["host_name"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["hostgroup_name"]["help_text"] = _(" ")
object_definitions["serviceescalation"][
"service_description"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["contacts"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["contact_groups"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["first_notification"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["last_notification"]["help_text"] = _(" ")
object_definitions["serviceescalation"][
"notification_interval"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["escalation_period"]["help_text"] = _(" ")
object_definitions["serviceescalation"]["escalation_options"]["help_text"] = _(" ")
object_definitions["hostdependency"]["dependent_host_name"]["help_text"] = _(" ")
object_definitions["hostdependency"][
"dependent_hostgroup_name"]["help_text"] = _(" ")
object_definitions["hostdependency"]["host_name"]["help_text"] = _(" ")
object_definitions["hostdependency"]["hostgroup_name"]["help_text"] = _(" ")
object_definitions["hostdependency"]["inherits_parent"]["help_text"] = _(" ")
object_definitions["hostdependency"][
"execution_failure_criteria"]["help_text"] = _(" ")
object_definitions["hostdependency"][
"notification_failure_criteria"]["help_text"] = _(" ")
object_definitions["hostdependency"]["dependency_period"]["help_text"] = _(" ")
object_definitions["hostescalation"]["host_name"]["help_text"] = _(" ")
object_definitions["hostescalation"]["hostgroup_name"]["help_text"] = _(" ")
object_definitions["hostescalation"]["contacts"]["help_text"] = _(" ")
object_definitions["hostescalation"]["contact_groups"]["help_text"] = _(" ")
object_definitions["hostescalation"]["first_notification"]["help_text"] = _(" ")
object_definitions["hostescalation"]["last_notification"]["help_text"] = _(" ")
object_definitions["hostescalation"]["notification_interval"]["help_text"] = _(" ")
object_definitions["hostescalation"]["escalation_period"]["help_text"] = _(" ")
object_definitions["hostescalation"]["escalation_options"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["host_name"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["notes"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["notes_url"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["action_url"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["icon_image"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["icon_image_alt"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["vrml_image"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["statusmap_image"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["2d_coords"]["help_text"] = _(" ")
object_definitions["hostextinfo"]["3d_coords"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["host_name"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["service_description"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["notes"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["notes_url"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["action_url"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["icon_image"]["help_text"] = _(" ")
object_definitions["serviceextinfo"]["icon_image_alt"]["help_text"] = _(" ")
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
class Attribute(models.Model):
"""This class stores info on how attributes are viewed in django"""
attribute_name = models.CharField(max_length=200)
attribute_friendlyname = models.CharField(max_length=200)
attribute_type = models.CharField(max_length=200)
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Model
import adagios.settings
pynag.Model.cfg_file = adagios.settings.nagios_config
class TestObjectBrowser(unittest.TestCase):
def testNagiosConfigFile(self):
result = pynag.Model.ObjectDefinition.objects.all
config = pynag.Model.config.cfg_file
self.assertGreaterEqual(
len(result), 0, msg=_("Parsed nagios.cfg, but found no objects, are you sure this is the right config file (%(config)s) ? ") % {'config': config})
def testIndexPage(self):
c = Client()
response = c.get('/objectbrowser/')
self.assertEqual(response.status_code, 200)
def testPageLoad(self):
""" Smoke test a bunch of views """
# TODO: Better tests, at least squeeze out a 200OK for these views
self.loadPage('/objectbrowser/')
self.loadPage('/objectbrowser/copy', 404)
self.loadPage('/objectbrowser/search')
self.loadPage('/objectbrowser/delete', 404)
self.loadPage('/objectbrowser/bulk_edit')
self.loadPage('/objectbrowser/bulk_delete')
self.loadPage('/objectbrowser/bulk_copy')
self.loadPage('/objectbrowser/edit_all', 404)
self.loadPage('/objectbrowser/copy_and_edit', 301)
self.loadPage('/objectbrowser/confighealth')
self.loadPage('/objectbrowser/plugins')
self.loadPage('/objectbrowser/nagios.cfg')
self.loadPage('/objectbrowser/geek_edit', 404)
self.loadPage('/objectbrowser/advanced_edit', 404)
#self.loadPage('/objectbrowser/add_to_group')
self.loadPage('/objectbrowser/add/host', 200)
self.loadPage('/objectbrowser/add/hostgroup', 200)
self.loadPage('/objectbrowser/add/service', 200)
self.loadPage('/objectbrowser/add/servicegroup', 200)
self.loadPage('/objectbrowser/add/contact', 200)
self.loadPage('/objectbrowser/add/contactgroup', 200)
self.loadPage('/objectbrowser/add/timeperiod', 200)
self.loadPage('/objectbrowser/add/command', 200)
self.loadPage('/objectbrowser/add/template', 200)
def loadPage(self, url, expected_code=200):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, expected_code, _("Expected status code 200 for page %(url)s") % {'url': url})
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(error)s") % {'url': url, 'error': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
urlpatterns = patterns('adagios',
url(r'^/$', 'objectbrowser.views.list_object_types', name="objectbrowser"),
url(r'^/edit_all/(?P<object_type>.+)/(?P<attribute_name>.+)/?$', 'objectbrowser.views.edit_all'),
url(r'^/search/?$', 'objectbrowser.views.search_objects', name="search"),
url(r'^/edit/(?P<object_id>.+?)?$', 'objectbrowser.views.edit_object', name="edit_object"),
url(r'^/edit/?$', 'objectbrowser.views.edit_object'),
url(r'^/copy_and_edit/(?P<object_id>.+?)?$', 'objectbrowser.views.copy_and_edit_object'),
url(r'^/copy/(?P<object_id>.+)$', 'objectbrowser.views.copy_object', name="copy_object"),
url(r'^/delete/(?P<object_id>.+)$', 'objectbrowser.views.delete_object', name="delete_object"),
url(r'^/delete/(?P<object_type>.+?)/(?P<shortname>.+)/?$', 'objectbrowser.views.delete_object_by_shortname', name="delete_by_shortname"),
url(r'^/add/(?P<object_type>.+)$', 'objectbrowser.views.add_object', name="addobject"),
url(r'^/bulk_edit/?$', 'objectbrowser.views.bulk_edit', name='bulk_edit'),
url(r'^/bulk_delete/?$', 'objectbrowser.views.bulk_delete', name='bulk_delete'),
url(r'^/bulk_copy/?$', 'objectbrowser.views.bulk_copy', name='bulk_copy'),
url(r'^/add_to_group/(?P<group_type>.+)/(?P<group_name>.+)/?$', 'objectbrowser.views.add_to_group'),
url(r'^/add_to_group/(?P<group_type>.+)/?$', 'objectbrowser.views.add_to_group'),
url(r'^/add_to_group', 'objectbrowser.views.add_to_group'),
url(r'^/confighealth/?$', 'objectbrowser.views.config_health'),
url(r'^/plugins/?$', 'objectbrowser.views.show_plugins'),
url(r'^/nagios.cfg/?$', 'objectbrowser.views.edit_nagios_cfg'),
url(r'^/nagios.cfg/edit/?$', 'misc.views.edit_nagios_cfg'),
url(r'^/geek_edit/id=(?P<object_id>.+)$', 'objectbrowser.views.geek_edit'),
url(r'^/advanced_edit/id=(?P<object_id>.+)$', 'objectbrowser.views.advanced_edit'),
# Here for backwards compatibility.
url(r'^/edit/id=(?P<object_id>.+)$', 'objectbrowser.views.edit_object', ),
url(r'^/id=(?P<object_id>.+)$', 'objectbrowser.views.edit_object', ),
# These should be deprecated as of 2012-08-27
url(r'^/copy_object/id=(?P<object_id>.+)$', 'objectbrowser.views.copy_object'),
url(r'^/delete_object/id=(?P<object_id>.+)$', 'objectbrowser.views.delete_object'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render_to_response, redirect, HttpResponse, Http404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
import os
from os.path import dirname
from pynag.Model import ObjectDefinition, string_to_class
from pynag import Model
from pynag.Parsers import status
import pynag.Utils
from collections import defaultdict, namedtuple
import pynag.Model
from adagios import settings
from adagios.objectbrowser.forms import *
from adagios.views import adagios_decorator
@adagios_decorator
def home(request):
return redirect('adagios')
@adagios_decorator
def list_object_types(request):
""" Collects statistics about pynag objects and returns to template """
c = {}
return render_to_response('list_object_types.html', c, context_instance=RequestContext(request))
@adagios_decorator
def geek_edit(request, object_id):
""" Function handles POST requests for the geek edit form """
c = {}
c.update(csrf(request))
c['messages'] = m = []
c['errors'] = []
# Get our object
try:
o = ObjectDefinition.objects.get_by_id(id=object_id)
except Exception, e:
# This is an ugly hack. If unknown object ID was specified and it so happens to
# Be the same as a brand new empty object definition we will assume that we are
# to create a new object definition instead of throwing error because ours was
# not found.
for i in Model.string_to_class.values():
if i().get_id() == object_id:
o = i()
break
else:
c['error_summary'] = _('Unable to find object')
c['error'] = e
return render_to_response('error.html', c, context_instance=RequestContext(request))
c['my_object'] = o
if request.method == 'POST':
# Manual edit of the form
form = GeekEditObjectForm(pynag_object=o, data=request.POST)
if form.is_valid():
try:
form.save()
m.append("Object Saved manually to '%s'" % o['filename'])
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
else:
c['errors'].append(_("Problem with saving object"))
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
else:
form = GeekEditObjectForm(
initial={'definition': o['meta']['raw_definition'], })
c['geek_edit'] = form
# Lets return the user to the general edit_object form
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': o.get_id()}))
@adagios_decorator
def advanced_edit(request, object_id):
""" Handles POST only requests for the "advanced" object edit form. """
c = {}
c.update(csrf(request))
c['messages'] = m = []
c['errors'] = []
# Get our object
try:
o = ObjectDefinition.objects.get_by_id(id=object_id)
c['my_object'] = o
except Exception, e:
# This is an ugly hack. If unknown object ID was specified and it so happens to
# Be the same as a brand new empty object definition we will assume that we are
# to create a new object definition instead of throwing error because ours was
# not found.
for i in Model.string_to_class.values():
if i().get_id() == object_id:
o = i()
break
else:
c['error_summary'] = _('Unable to get object')
c['error'] = e
return render_to_response('error.html', c, context_instance=RequestContext(request))
if request.method == 'POST':
# User is posting data into our form
c['advanced_form'] = AdvancedEditForm(
pynag_object=o, initial=o._original_attributes, data=request.POST)
if c['advanced_form'].is_valid():
try:
c['advanced_form'].save()
m.append(_("Object Saved to %(filename)s") % o)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
else:
c['errors'].append(_("Problem reading form input"))
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
return HttpResponseRedirect(reverse('edit_object', args=[o.get_id()]))
@adagios_decorator
def edit_object(request, object_id=None):
""" Brings up an edit dialog for one specific object.
If an object_id is specified, bring us to that exact object.
Otherwise we expect some search arguments to have been provided via querystring
"""
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
my_object = None # This is where we store our item that we are editing
# If object_id was not provided, lets see if anything was given to us in a querystring
if not object_id:
objects = pynag.Model.ObjectDefinition.objects.filter(**request.GET)
if len(objects) == 1:
my_object = objects[0]
else:
return search_objects(request)
else:
try:
my_object = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
except KeyError:
c['error_summary'] = _('Could not find any object with id="%(object_id)s" :/') % {'object_id': object_id}
c['error_type'] = _("object not found")
return render_to_response('error.html', c, context_instance=RequestContext(request))
if request.method == 'POST':
# User is posting data into our form
c['form'] = PynagForm(
pynag_object=my_object,
initial=my_object._original_attributes,
data=request.POST
)
if c['form'].is_valid():
try:
c['form'].save()
c['messages'].append(_("Object Saved to %(filename)s") % my_object)
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': my_object.get_id()}))
except Exception, e:
c['errors'].append(e)
else:
c['errors'].append(_("Could not validate form input"))
if 'form' not in c:
c['form'] = PynagForm(pynag_object=my_object, initial=my_object._original_attributes)
c['my_object'] = my_object
c['geek_edit'] = GeekEditObjectForm(
initial={'definition': my_object['meta']['raw_definition'], })
c['advanced_form'] = AdvancedEditForm(
pynag_object=my_object, initial=my_object._original_attributes)
try:
c['effective_hosts'] = my_object.get_effective_hosts()
except KeyError, e:
c['errors'].append(_("Could not find host: %(error)s") % {'error': str(e)})
except AttributeError:
pass
try:
c['effective_parents'] = my_object.get_effective_parents(cache_only=True)
except KeyError, e:
c['errors'].append(_("Could not find parent: %(error)s") % {'error': str(e)})
# Every object type has some special treatment, so lets resort
# to appropriate helper function
    if my_object['object_type'] == 'servicegroup':
return _edit_servicegroup(request, c)
elif my_object['object_type'] == 'hostdependency':
return _edit_hostdependency(request, c)
elif my_object['object_type'] == 'service':
return _edit_service(request, c)
elif my_object['object_type'] == 'contactgroup':
return _edit_contactgroup(request, c)
elif my_object['object_type'] == 'hostgroup':
return _edit_hostgroup(request, c)
elif my_object['object_type'] == 'host':
return _edit_host(request, c)
elif my_object['object_type'] == 'contact':
return _edit_contact(request, c)
elif my_object['object_type'] == 'command':
return _edit_command(request, c)
elif my_object['object_type'] == 'servicedependency':
return _edit_servicedependency(request, c)
elif my_object['object_type'] == 'timeperiod':
return _edit_timeperiod(request, c)
else:
return render_to_response('edit_object.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_contact(request, c):
""" This is a helper function to edit_object """
try:
c['effective_contactgroups'] = c[
'my_object'].get_effective_contactgroups()
except KeyError, e:
c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
return render_to_response('edit_contact.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_service(request, c):
""" This is a helper function to edit_object """
service = c['my_object']
try:
c['command_line'] = service.get_effective_command_line()
except KeyError:
c['command_line'] = None
try:
c['object_macros'] = service.get_all_macros()
except KeyError:
c['object_macros'] = None
# Get the current status from Nagios
try:
s = status()
s.parse()
c['status'] = s.get_servicestatus(
service['host_name'], service['service_description'])
current_state = c['status']['current_state']
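        # Map the Nagios service state (0=OK, 1=WARNING, 2=CRITICAL, anything else=UNKNOWN)
        # to a human readable text and a css label class for the template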
if current_state == "0":
c['status']['text'] = 'OK'
c['status']['css_label'] = 'label-success'
elif current_state == "1":
c['status']['text'] = 'Warning'
c['status']['css_label'] = 'label-warning'
elif current_state == "2":
c['status']['text'] = 'Critical'
c['status']['css_label'] = 'label-important'
else:
c['status']['text'] = 'Unknown'
c['status']['css_label'] = 'label-inverse'
except Exception:
pass
try:
c['effective_servicegroups'] = service.get_effective_servicegroups()
except KeyError, e:
c['errors'].append(_("Could not find servicegroup: %(error)s") % {'error': str(e)})
try:
c['effective_contacts'] = service.get_effective_contacts()
except KeyError, e:
c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
try:
c['effective_contactgroups'] = service.get_effective_contact_groups()
except KeyError, e:
c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
try:
c['effective_hostgroups'] = service.get_effective_hostgroups()
except KeyError, e:
c['errors'].append(_("Could not find hostgroup: %(error)s") % {'error': str(e)})
try:
c['effective_command'] = service.get_effective_check_command()
except KeyError, e:
if service.check_command is not None:
c['errors'].append(_("Could not find check_command: %(error)s") % {'error': str(e)})
elif service.register != '0':
c['errors'].append(_("You need to define a check command"))
# For the check_command editor, we inject current check_command and a list
# of all check_commands
c['check_command'] = (service.check_command or '').split("!")[0]
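    # Nagios packs check_command arguments with '!' separators, e.g. (illustrative)
    # "check_ping!100.0,20%!500.0,60%" - only the command name itself is kept here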
c['command_names'] = map(
lambda x: x.get("command_name", ''), Model.Command.objects.all)
if c['check_command'] in (None, '', 'None'):
c['check_command'] = ''
if service.hostgroup_name and service.hostgroup_name != 'null':
c['errors'].append(_("This Service is applied to every host in hostgroup %(hostgroup_name)s") % {'hostgroup_name': service.hostgroup_name})
host_name = service.host_name or ''
if ',' in host_name:
c['errors'].append(_("This Service is applied to multiple hosts"))
return render_to_response('edit_service.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_contactgroup(request, c):
""" This is a helper function to edit_object """
try:
c['effective_contactgroups'] = c[
'my_object'].get_effective_contactgroups()
except KeyError, e:
c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
try:
c['effective_contacts'] = c['my_object'].get_effective_contacts()
except KeyError, e:
c['errors'].append("Could not find contact: %s" % str(e))
try:
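        # The __has_field filter suffix matches when this group's name appears in the comma-separated
        # contactgroup_members attribute of another contactgroup, i.e. the groups we are a member of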
c['effective_memberof'] = Model.Contactgroup.objects.filter(
contactgroup_members__has_field=c['my_object'].contactgroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_contactgroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_hostgroup(request, c):
""" This is a helper function to edit_object """
hostgroup = c['my_object']
try:
c['effective_services'] = sorted(
hostgroup.get_effective_services(), key=lambda x: x.get_description())
except KeyError, e:
c['errors'].append(_("Could not find service: %(error)s") % {'error': str(e)})
try:
c['effective_memberof'] = Model.Hostgroup.objects.filter(
hostgroup_members__has_field=c['my_object'].hostgroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_hostgroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_servicegroup(request, c):
""" This is a helper function to edit_object """
try:
c['effective_memberof'] = Model.Servicegroup.objects.filter(
servicegroup_members__has_field=c['my_object'].servicegroup_name)
except Exception, e:
c['errors'].append(e)
return render_to_response('edit_servicegroup.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_command(request, c):
""" This is a helper function to edit_object """
return render_to_response('edit_command.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_hostdependency(request, c):
""" This is a helper function to edit_object """
return render_to_response('edit_hostdepedency.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_servicedependency(request, c):
""" This is a helper function to edit_object """
return render_to_response('_edit_servicedependency.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_timeperiod(request, c):
""" This is a helper function to edit_object """
return render_to_response('edit_timeperiod.html', c, context_instance=RequestContext(request))
@pynag.Utils.cache_only
def _edit_host(request, c):
""" This is a helper function to edit_object """
host = c['my_object']
try:
c['command_line'] = host.get_effective_command_line()
except KeyError:
c['command_line'] = None
try:
c['object_macros'] = host.get_all_macros()
except KeyError:
c['object_macros'] = None
if not 'errors' in c:
c['errors'] = []
try:
c['effective_services'] = sorted(
host.get_effective_services(), key=lambda x: x.get_description())
except KeyError, e:
c['errors'].append(_("Could not find service: %(error)s") % {'error': str(e)})
try:
c['effective_hostgroups'] = host.get_effective_hostgroups()
except KeyError, e:
c['errors'].append(_("Could not find hostgroup: %(error)s") % {'error': str(e)})
try:
c['effective_contacts'] = host.get_effective_contacts()
except KeyError, e:
c['errors'].append(_("Could not find contact: %(error)s") % {'error': str(e)})
try:
c['effective_contactgroups'] = host.get_effective_contact_groups()
except KeyError, e:
c['errors'].append(_("Could not find contact_group: %(error)s") % {'error': str(e)})
try:
c['effective_command'] = host.get_effective_check_command()
except KeyError, e:
if host.check_command is not None:
c['errors'].append(_("Could not find check_command: %(error)s") % {'error': str(e)})
elif host.register != '0':
c['errors'].append(_("You need to define a check command"))
try:
s = status()
s.parse()
c['status'] = s.get_hoststatus(host['host_name'])
current_state = c['status']['current_state']
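        # Nagios host state 0 means UP, any other state is displayed as DOWN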
if int(current_state) == 0:
c['status']['text'] = 'UP'
c['status']['css_label'] = 'label-success'
else:
c['status']['text'] = 'DOWN'
c['status']['css_label'] = 'label-important'
except Exception:
pass
return render_to_response('edit_host.html', c, context_instance=RequestContext(request))
@adagios_decorator
def config_health(request):
""" Display possible errors in your nagios config
"""
c = dict()
c['messages'] = []
c['object_health'] = s = {}
c['booleans'] = {}
services_no_description = Model.Service.objects.filter(
register="1", service_description=None)
hosts_without_contacts = []
hosts_without_services = []
objects_with_invalid_parents = []
services_without_contacts = []
services_using_hostgroups = []
services_without_icon_image = []
c['booleans'][
_('Nagios Service has been reloaded since last configuration change')] = not Model.config.needs_reload()
c['booleans'][
_('Adagios configuration cache is up-to-date')] = not Model.config.needs_reparse()
for i in Model.config.errors:
if i.item:
Class = Model.string_to_class[i.item['meta']['object_type']]
i.model = Class(item=i.item)
c['parser_errors'] = Model.config.errors
try:
import okconfig
c['booleans'][
_('OKConfig is installed and working')] = okconfig.is_valid()
except Exception:
c['booleans'][_('OKConfig is installed and working')] = False
s['Parser errors'] = Model.config.errors
s['Services with no "service_description"'] = services_no_description
s['Hosts without any contacts'] = hosts_without_contacts
s['Services without any contacts'] = services_without_contacts
s['Objects with invalid "use" attribute'] = objects_with_invalid_parents
s['Services applied to hostgroups'] = services_using_hostgroups
s['Services without a logo'] = services_without_icon_image
s['Hosts without Service Checks'] = hosts_without_services
    if 'show' in request.GET and request.GET['show'] in s:
objects = s[request.GET['show']]
return search_objects(request, objects=objects)
else:
return render_to_response('suggestions.html', c, context_instance=RequestContext(request))
@adagios_decorator
def show_plugins(request):
""" Finds all command_line arguments, and shows missing plugins """
c = {}
missing_plugins = []
existing_plugins = []
finished = []
services = Model.Service.objects.all
common_interpreters = ['perl', 'python', 'sh', 'bash']
for s in services:
if not 'check_command' in s._defined_attributes:
continue
check_command = s.check_command.split('!')[0]
if check_command in finished:
continue
finished.append(check_command)
try:
command_line = s.get_effective_command_line()
except KeyError:
continue
if command_line is None:
continue
command_line = command_line.split()
command_name = command_line.pop(0)
if command_name in common_interpreters:
command_name = command_line.pop(0)
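        # e.g. (illustrative) a command line "perl /usr/lib64/nagios/plugins/check_foo -w 5" - the bare
        # interpreter name is skipped so the existence check below runs on the plugin path itself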
if os.path.exists(command_name):
existing_plugins.append((check_command, command_name))
else:
missing_plugins.append((check_command, command_name))
c['missing_plugins'] = missing_plugins
c['existing_plugins'] = existing_plugins
return render_to_response('show_plugins.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit_nagios_cfg(request):
""" This views is made to make modifications to nagios.cfg
"""
from pynag.Model.all_attributes import main_config
c = {'filename': Model.config.cfg_file}
c['content'] = []
for conf in sorted(main_config):
values = []
Model.config.parse_maincfg()
for k, v in Model.config.maincfg_values:
if conf == k:
values.append(v)
c['content'].append({
'doc': main_config[conf]['doc'],
'title': main_config[conf]['title'],
'examples': main_config[conf]['examples'],
'format': main_config[conf]['format'],
'options': main_config[conf]['options'],
'key': conf,
'values': values
})
for key, v in Model.config.maincfg_values:
if key not in main_config:
c['content'].append({
'title': _('No documentation found'),
'key': key,
'values': [v],
                'doc': _('This seems to be an undefined option and no documentation was found for it. '
                         'Perhaps it is misspelled.')
})
c['content'] = sorted(c['content'], key=lambda cfgitem: cfgitem['key'])
return render_to_response('edit_configfile.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_edit(request):
""" Edit multiple objects with one post """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['objects'] = objects = []
# Newer, alternative way to input items from the post data is in the form of
# object_type=shortname
# i.e. timeperiod=24x7, timeperiod=workhours
for i in _querydict_to_objects(request):
objects.append(i)
if request.method == 'GET':
if len(objects) == 1:
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': objects[0].get_id()}), )
c['form'] = BulkEditForm(objects=objects)
if request.method == "POST":
c['form'] = BulkEditForm(objects=objects, data=request.POST)
c['objects'] = c['form'].all_objects
if c['form'].is_valid():
try:
c['form'].save()
for i in c['form'].changed_objects:
c['messages'].append(
_("saved changes to %(object_type)s '%(description)s'") % {'object_type': i.object_type,
'description': i.get_description(),
})
c['success'] = "success"
except IOError, e:
c['errors'].append(e)
return render_to_response('bulk_edit.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_delete(request):
""" Edit delete multiple objects with one post """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['objects'] = objects = []
c['form'] = BulkDeleteForm(objects=objects)
# Newer, alternative way to input items from the post data is in the form of
# object_type=shortname
# i.e. timeperiod=24x7, timeperiod=workhours
for i in _querystring_to_objects(request.GET or request.POST):
try:
obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
if obj not in objects:
objects.append(obj)
except KeyError:
c['errors'].append(_("Could not find %(object_type)s '%(description)s' "
"Maybe it has already been deleted.") % {'object_type': i.object_type,
'description': i.description})
if request.method == "GET" and len(objects) == 1:
return HttpResponseRedirect(reverse('delete_object', kwargs={'object_id': objects[0].get_id()}), )
if request.method == "POST":
# Post items starting with "hidden_" will be displayed on the resulting web page
# Post items starting with "change_" will be modified
for i in request.POST.keys():
if i.startswith('change_'):
my_id = i[len('change_'):]
my_obj = ObjectDefinition.objects.get_by_id(my_id)
if my_obj not in objects:
objects.append(my_obj)
c['form'] = BulkDeleteForm(objects=objects, data=request.POST)
if c['form'].is_valid():
try:
c['form'].delete()
c['success'] = "Success"
for i in c['form'].changed_objects:
                    c['messages'].append(
                        _("Deleted %(object_type)s %(description)s") % {'object_type': i.object_type,
                                                                        'description': i.get_description()})
except IOError, e:
c['errors'].append(e)
return render_to_response('bulk_delete.html', c, context_instance=RequestContext(request))
@adagios_decorator
def bulk_copy(request):
""" Copy multiple objects with one post """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['objects'] = objects = []
c['form'] = BulkCopyForm(objects=objects)
# Newer, alternative way to input items from the post data is in the form of
# object_type=shortname
# i.e. timeperiod=24x7, timeperiod=workhours
for i in _querystring_to_objects(request.GET or request.POST):
try:
obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
if obj not in objects:
objects.append(obj)
except KeyError:
c['errors'].append(_("Could not find %(object_type)s '%(description)s'") % {'object_type': i.object_type,
'description': i.description,
})
if request.method == "GET" and len(objects) == 1:
return HttpResponseRedirect(reverse('copy_object', kwargs={'object_id': objects[0].get_id()}), )
elif request.method == "POST":
# Post items starting with "hidden_" will be displayed on the resulting web page
# Post items starting with "change_" will be modified
for i in request.POST.keys():
if i.startswith('change_'):
my_id = i[len('change_'):]
my_obj = ObjectDefinition.objects.get_by_id(my_id)
if my_obj not in objects:
objects.append(my_obj)
c['form'] = BulkCopyForm(objects=objects, data=request.POST)
if c['form'].is_valid():
try:
c['form'].save()
c['success'] = "Success"
for i in c['form'].changed_objects:
c['messages'].append(
_("Successfully copied %(object_type)s %(description)s") % {'object_type': i.object_type,
'description': i.get_description()})
except IOError, e:
c['errors'].append(e)
return render_to_response('bulk_copy.html', c, context_instance=RequestContext(request))
@adagios_decorator
def delete_object_by_shortname(request, object_type, shortname):
""" Same as delete_object() but uses object type and shortname instead of object_id
"""
obj_type = Model.string_to_class[object_type]
my_obj = obj_type.objects.get_by_shortname(shortname)
return delete_object(request, object_id=my_obj.get_id())
@adagios_decorator
def delete_object(request, object_id):
""" View to Delete a single object definition """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['object'] = my_obj = Model.ObjectDefinition.objects.get_by_id(object_id)
c['form'] = DeleteObjectForm(pynag_object=my_obj, initial=request.GET)
if request.method == 'POST':
try:
c['form'] = f = DeleteObjectForm(
pynag_object=my_obj, data=request.POST)
if f.is_valid():
f.delete()
return HttpResponseRedirect(reverse('objectbrowser') + "#" + my_obj.object_type)
except Exception, e:
c['errors'].append(e)
return render_to_response('delete_object.html', c, context_instance=RequestContext(request))
@adagios_decorator
def copy_object(request, object_id):
""" View to Copy a single object definition """
c = {}
c.update(csrf(request))
c['messages'] = []
c['errors'] = []
c['object'] = my_obj = Model.ObjectDefinition.objects.get_by_id(object_id)
if request.method == 'GET':
c['form'] = CopyObjectForm(pynag_object=my_obj, initial=request.GET)
elif request.method == 'POST':
c['form'] = f = CopyObjectForm(pynag_object=my_obj, data=request.POST)
if f.is_valid():
try:
f.save()
c['copied_objects'] = f.copied_objects
c['success'] = 'success'
except IndexError, e:
c['errors'].append(e)
return render_to_response('copy_object.html', c, context_instance=RequestContext(request))
@adagios_decorator
def add_object(request, object_type):
""" Friendly wizard on adding a new object of any particular type
"""
c = {}
c['messages'] = []
c['errors'] = []
c['object_type'] = object_type
if request.method == 'GET' and object_type == 'template':
c['form'] = AddTemplateForm(initial=request.GET)
elif request.method == 'GET':
c['form'] = AddObjectForm(object_type, initial=request.GET)
elif request.method == 'POST' and object_type == 'template':
c['form'] = AddTemplateForm(data=request.POST)
elif request.method == 'POST':
c['form'] = AddObjectForm(object_type, data=request.POST)
else:
c['errors'].append(_("Something went wrong while calling this form"))
# This is what happens in post regardless of which type of form it is
if request.method == 'POST' and 'form' in c:
# If form is valid, save object and take user to edit_object form.
if c['form'].is_valid():
c['form'].save()
object_id = c['form'].pynag_object.get_id()
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': object_id}), )
else:
c['errors'].append(_('Could not validate form input'))
return render_to_response('add_object.html', c, context_instance=RequestContext(request))
def _querystring_to_objects(dictionary):
""" Finds all nagios objects in a querystring and returns a list of objects
>>> dictionary = {'host':('localhost1', 'localhost2'),}
>>> print _querystring_to_objects
{'host':('localhost1','localhost2')}
"""
result = []
Object = namedtuple('Object', 'object_type description')
for object_type in string_to_class.keys():
objects = dictionary.getlist(object_type)
for i in objects:
obj = (Object(object_type, i))
result.append(obj)
return result
def _querydict_to_objects(request, raise_on_not_found=False):
""" Finds all object specifications in a querydict and returns a list of pynag objects
Typically this is used to name specific objects from the querystring.
Valid input in the request is either id=object_id or object_type=short_name
Arguments:
request - A django request object. Usually the data is in a querystring or POST data
- Example: host=localhost,service=localhost/Ping
raise_on_not_found - Raise ValueError if some object is not found
Returns:
List of pynag objects
"""
result = []
mydict = request.GET or request.POST
# Find everything in the querystring in the form of id=[object_ids]
for object_id in mydict.getlist('id'):
try:
my_object = ObjectDefinition.objects.get_by_id(object_id)
result.append(my_object)
except Exception, e:
if raise_on_not_found is True:
raise e
# Find everything in querystring in the form of object_type=[shortnames]
for object_type,Class in string_to_class.items():
objects = mydict.getlist(object_type)
for shortname in objects:
try:
my_object = Class.objects.get_by_shortname(shortname)
result.append(my_object)
except Exception, e:
# If a service was not found, check if it was registered in
# some unusual way
if object_type == 'service' and '/' in shortname:
host_name,service_description = shortname.split('/', 1)
result.append(_find_service(host_name, service_description))
if raise_on_not_found is True:
raise e
return result
def _find_service(host_name, service_description):
""" Returns pynag.Model.Service matching our search filter """
result = pynag.Model.Service.objects.filter(host_name__has_field=host_name, service_description=service_description)
if not result:
host = pynag.Model.Host.objects.get_by_shortname(host_name, cache_only=True)
for i in host.get_effective_services():
if i.service_description == service_description:
result = [i]
break
return result[0]
@adagios_decorator
def add_to_group(request, group_type=None, group_name=''):
""" Add one or more objects into a group
"""
c = {}
messages = []
errors = []
if not group_type:
raise Exception(_("Please include group type"))
if request.method == 'GET':
objects = _querystring_to_objects(request.GET)
elif request.method == 'POST':
objects = _querystring_to_objects(request.GET)
for i in objects:
try:
obj = pynag.Model.string_to_class[i.object_type].objects.get_by_shortname(i.description)
if group_type == 'contactgroup':
obj.add_to_contactgroup(group_name)
elif group_type == 'hostgroup':
obj.add_to_hostgroup(group_name)
elif group_type == 'servicegroup':
obj.add_to_servicegroup(group_name)
return HttpResponse("Success")
except Exception, e:
errortype = e.__dict__.get('__name__') or str(type(e))
error = str(e)
return HttpResponse(_("Failed to add object: %(errortype)s %(error)s ") % {'errortype': errortype,
'error': error,
})
return render_to_response('add_to_group.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def edit_all(request, object_type, attribute_name):
""" Edit many objects at once, changing only a single attribute
Example:
Edit notes_url of all services
"""
messages = []
errors = []
objects = Model.string_to_class.get(object_type).objects.all
    objects = map(lambda x: (x.get_shortname(), x.get(attribute_name)), objects)
return render_to_response('edit_all.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def search_objects(request, objects=None):
""" Displays a list of pynag objects, search parameters can be entered via querystring
Arguments:
objects -- List of pynag objects to show. If it is not set,
-- We will use querystring instead as search arguments
example:
/adagios/objectbrowser/search?object_type=host&host_name__startswith=foo
"""
messages = []
errors = []
if not objects:
objects = pynag.Model.ObjectDefinition.objects.filter(**request.GET)
# A special case, if no object was found, lets check if user was looking for a service
# With its host_name / service_description pair, and the service is applied to hostgroup instead
if not objects and request.GET.get('object_type') == 'service':
host_name = request.GET.get('host_name')
service_description = request.GET.get('service_description')
shortname = request.GET.get('shortname')
# If shortname was provided instead of host_name / service_description
if not host_name and not service_description and shortname:
            host_name, service_description = shortname.split('/', 1)
        # If at this point we have found a matching service, use it as the search result
        # and warn the user that it was resolved indirectly (e.g. through a hostgroup or a template)
        services = [_find_service(host_name, service_description)]
        if services != [None]:
            objects = services
            errors.append(_('This service was resolved indirectly, be careful when editing it'))
return render_to_response('search_objects.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def copy_and_edit_object(request, object_id):
""" Create a new object, and open up an edit dialog for it.
If object_id is provided, that object will be copied into this one.
"""
kwargs = {}
for k, v in request.GET.items():
if v in ('', None, 'None'):
v = None
kwargs[k] = v
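    # Querystring parameters override attributes on the copy, e.g. (illustrative)
    # ?host_name=newhost.example.com creates the copy under a different host_name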
o = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
o = o.copy(**kwargs)
o = pynag.Model.ObjectDefinition.objects.filter(shortname=o.get_shortname(), object_type=o.object_type)[0]
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': o.get_id()}))
########NEW FILE########
__FILENAME__ = forms
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
import okconfig
from adagios.misc import helpers
import re
from django.core.exceptions import ValidationError
import socket
from pynag import Model
from adagios.forms import AdagiosForm
from django.utils.translation import ugettext as _
def get_all_hosts():
return [('', _('Select a host'))] + map(lambda x: (x, x), helpers.get_host_names())
def get_all_templates():
all_templates = okconfig.get_templates()
service_templates = filter(lambda x: 'host' not in x, all_templates)
return map(lambda x: (x, _("Standard %(service_template)s checks") % {"service_template": x}), service_templates)
def get_all_groups():
return map(lambda x: (x, x), okconfig.get_groups())
def get_inactive_services():
""" List of all unregistered services (templates) """
inactive_services = [('', _('Select a service'))]
inactive_services += map(lambda x: (x.name, x.name),
Model.Service.objects.filter(service_description__contains="", name__contains="", register="0"))
inactive_services.sort()
return inactive_services
class ScanNetworkForm(AdagiosForm):
network_address = forms.CharField()
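    # clean_network_address() accepts either a single IPv4 address (e.g. "192.168.1.1")
    # or a network in CIDR notation (e.g. "192.168.1.0/24")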
def clean_network_address(self):
addr = self.cleaned_data['network_address']
if addr.find('/') > -1:
addr, mask = addr.split('/', 1)
if not mask.isdigit():
raise ValidationError(_("not a valid netmask"))
if not self.isValidIPAddress(addr):
raise ValidationError(_("not a valid ip address"))
else:
if not self.isValidIPAddress(addr):
raise ValidationError(_("not a valid ip address"))
return self.cleaned_data['network_address']
def isValidHostname(self, hostname):
if len(hostname) > 255:
return False
if hostname[-1:] == ".":
# strip exactly one dot from the right, if present
hostname = hostname[:-1]
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
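        # Each dot-separated label may be 1-63 letters, digits or hyphens and may not start or
        # end with a hyphen, e.g. "web-01.example.com" passes while "-web.example.com" does not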
for x in hostname.split("."):
if allowed.match(x) is False:
return False
return True
def isValidIPAddress(self, ipaddress):
try:
socket.inet_aton(ipaddress)
except Exception:
return False
return True
class AddGroupForm(AdagiosForm):
group_name = forms.CharField(help_text=_("Example: databases"))
alias = forms.CharField(help_text=_("Human friendly name for the group"))
force = forms.BooleanField(
required=False, help_text=_("Overwrite group if it already exists."))
class AddHostForm(AdagiosForm):
host_name = forms.CharField(help_text=_("Name of the host to add"))
address = forms.CharField(help_text=_("IP Address of this host"))
group_name = forms.ChoiceField(
initial="default", help_text=_("host/contact group to put this host in"))
templates = forms.MultipleChoiceField(
required=False, help_text=_("Add standard template of checks to this host"))
force = forms.BooleanField(
required=False, help_text=_("Overwrite host if it already exists."))
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.fields['group_name'].choices = choices = get_all_groups()
self.fields['templates'].choices = get_all_templates()
def clean(self):
cleaned_data = super(AddHostForm, self).clean()
force = self.cleaned_data.get('force')
host_name = self.cleaned_data.get('host_name')
templates = self.cleaned_data.get('templates')
for i in templates:
if i not in okconfig.get_templates().keys():
self._errors['templates'] = self.error_class(
[_('template %s was not found') % i])
if not force and host_name in okconfig.get_hosts():
self._errors['host_name'] = self.error_class(
[_('Host name already exists. Use force to overwrite')])
return cleaned_data
class AddTemplateForm(AdagiosForm):
# Attributes
host_name = forms.ChoiceField(help_text=_("Add templates to this host"))
templates = forms.MultipleChoiceField(
required=False, help_text=_("Add standard template of checks to this host"))
force = forms.BooleanField(
required=False, help_text=_("Overwrites templates if they already exist"))
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.fields['templates'].choices = get_all_templates()
self.fields['host_name'].choices = get_all_hosts()
def clean(self):
cleaned_data = super(AddTemplateForm, self).clean()
force = self.cleaned_data.get('force')
host_name = self.cleaned_data.get('host_name')
templates = self.cleaned_data.get('templates')
for i in templates:
if i not in okconfig.get_templates().keys():
self._errors['templates'] = self.error_class(
[_('template %s was not found') % i])
if not force and host_name not in okconfig.get_hosts():
self._errors['host_name'] = self.error_class(
                [_('Host name was not found. Use force to write the template anyway')])
return cleaned_data
def save(self):
host_name = self.cleaned_data['host_name']
templates = self.cleaned_data['templates']
force = self.cleaned_data['force']
self.filelist = []
for i in templates:
self.filelist += okconfig.addtemplate(
host_name=host_name, template_name=i, force=force)
class InstallAgentForm(AdagiosForm):
remote_host = forms.CharField(help_text=_("Host or ip address"))
install_method = forms.ChoiceField(
initial='ssh', help_text=_("Make sure firewalls are not blocking ports 22(for ssh) or 445(for winexe)"),
choices=[(_('auto detect'), _('auto detect')), ('ssh', 'ssh'), ('winexe', 'winexe')])
username = forms.CharField(
initial='root', help_text=_("Log into remote machine with as this user"))
password = forms.CharField(
required=False, widget=forms.PasswordInput, help_text=_("Leave empty if using kerberos or ssh keys"))
windows_domain = forms.CharField(
required=False, help_text=_("If remote machine is running a windows domain"))
class ChooseHostForm(AdagiosForm):
host_name = forms.ChoiceField(help_text=_("Select which host to edit"))
def __init__(self, service=Model.Service(), *args, **kwargs):
super(forms.Form, self).__init__(*args, **kwargs)
self.fields['host_name'].choices = get_all_hosts()
class AddServiceToHostForm(AdagiosForm):
host_name = forms.ChoiceField(
help_text=_("Select host which you want to add service check to"))
service = forms.ChoiceField(
help_text=_("Select which service check you want to add to this host"))
def __init__(self, service=Model.Service(), *args, **kwargs):
super(forms.Form, self).__init__(*args, **kwargs)
self.fields['host_name'].choices = get_all_hosts()
self.fields['service'].choices = get_inactive_services()
class EditTemplateForm(AdagiosForm):
def __init__(self, service=Model.Service(), *args, **kwargs):
self.service = service
super(forms.Form, self).__init__(*args, **kwargs)
# Run through all the all attributes. Add
# to form everything that starts with "_"
self.description = service['service_description']
fieldname = "%s::%s::%s" % (
service['host_name'], service['service_description'], 'register')
self.fields[fieldname] = forms.BooleanField(
required=False, initial=service['register'] == "1", label='register')
self.register = fieldname
macros = []
self.command_line = None
try:
self.command_line = service.get_effective_command_line()
for macro, value in service.get_all_macros().items():
                if macro.startswith('$_SERVICE') or macro.startswith('$ARG'):
macros.append(macro)
for k in sorted(macros):
fieldname = "%s::%s::%s" % (
service['host_name'], service['service_description'], k)
label = k.replace('$_SERVICE', '')
label = label.replace('_', ' ')
label = label.replace('$', '')
label = label.capitalize()
self.fields[fieldname] = forms.CharField(
required=False, initial=service.get_macro(k), label=label)
# KeyError can occur if service has an invalid check_command
except KeyError:
pass
def save(self):
for i in self.changed_data:
# Changed data comes in the format host_name::service_description::$_SERVICE_PING
# We need to change that to just __PING
field_name = i.split('::')[2]
field_name = field_name.replace('$_SERVICE', '_')
field_name = field_name.replace('$', '')
data = self.cleaned_data[i]
if field_name == 'register':
data = int(data)
self.service[field_name] = data
self.service.save()
self.service.reload_object()
# Lets also update commandline because form is being returned to the
# user
self.command_line = self.service.get_effective_command_line()
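# Illustrative sketch (hypothetical host/service values) of the field naming
# convention used by EditTemplateForm above: every field id is
# "host_name::service_description::attribute", and save() mangles the macro
# names back into object attributes, e.g.
#
#   "web01::HTTP::$_SERVICEPORT$"  ->  service['_PORT'] = <submitted value>
#   "web01::HTTP::register"        ->  service['register'] = 0 or 1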
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import okconfig
import adagios.settings
okconfig.cfg_file = adagios.settings.nagios_config
class TestOkconfig(unittest.TestCase):
def testOkconfigVerifies(self):
result = okconfig.verify()
for k, v in result.items():
self.assertTrue(v, msg=_("Failed on test: %s") % k)
def testIndexPage(self):
c = Client()
response = c.get('/okconfig/verify_okconfig')
self.assertEqual(response.status_code, 200)
def testPageLoad(self):
""" Smoketest for the okconfig views """
self.loadPage('/okconfig/addhost')
self.loadPage('/okconfig/scan_network')
self.loadPage('/okconfig/addgroup')
self.loadPage('/okconfig/addtemplate')
self.loadPage('/okconfig/addhost')
self.loadPage('/okconfig/addservice')
self.loadPage('/okconfig/install_agent')
self.loadPage('/okconfig/edit')
self.loadPage('/okconfig/edit/localhost')
self.loadPage('/okconfig/verify_okconfig')
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(e)s") % {'url': url, 'e': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
#(r'^/?$', 'okconfig_.views.index'),
(r'^/scan_network/?', 'okconfig_.views.scan_network'),
(r'^/addgroup/?', 'okconfig_.views.addgroup'),
(r'^/addtemplate/?', 'okconfig_.views.addtemplate'),
(r'^/addhost/?', 'okconfig_.views.addhost'),
(r'^/addservice/?', 'okconfig_.views.addservice'),
(r'^/install_agent/?', 'okconfig_.views.install_agent'),
(r'^/edit/?$', 'okconfig_.views.choose_host'),
(r'^/edit/(?P<host_name>.+)$', 'okconfig_.views.edit'),
(r'^/verify_okconfig/?',
'okconfig_.views.verify_okconfig'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render_to_response, redirect
from django.core import serializers
from django.http import HttpResponse, HttpResponseServerError, HttpResponseRedirect
from django.utils import simplejson
from django.core.context_processors import csrf
from django.template import RequestContext
from django.utils.translation import ugettext as _
from adagios.views import adagios_decorator
from django.core.urlresolvers import reverse
from adagios.okconfig_ import forms
import okconfig
import okconfig.network_scan
from pynag import Model
@adagios_decorator
def addcomplete(request, c=None):
""" Landing page when a new okconfig group has been added
"""
if not c:
c = {}
return render_to_response('addcomplete.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addgroup(request):
""" Add a new okconfig group
"""
c = {}
c['messages'] = []
c['errors'] = []
# If there is a problem with the okconfig setup, lets display an error
if not okconfig.is_valid():
return verify_okconfig(request)
if request.method == 'GET':
f = forms.AddGroupForm(initial=request.GET)
elif request.method == 'POST':
f = forms.AddGroupForm(request.POST)
if f.is_valid():
group_name = f.cleaned_data['group_name']
alias = f.cleaned_data['alias']
force = f.cleaned_data['force']
try:
c['filelist'] = okconfig.addgroup(
group_name=group_name, alias=alias, force=force)
c['group_name'] = group_name
return addcomplete(request, c)
except Exception, e:
c['errors'].append(_("error adding group: %s") % e)
else:
c['errors'].append(_('Could not validate input'))
else:
        raise Exception("Sorry, I only support GET or POST")
c['form'] = f
return render_to_response('addgroup.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addhost(request):
""" Add a new host from an okconfig template
"""
c = {}
c['messages'] = []
c['errors'] = []
# If there is a problem with the okconfig setup, lets display an error
if not okconfig.is_valid():
return verify_okconfig(request)
if request.method == 'GET':
f = forms.AddHostForm(initial=request.GET)
elif request.method == 'POST':
f = forms.AddHostForm(request.POST)
if f.is_valid():
host_name = f.cleaned_data['host_name']
group_name = f.cleaned_data['group_name']
address = f.cleaned_data['address']
templates = f.cleaned_data['templates']
#description = f.cleaned_data['description']
force = f.cleaned_data['force']
try:
c['filelist'] = okconfig.addhost(host_name=host_name, group_name=group_name, address=address,
force=force, templates=templates)
c['host_name'] = host_name
return addcomplete(request, c)
except Exception, e:
c['errors'].append(_("error adding host: %s") % e)
else:
c['errors'].append(_('Could not validate input'))
else:
        raise Exception("Sorry, I only support GET or POST")
c['form'] = f
return render_to_response('addhost.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addtemplate(request, host_name=None):
""" Add a new okconfig template to a host
"""
c = {}
c['messages'] = []
c['errors'] = []
# If there is a problem with the okconfig setup, lets display an error
if not okconfig.is_valid():
return verify_okconfig(request)
c['form'] = forms.AddTemplateForm(initial=request.GET)
if request.method == 'POST':
c['form'] = f = forms.AddTemplateForm(request.POST)
if f.is_valid():
try:
f.save()
c['host_name'] = host_name = f.cleaned_data['host_name']
c['filelist'] = f.filelist
c['messages'].append(
_("Template was successfully added to host."))
return HttpResponseRedirect(reverse('adagios.okconfig_.views.edit', args=[host_name]))
except Exception, e:
c['errors'].append(e)
else:
c['errors'].append(_("Could not validate form"))
return render_to_response('addtemplate.html', c, context_instance=RequestContext(request))
@adagios_decorator
def addservice(request):
""" Create a new service derived from an okconfig template
"""
c = {}
c.update(csrf(request))
c['form'] = forms.AddServiceToHostForm()
c['messages'] = []
c['errors'] = []
c['filename'] = Model.config.cfg_file
if request.method == 'POST':
c['form'] = form = forms.AddServiceToHostForm(data=request.POST)
if form.is_valid():
host_name = form.cleaned_data['host_name']
host = Model.Host.objects.get_by_shortname(host_name)
service = form.cleaned_data['service']
new_service = Model.Service()
new_service.host_name = host_name
new_service.use = service
new_service.set_filename(host.get_filename())
# new_service.reload_object()
c['my_object'] = new_service
# Add custom macros if any were specified
for k, v in form.data.items():
if k.startswith("_") or k.startswith('service_description'):
new_service[k] = v
try:
new_service.save()
return HttpResponseRedirect(reverse('edit_object', kwargs={'object_id': new_service.get_id()}))
except IOError, e:
c['errors'].append(e)
else:
c['errors'].append(_("Could not validate form"))
return render_to_response('addservice.html', c, context_instance=RequestContext(request))
@adagios_decorator
def verify_okconfig(request):
""" Checks if okconfig is properly set up. """
c = {}
c['errors'] = []
c['okconfig_checks'] = okconfig.verify()
for i in c['okconfig_checks'].values():
if i == False:
c['errors'].append(
_('There seems to be a problem with your okconfig installation'))
break
return render_to_response('verify_okconfig.html', c, context_instance=RequestContext(request))
@adagios_decorator
def install_agent(request):
""" Installs an okagent on a remote host """
c = {}
c['errors'] = []
c['messages'] = []
c['form'] = forms.InstallAgentForm(initial=request.GET)
c['nsclient_installfiles'] = okconfig.config.nsclient_installfiles
if request.method == 'POST':
c['form'] = f = forms.InstallAgentForm(request.POST)
if f.is_valid():
f.clean()
host = f.cleaned_data['remote_host']
user = f.cleaned_data['username']
passw = f.cleaned_data['password']
method = f.cleaned_data['install_method']
domain = f.cleaned_data['windows_domain']
try:
status, out, err = okconfig.install_okagent(
remote_host=host, domain=domain, username=user, password=passw, install_method=method)
c['exit_status'] = status
c['stderr'] = err
# Do a little cleanup in winexe stdout, it is irrelevant
out = out.split('\n')
c['stdout'] = []
for i in out:
if i.startswith(_('Unknown parameter encountered:')):
continue
elif i.startswith(_('Ignoring unknown parameter')):
continue
elif 'NT_STATUS_LOGON_FAILURE' in i:
c['hint'] = _("NT_STATUS_LOGON_FAILURE usually means there is a problem with username or password. Are you using correct domain ?")
elif 'NT_STATUS_DUPLICATE_NAME' in i:
c['hint'] = _("The security settings on the remote windows host might forbid logins if the host name specified does not match the computername on the server. Try again with either correct hostname or the ip address of the server.")
elif 'NT_STATUS_ACCESS_DENIED' in i:
c['hint'] = _("Please make sure that %(admin)s is a local administrator on host %(host)s") % {
'admin': user, 'host': host}
elif i.startswith('Error: Directory') and i.endswith('not found'):
c['hint'] = _("No nsclient copy found ")
c['stdout'].append(i)
c['stdout'] = '\n'.join(c['stdout'])
except Exception, e:
c['errors'].append(e)
else:
c['errors'].append(_('invalid input'))
return render_to_response('install_agent.html', c, context_instance=RequestContext(request))
@adagios_decorator
def edit(request, host_name):
""" Edit all the Service "__MACROS" for a given host """
c = {}
c['errors'] = []
c['messages'] = []
c.update(csrf(request))
c['hostname'] = host_name
c['host_name'] = host_name
c['forms'] = myforms = []
try:
c['myhost'] = Model.Host.objects.get_by_shortname(host_name)
except KeyError, e:
c['errors'].append(_("Host %s not found") % e)
return render_to_response('edittemplate.html', c, context_instance=RequestContext(request))
# Get all services of that host that contain a service_description
services = Model.Service.objects.filter(
host_name=host_name, service_description__contains='')
if request.method == 'GET':
for service in services:
myforms.append(forms.EditTemplateForm(service=service))
elif request.method == 'POST':
# All the form fields have an id of HOST::SERVICE::ATTRIBUTE
for service in services:
form = forms.EditTemplateForm(service=service, data=request.POST)
myforms.append(form)
if form.is_valid():
try:
if form.changed_data != []:
form.save()
c['messages'].append(
_("'%s' successfully saved.") % service.get_description())
except Exception, e:
c['errors'].append(
_("Failed to save service %(service)s: %(exc)s") % {'service': service.get_description(), 'exc': e})
else:
c['errors'].append(
_('invalid data in %s') % service.get_description())
c['forms'] = myforms
return render_to_response('edittemplate.html', c, context_instance=RequestContext(request))
@adagios_decorator
def choose_host(request):
"""Simple form that lets you choose one host to edit"""
c = {}
c.update(csrf(request))
if request.method == 'GET':
c['form'] = forms.ChooseHostForm(initial=request.GET)
elif request.method == 'POST':
c['form'] = forms.ChooseHostForm(data=request.POST)
if c['form'].is_valid():
host_name = c['form'].cleaned_data['host_name']
return HttpResponseRedirect(reverse("adagios.okconfig_.views.edit", args=[host_name]))
return render_to_response('choosehost.html', c, context_instance=RequestContext(request))
@adagios_decorator
def scan_network(request):
""" Scan a single network and show hosts that are alive
"""
c = {}
c['errors'] = []
if not okconfig.is_valid():
return verify_okconfig(request)
if request.method == 'GET':
if request.GET.has_key('network_address'):
initial = request.GET
else:
my_ip = okconfig.network_scan.get_my_ip_address()
network_address = "%s/28" % my_ip
initial = {'network_address': network_address}
c['form'] = forms.ScanNetworkForm(initial=initial)
elif request.method == 'POST':
c['form'] = forms.ScanNetworkForm(request.POST)
if not c['form'].is_valid():
c['errors'].append(_("could not validate form"))
else:
network = c['form'].cleaned_data['network_address']
try:
c['scan_results'] = okconfig.network_scan.get_all_hosts(
network)
for i in c['scan_results']:
i.check()
except Exception, e:
c['errors'].append(_("Error running scan"))
return render_to_response('scan_network.html', c, context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
class LiveStatusForm(forms.Form):
""" This form is used to generate a mk_livestatus query """
table = forms.ChoiceField()
columns = forms.MultipleChoiceField()
filter1 = forms.ChoiceField(required=False)
filter2 = forms.ChoiceField(required=False)
########NEW FILE########
__FILENAME__ = functions
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import pynag.Utils
from pynag.Utils import PynagError
from adagios import settings
import subprocess
from django.utils.translation import ugettext as _
def run_pnp(pnp_command, **kwargs):
""" Run a specific pnp command
Arguments:
pnp_command -- examples: image graph json xml export
host -- filter results for a specific host
srv -- filter results for a specific service
source -- Fetch a specific datasource (0,1,2,3, etc)
view -- Specific timeframe (0 = 4 hours, 1 = 25 hours, etc)
Returns:
Results as they appear from pnp's index.php
Raises:
PynagError if command could not be run
"""
try:
pnp_path = settings.pnp_path
except Exception, e1:
pnp_path = find_pnp_path()
# Cleanup kwargs
pnp_arguments = {}
for k, v in kwargs.items():
k = str(k)
if isinstance(v, list):
v = v[0]
v = str(v)
pnp_arguments[k] = v
querystring = '&'.join(map(lambda x: "%s=%s" % x, pnp_arguments.items()))
pnp_parameters = pnp_command + "?" + querystring
command = ['php', pnp_path, pnp_parameters]
proc = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,)
stdout, stderr = proc.communicate('through stdin to stdout')
result = proc.returncode, stdout, stderr
return result[1]
def find_pnp_path():
""" Look through common locations of pnp4nagios, tries to locate it automatically """
possible_paths = [settings.pnp_filepath]
possible_paths += [
"/usr/share/pnp4nagios/html/index.php",
"/usr/share/nagios/html/pnp4nagios/index.php"
]
for i in possible_paths:
if os.path.isfile(i):
return i
raise PynagError(
_("Could not find pnp4nagios/index.php. Please specify it in adagios->settings->PNP. Tried %s") % possible_paths)
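# A minimal usage sketch for run_pnp(), assuming a working pnp4nagios
# installation reachable through settings.pnp_path / find_pnp_path(); the
# host and service names below are hypothetical.
if __name__ == '__main__':
    # Fetch the available graph sources as JSON and one rendered PNG image.
    graph_json = run_pnp('json', host='localhost')
    graph_png = run_pnp('image', host='localhost', srv='Ping', view=1)
    print "json: %s bytes, png: %s bytes" % (len(graph_json), len(graph_png))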
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Parsers
from adagios.settings import nagios_config
from adagios.pnp import functions
class PNP4NagiosTestCase(unittest.TestCase):
def testPnpIsConfigured(self):
config = pynag.Parsers.config()
config.parse_maincfg()
for k, v in config.maincfg_values:
if k == "broker_module" and v.find('npcd') > 1:
tmp = v.split()
self.assertFalse(
len(tmp) < 2, _('We think pnp4nagios broker module is incorrectly configured. In nagios.cfg it looks like this: %s') % v)
module_file = tmp.pop(0)
self.assertTrue(
os.path.exists(module_file), _('npcd broker_module module not found at "%s". Is nagios correctly configured?') % module_file)
config_file = None
for i in tmp:
if i.startswith('config_file='):
config_file = i.split('=', 1)[1]
break
self.assertIsNotNone(
config_file, _("npcd broker module has no config_file= argument. Is pnp4nagios correctly configured?"))
self.assertTrue(
os.path.exists(config_file), _('PNP4nagios config file was not found (%s).') % config_file)
return
self.assertTrue(
False, _('Nagios Broker module not found. Is pnp4nagios installed and configured?'))
def testGetJson(self):
result = functions.run_pnp('json')
self.assertGreaterEqual(
len(result), 0, msg=_("Tried to get json from pnp4nagios but result was improper"))
def testPageLoad(self):
c = Client()
response = c.get('/pnp/json')
self.assertEqual(response.status_code, 200)
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
(r'^/(?P<pnp_command>.+)?$', 'pnp.views.pnp'),
)
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.context_processors import csrf
from django.shortcuts import render_to_response
from django.shortcuts import HttpResponse
from adagios.pnp.functions import run_pnp
from adagios.views import adagios_decorator
import json
@adagios_decorator
def pnp(request, pnp_command='image'):
c = {}
c['messages'] = []
c['errors'] = []
result = run_pnp(pnp_command, **request.GET)
mimetype = "text"
if pnp_command == 'image':
mimetype = "image/png"
elif pnp_command == 'json':
mimetype = "application/json"
return HttpResponse(result, mimetype)
########NEW FILE########
__FILENAME__ = profiling
#!/usr/bin/env python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Tomas Edwardsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Code from https://code.djangoproject.com/wiki/ProfilingDjango
# Documentation at
# https://github.com/opinkerfi/adagios/wiki/Profiling-Decorators-within-Adagios
import hotshot
import os
import time
import settings
import tempfile
import random
try:
PROFILE_LOG_BASE = settings.PROFILE_LOG_BASE
except:
PROFILE_LOG_BASE = tempfile.gettempdir()
def profile(log_file):
"""Profile some callable.
This decorator uses the hotshot profiler to profile some callable (like
a view function or method) and dumps the profile data somewhere sensible
for later processing and examination.
It takes one argument, the profile log name. If it's a relative path, it
places it under the PROFILE_LOG_BASE. It also inserts a time stamp into the
file name, such that 'my_view.prof' become 'my_view-20100211T170321.prof',
where the time stamp is in UTC. This makes it easy to run and compare
multiple trials.
"""
if not os.path.isabs(log_file):
log_file = os.path.join(PROFILE_LOG_BASE, log_file)
def _outer(f):
def _inner(*args, **kwargs):
# Add a timestamp to the profile output when the callable
# is actually called.
(base, ext) = os.path.splitext(log_file)
base = base + "-" + time.strftime("%Y%m%dT%H%M%S", time.gmtime()) + str(random.randint(1,9999))
final_log_file = base + ext
prof = hotshot.Profile(final_log_file)
try:
ret = prof.runcall(f, *args, **kwargs)
finally:
prof.close()
return ret
return _inner
return _outer
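# Hedged usage sketch for the decorator above; the view name and log file
# name are hypothetical:
#
#   from adagios.profiling import profile
#
#   @profile("my_view.prof")
#   def my_view(request):
#       ...
#
# Each call then dumps a hotshot profile like my_view-20100211T170321<rand>.prof
# under PROFILE_LOG_BASE for later inspection.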
########NEW FILE########
__FILENAME__ = models
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
# Create your models here.
########NEW FILE########
__FILENAME__ = objectbrowser
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Temporary wrapper around pynag helpers script
from adagios.misc.helpers import *
########NEW FILE########
__FILENAME__ = status
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This is a wrapper around the rest functionality that exists in
# the status view. We like to keep the actual implementations there
# because we like to keep code close to its apps.
from adagios.status.rest import *
########NEW FILE########
__FILENAME__ = tests
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Unit tests for the adagios.rest app. Run them with "manage.py test".
"""
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import json
class LiveStatusTestCase(unittest.TestCase):
def testPageLoad(self):
""" Smoke Test for various rest modules """
self.loadPage('/rest')
self.loadPage('/rest/status/')
self.loadPage('/rest/pynag/')
self.loadPage('/rest/adagios/')
self.loadPage('/rest/status.js')
self.loadPage('/rest/pynag.js')
self.loadPage('/rest/adagios.js')
def testDnsLookup(self):
""" Test the DNS lookup rest call
"""
path = "/rest/pynag/json/dnslookup"
data = {'host_name': 'localhost'}
try:
c = Client()
response = c.post(path=path, data=data)
json_data = json.loads(response.content)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % path)
self.assertEqual(True, 'addresslist' in json_data, _("Expected 'addresslist' to appear in response"))
except KeyError, e:
self.assertEqual(True, _("Unhandled exception while loading %(path)s: %(exc)s") % {'path': path, 'exc': e})
def loadPage(self, url):
""" Load one specific page, and assert if return code is not 200 """
try:
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, 200, _("Expected status code 200 for page %s") % url)
except Exception, e:
self.assertEqual(True, _("Unhandled exception while loading %(url)s: %(exc)s") % {'url': url, 'exc': e})
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
url(r'^/?$', 'rest.views.list_modules'),
)
# Example:
# rest_modules['module_name'] = 'module_path'
# will make /adagios/rest/module_name/ available and it loads all
# functions from 'module_path'
rest_modules = {}
rest_modules['pynag'] = 'adagios.misc.helpers'
rest_modules['okconfig'] = 'okconfig'
rest_modules['status'] = 'adagios.rest.status'
rest_modules['adagios'] = 'adagios.misc.rest'
# We are going to generate some url patterns. For clarification, here is the end result for the status module:
#url(r'^/status/$', 'rest.views.index', { 'module_name': 'adagios.rest.status' }, name="rest/status"),
#url(r'^/status.js$', 'rest.views.javascript', { 'module_name': 'adagios.rest.status' }, ),
#(r'^/status/(?P<format>.+?)/(?P<attribute>.+?)/?$', 'rest.views.handle_request', { 'module_name': 'adagios.rest.status' }),
for module_name, module_path in rest_modules.items():
base_pattern = r'^/%s' % module_name
args = {'module_name': module_name, 'module_path': module_path}
urlpatterns += patterns('adagios',
url(base_pattern + '/$', 'rest.views.index', args, name="rest/%s" % module_name),
url(base_pattern + '.js$', 'rest.views.javascript', args, ),
url(base_pattern + '/(?P<format>.+?)/(?P<attribute>.+?)/?$', 'rest.views.handle_request', args),
)
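# Hedged example: exposing one more module only requires an extra entry in
# rest_modules before the loop above, e.g. a hypothetical
#
#   rest_modules['myplugin'] = 'adagios.myplugin.rest'
#
# which would publish /rest/myplugin/, /rest/myplugin.js and
# /rest/myplugin/<format>/<function>/ without further url configuration.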
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Create your views here.
from django.shortcuts import render_to_response, redirect
from django.core import serializers
from django.http import HttpResponse, HttpResponseServerError
from django.utils import simplejson
#from django.core.context_processors import csrf
from django.views.decorators.csrf import csrf_exempt
from django.template import RequestContext
from django.core.urlresolvers import resolve
from adagios.views import adagios_decorator
from django.utils.translation import ugettext as _
import inspect
from django import forms
import os
my_module = None
import adagios.rest.urls
def _load(module_path):
#global my_module
# if not my_module:
my_module = __import__(module_path, None, None, [''])
return my_module
@csrf_exempt
@adagios_decorator
def handle_request(request, module_name, module_path, attribute, format):
m = _load(module_path)
# TODO: Only allow function calls if method == POST
members = {}
for k, v in inspect.getmembers(m):
members[k] = v
item = members[attribute]
docstring = inspect.getdoc(item)
if request.method == 'GET':
if format == 'help':
result = inspect.getdoc(item)
elif not inspect.isfunction(item):
result = item
else:
arguments = request.GET
c = {}
c['function_name'] = attribute
c['form'] = CallFunctionForm(function=item, initial=request.GET)
c['docstring'] = docstring
c['module_name'] = module_name
if not request.GET.items():
return render_to_response('function_form.html', c, context_instance=RequestContext(request))
# Handle get parameters
arguments = {}
for k, v in request.GET.items():
# TODO: Is it safe to turn all digits to int ?
#if str(v).isdigit(): v = int(float(v))
arguments[k.encode('utf-8')] = v.encode('utf-8')
            # Here is a special hack: if the method we are calling has an
            # argument called "request" we will not let the remote user ship
            # it in. Instead we give it a django request object.
if 'request' in inspect.getargspec(item)[0]:
arguments['request'] = request
result = item(**arguments)
elif request.method == 'POST':
item = members[attribute]
if not inspect.isfunction(item):
result = item
else:
arguments = {} # request.POST.items()
for k, v in request.POST.items():
arguments[k.encode('utf-8')] = v.encode('utf-8')
            # Here is a special hack: if the method we are calling has an
            # argument called "request" we will not let the remote user ship
            # it in. Instead we give it a django request object.
if 'request' in inspect.getargspec(item)[0]:
arguments['request'] = request
result = item(**arguments)
else:
raise BaseException(_("Unsupported operation: %s") % (request.method, ))
# Everything below is just about formatting the results
if format == 'json':
result = simplejson.dumps(
result, ensure_ascii=False, sort_keys=True, skipkeys=True, indent=4)
mimetype = 'application/javascript'
elif format == 'xml':
# TODO: For some reason Ubuntu does not have this module. Where is
# it? Should we use lxml instead ?
import xml.marshal.generic
result = xml.marshal.generic.dumps(result)
mimetype = 'application/xml'
elif format == 'txt':
result = str(result)
mimetype = 'text/plain'
else:
raise BaseException(
_("Unsupported format: '%s'. Valid formats: json xml txt") %
format)
return HttpResponse(result, mimetype=mimetype)
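# Hedged dispatch example for handle_request() above, using the dnslookup
# helper that adagios.rest.tests exercises:
#
#   POST /rest/pynag/json/dnslookup   with   host_name=localhost
#
# resolves module_path 'adagios.misc.helpers', calls
# dnslookup(host_name='localhost') and returns the result serialized with
# simplejson as application/javascript.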
@adagios_decorator
def list_modules(request):
""" List all available modules and their basic info
"""
rest_modules = adagios.rest.urls.rest_modules
return render_to_response('list_modules.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def index(request, module_name, module_path):
""" This view is used to display the contents of a given python module
"""
m = _load(module_path)
gets, puts = [], []
blacklist = ('argv', 'environ', 'exit', 'path', 'putenv', 'getenv', )
for k, v in inspect.getmembers(m):
if k.startswith('_'):
continue
if k in blacklist:
continue
if inspect.ismodule(v):
continue
elif inspect.isfunction(v):
puts.append(k)
else:
gets.append(k)
c = {}
c['module_path'] = module_path
c['gets'] = gets
c['puts'] = puts
c['module_documenation'] = inspect.getdoc(m)
return render_to_response('index.html', c, context_instance=RequestContext(request))
def javascript(request, module_name, module_path):
""" Create a javascript library that will wrap around module_path module """
m = _load(module_path)
variables, functions = [], []
blacklist = ('argv', 'environ', 'exit', 'path', 'putenv', 'getenv', )
members = {}
for k, v in inspect.getmembers(m):
if k.startswith('_'):
continue
if k in blacklist:
continue
if inspect.ismodule(v):
continue
if inspect.isfunction(v):
functions.append(k)
members[k] = v
else:
variables.append(k)
c = {}
c['module_path'] = module_path
c['module_name'] = module_name
c['gets'] = variables
c['puts'] = functions
c['module_documenation'] = inspect.getdoc(m)
current_url = request.get_full_path()
baseurl = current_url.replace('.js', '')
# Find every function, prepare what is needed so template can
for i in functions:
argspec = inspect.getargspec(members[i])
args, varargs, varkw, defaults = argspec
docstring = inspect.getdoc(members[i])
if defaults is None:
defaults = []
else:
defaults = list(defaults)
# Lets create argstring, for the javascript needed
tmp = [] + args
argstring = []
for num, default in enumerate(reversed(defaults)):
argstring.append('%s=%s' % (tmp.pop(), default))
argstring.reverse()
argstring = tmp + argstring
members[i] = {}
members[i]['args'] = args
members[i]['argstring'] = ','.join(args)
members[i]['varargs'] = varargs
members[i]['varkw'] = varkw
members[i]['defaults'] = defaults
members[i]['docstring'] = docstring
members[i]['url'] = baseurl + "/json/" + i
args, varargs, varkw, defaults = argspec
c['functions'] = members
return render_to_response('javascript.html', c, mimetype="text/javascript", context_instance=RequestContext(request))
class CallFunctionForm(forms.Form):
def __init__(self, function, *args, **kwargs):
super(CallFunctionForm, self).__init__(*args, **kwargs)
        # We will create a field for every function parameter
        function_paramaters = {}
        # If any parameters were passed via the querystring, let's generate
        # fields for them
if kwargs.has_key('initial'):
for k, v in kwargs['initial'].items():
function_paramaters[k] = v
# Generate fields which resemble our functions default arguments
argspec = inspect.getargspec(function)
args, varargs, varkw, defaults = argspec
self.show_kwargs = varkw is not None
# We treat the argument 'request' as special. Django request object is going to be
# passed instead of whatever the user wanted
if "request" in args:
args.remove('request')
if defaults is None:
defaults = []
else:
defaults = list(defaults)
for i in args:
self.fields[i] = forms.CharField(label=i)
for k, v in function_paramaters.items():
self.fields[k] = forms.CharField(label=k, initial=v)
while len(defaults) > 0:
value = defaults.pop()
field = args.pop()
self.fields[field].initial = value
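# Illustrative sketch for the form above (the function below is hypothetical):
#
#   def acknowledge(host_name, sticky=1, comment='acked from adagios'):
#       ...
#
# CallFunctionForm would render three CharFields named host_name, sticky and
# comment, with 1 and 'acked from adagios' pre-filled as initial values.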
########NEW FILE########
__FILENAME__ = settings
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Django settings for adagios project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
USE_TZ = True
# Hack to allow relative template paths
import os
from glob import glob
from warnings import warn
import string
djangopath = os.path.dirname(__file__)
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/test',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
# TIME_ZONE = 'Atlantic/Reykjavik'
TIME_ZONE = None
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = "%s/media/" % (djangopath)
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = 'media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'adagios.auth.AuthorizationMiddleWare',
#'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.messages.middleware.MessageMiddleware',
)
SESSION_ENGINE = 'django.contrib.sessions.backends.file'
LANGUAGES = (
('en', 'English'),
('fr', 'French'),
)
LOCALE_PATHS = (
"%s/locale/" % (djangopath),
)
ROOT_URLCONF = 'adagios.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
"%s/templates" % (djangopath),
)
INSTALLED_APPS = [
#'django.contrib.auth',
#'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
#'django.contrib.messages',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'adagios.objectbrowser',
'adagios.rest',
'adagios.misc',
'adagios.pnp',
'adagios.contrib',
]
TEMPLATE_CONTEXT_PROCESSORS = ('adagios.context_processors.on_page_load',
#"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages")
# Themes options #
# To rapidly switch your theme, update THEME_DEFAULT and leave the rest.
# folders in which themes files will be looked up
THEMES_FOLDER = 'themes' # in 'media/'
# default theme in use, it should be present in the THEMES_FOLDER
# (or at least through a symbolic link)
THEME_DEFAULT = 'default'
# CSS entry-point, in the theme folder
THEME_ENTRY_POINT = 'style.css'
# folder where users preferences are stored
USER_PREFS_PATH = "/etc/adagios/userdata/"
# name displayed in the top left corner
TOPMENU_HOME = 'Adagios'
# items in the top menubar (excluding those coming from %s_menubar.html)
# The identifier is used to recognize active links (which are displayed
# differently).
# The view can begin with '/' (and will go to http://server/...)
# or can be a view name.
# See Nagvis example for direct link, though the template contrib/nagvis.html must be created.
TOPMENU_ITEMS = [
# Name, identifier, view_url, icon
# ('Nagvis', 'nagvis', '/contrib/nagvis.html', 'glyph-display'),
('Configure', 'objectbrowser', 'objectbrowser.views.list_object_types', 'glyph-edit'),
('Nagios', 'nagios', 'misc.views.nagios', 'glyph-list'),
]
# Graphite #
# the url where to fetch data and images
graphite_url = "http://localhost:9091"
# time ranges for generated graphs
# the CSS identifier only needs to be unique here (it will be prefixed)
GRAPHITE_PERIODS = [
# Displayed name, CSS identifier, Graphite period
('4 hours', 'hours', '-4h'),
('One day', 'day', '-1d'),
('One week', 'week', '-1w'),
('One month', 'month', '-1mon'),
('One year', 'year', '-1y'),
]
# querystring that will be passed on to graphite's render method.
graphite_querystring = "target={host_}.{service_}.{metric_}&width=500&height=200&from={from_}d&lineMode=connected&title={title}&target={host_}.{service_}.{metric_}_warn&target={host_}.{service_}.{metric_}_crit"
# Title format to use on all graphite graphs
graphite_title = "{host} - {service} - {metric}"
# default selected (active) tab, and the one rendered in General-preview
GRAPHITE_DEFAULT_TAB = 'day'
# Adagios specific configuration options. These are just the defaults,
# Anything put in /etc/adagios.d/adagios.conf will overwrite this.
nagios_config = None # Sensible default is "/etc/nagios/nagios.cfg"
nagios_url = "/nagios"
nagios_init_script = "/etc/init.d/nagios"
nagios_binary = "/usr/bin/nagios"
livestatus_path = None
enable_githandler = False
enable_loghandler = False
enable_authorization = False
enable_status_view = True
enable_bi = True
enable_graphite = False
contrib_dir = "/var/lib/adagios/contrib/"
serverside_includes = "/etc/adagios/ssi"
escape_html_tags = True
warn_if_selinux_is_active = True
destination_directory = "/etc/nagios/adagios/"
administrators = "nagiosadmin,@users"
pnp_url = "/pnp4nagios"
pnp_filepath = "/usr/share/nagios/html/pnp4nagios/index.php"
include = ""
django_secret_key = ""
map_center = "64.119595,-21.655426"
map_zoom = "10"
title_prefix = "Adagios - "
auto_reload = False
refresh_rate = "30"
plugins = {}
# Profiling settings
#
# You can use the @profile("filename") to profile single functions within
# adagios. Not enabled by default on any function.
#
# Documenations at
# https://github.com/opinkerfi/adagios/wiki/Profiling-Decorators-within-Adagios
PROFILE_LOG_BASE = "/var/lib/adagios"
# Load config files from /etc/adagios
# Adagios uses the configuration file in /etc/adagios/adagios.conf by default.
# If it doesn't exist you should create it. Otherwise an adagios.conf will be
# created in the django project root, which should be avoided.
adagios_configfile = "/etc/adagios/adagios.conf"
try:
if not os.path.exists(adagios_configfile):
alternative_adagios_configfile = "%s/adagios.conf" % djangopath
message = "Config file '{adagios_configfile}' not found. Using {alternative_adagios_configfile} instead."
warn(message.format(**locals()))
adagios_configfile = alternative_adagios_configfile
open(adagios_configfile, "a").close()
execfile(adagios_configfile)
# if config has any default include, lets include that as well
configfiles = glob(include)
for configfile in configfiles:
execfile(configfile)
except IOError, e:
warn('Unable to open %s: %s' % (adagios_configfile, e.strerror))
try:
from django.utils.crypto import get_random_string
except ImportError:
def get_random_string(length, stringset=string.ascii_letters + string.digits + string.punctuation):
'''
Returns a string with `length` characters chosen from `stringset`
>>> len(get_random_string(20)) == 20
'''
return ''.join([stringset[i % len(stringset)] for i in [ord(x) for x in os.urandom(length)]])
if not django_secret_key:
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
SECRET_KEY = get_random_string(50, chars)
try:
        data = "\n# Automatically generated secret_key\ndjango_secret_key = '%s'\n" % SECRET_KEY
with open(adagios_configfile, "a") as config_fh:
config_fh.write(data)
except Exception, e:
warn("ERROR: Got %s while trying to save django secret_key in %s" % (type(e), adagios_configfile))
else:
SECRET_KEY = django_secret_key
ALLOWED_INCLUDE_ROOTS = (serverside_includes,)
if enable_status_view:
plugins['status'] = 'adagios.status'
if enable_bi:
plugins['bi'] = 'adagios.bi'
for k, v in plugins.items():
INSTALLED_APPS.append(v)
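# Hedged example: a third party plugin could be enabled from
# /etc/adagios/adagios.conf (or any file matched by 'include') with a
# hypothetical line such as
#
#   plugins['myplugin'] = 'adagios.myplugin'
#
# and the loop above appends it to INSTALLED_APPS on startup.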
import adagios.profiling
# default preferences, for new users or when they are not available
PREFS_DEFAULT = {
'language': 'en',
'theme': THEME_DEFAULT,
'refresh_rate': refresh_rate
}
########NEW FILE########
__FILENAME__ = forms
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from django.utils.translation import ugettext as _
import adagios.status.utils
import adagios.businessprocess
class LiveStatusForm(forms.Form):
""" This form is used to generate a mk_livestatus query """
table = forms.ChoiceField()
columns = forms.MultipleChoiceField()
filter1 = forms.ChoiceField(required=False)
filter2 = forms.ChoiceField(required=False)
class RemoveSubProcessForm(forms.Form):
""" Remove one specific sub process from a business process
"""
process_name = forms.CharField(max_length=100, required=True)
process_type = forms.CharField(max_length=100, required=True)
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(RemoveSubProcessForm, self).__init__(*args, **kwargs)
def save(self):
process_name = self.cleaned_data.get('process_name')
process_type = self.cleaned_data.get('process_type')
self.bp.remove_process(process_name, process_type)
self.bp.save()
status_method_choices = map(
lambda x: (x, x), adagios.businessprocess.BusinessProcess.status_calculation_methods)
class BusinessProcessForm(forms.Form):
""" Use this form to edit a BusinessProcess """
name = forms.CharField(max_length=100, required=True,
help_text=_("Unique name for this business process."))
#processes = forms.CharField(max_length=100, required=False)
display_name = forms.CharField(max_length=100, required=False,
help_text=_("This is the name that will be displayed to users on this process. Usually it is the name of the system this business group represents."))
notes = forms.CharField(max_length=1000, required=False,
help_text=_("Here you can put in any description of the business process you are adding. Its a good idea to write down what the business process is about and who to contact in case of downtimes."))
status_method = forms.ChoiceField(
choices=status_method_choices, help_text=_("Here you can choose which method is used to calculate the global status of this business process"))
state_0 = forms.CharField(max_length=100, required=False,
help_text=_("Human friendly text for this respective state. You can type whatever you want but nagios style exit codes indicate that 0 should be 'ok'"))
state_1 = forms.CharField(max_length=100, required=False,
help_text=_("Typically used to represent warning or performance problems"))
state_2 = forms.CharField(max_length=100, required=False,
help_text=_("Typically used to represent critical status"))
state_3 = forms.CharField(
max_length=100, required=False, help_text=_("Use this when status is unknown"))
#graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
#graphs = models.ManyToManyField(BusinessProcess, unique=False, blank=True)
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(BusinessProcessForm, self).__init__(*args, **kwargs)
def save(self):
c = self.cleaned_data
self.bp.data.update(c)
self.bp.save()
def remove(self):
c = self.data
process_name = c.get('process_name')
process_type = c.get('process_type')
if process_type == 'None':
process_type = None
self.bp.remove_process(process_name, process_type)
self.bp.save()
def clean(self):
cleaned_data = super(BusinessProcessForm, self).clean()
# If name has changed, look if there is another business process with
# same name.
new_name = cleaned_data.get('name')
if new_name and new_name != self.bp.name:
if new_name in adagios.businessprocess.get_all_process_names():
raise forms.ValidationError(
_("Cannot rename process to %s. Another process with that name already exists") % new_name
)
return cleaned_data
def delete(self):
""" Delete this business process """
self.bp.delete()
def add_process(self):
process_name = self.data.get('process_name')
hostgroup_name = self.data.get('hostgroup_name')
servicegroup_name = self.data.get('servicegroup_name')
service_name = self.data.get('service_name')
if process_name:
self.bp.add_process(process_name, None)
if hostgroup_name:
self.bp.add_process(hostgroup_name, None)
if servicegroup_name:
self.bp.add_process(servicegroup_name, None)
if service_name:
self.bp.add_process(service_name, None)
self.bp.save()
choices = 'businessprocess', 'hostgroup', 'servicegroup', 'service', 'host'
process_type_choices = map(lambda x: (x, x), choices)
class AddSubProcess(forms.Form):
process_type = forms.ChoiceField(choices=process_type_choices)
process_name = forms.CharField(
widget=forms.HiddenInput(attrs={'style': "width: 300px;"}), max_length=100)
display_name = forms.CharField(max_length=100, required=False)
tags = forms.CharField(
max_length=100, required=False, initial="not critical")
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(AddSubProcess, self).__init__(*args, **kwargs)
def save(self):
self.bp.add_process(**self.cleaned_data)
self.bp.save()
class AddHostgroupForm(forms.Form):
pass
class AddGraphForm(forms.Form):
host_name = forms.CharField(max_length=100,)
service_description = forms.CharField(max_length=100, required=False)
metric_name = forms.CharField(max_length=100, required=True)
notes = forms.CharField(max_length=100, required=False,
help_text=_("Put here a friendly description of the graph"))
def __init__(self, instance, *args, **kwargs):
self.bp = instance
super(AddGraphForm, self).__init__(*args, **kwargs)
def save(self):
self.bp.add_pnp_graph(**self.cleaned_data)
self.bp.save()
########NEW FILE########
__FILENAME__ = graphite
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Matthieu Caneill <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import adagios.settings
ILLEGAL_CHAR = re.compile(r'[^\w-]')
def _get_graphite_url(base, host, service, metric, from_):
""" Constructs an URL for Graphite.
Args:
- base (str): base URL for Graphite access
- host (str): hostname
- service (str): service, e.g. HTTP
- metric (str): metric, e.g. size, time
- from_ (str): Graphite time period
Returns: str
"""
host_ = _compliant_name(host)
service_ = _compliant_name(service)
metric_ = _compliant_name(metric)
base = base.rstrip('/')
title = adagios.settings.graphite_title.format(**locals())
url = "{base}/render?" + adagios.settings.graphite_querystring
url = url.format(**locals())
return url
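# Illustrative sketch of how _get_graphite_url() composes a render URL. The host,
# service and metric names below are hypothetical, and the exact result depends on
# adagios.settings.graphite_querystring and graphite_title:
#
#   _get_graphite_url("http://graphite.example.com/", "web01", "HTTP", "time", "-1d")
#   # -> "http://graphite.example.com/render?..." with the sanitized names
#   #    substituted into the configured querystring template.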
def _compliant_name(name):
""" Makes the necessary replacements for Graphite. """
if name == '_HOST_':
return '__HOST__'
name = ILLEGAL_CHAR.sub('_', name)
return name
def get(base, host, service, metrics, units):
""" Returns a data structure containg URLs for Graphite.
The structure looks like:
[{'name': 'One day',
'css_id' : 'day',
'metrics': {'size': 'http://url-of-size-metric',
'time': 'http://url-of-time-metric'}
},
{...}]
Args:
- base (str): base URL for Graphite access
- host (str): hostname
- service (str): service, e.g. HTTP
- metrics (list): list of metrics, e.g. ["size", "time"]
- units (list): a list of <name,css_id,unit>,
see adagios.settings.GRAPHITE_PERIODS
Returns: list
"""
graphs = []
for name, css_id, unit in units:
m = {}
for metric in metrics:
m[metric] = _get_graphite_url(base, host, service, metric, unit)
graph = dict(name=name, css_id=css_id, metrics=m)
graphs.append(graph)
return graphs
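# Illustrative usage of get(). The base URL, host and service are hypothetical and
# the units list follows the (name, css_id, unit) convention described above:
#
#   units = [("One day", "day", "-1d"), ("One week", "week", "-7d")]
#   graphs = get("http://graphite.example.com/", "web01", "HTTP", ["time", "size"], units)
#   # graphs[0]["metrics"]["time"] then holds the render URL for the "time" metric
#   # over the last day.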
########NEW FILE########
__FILENAME__ = models
########NEW FILE########
__FILENAME__ = rest
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Convenient stateless functions for the status module. These are meant for programs to interact
with status of Nagios.
"""
import time
import pynag.Control.Command
import pynag.Model
import pynag.Utils
import adagios.status.utils
import pynag.Parsers
import collections
from django.utils.translation import ugettext as _
from adagios import userdata
def hosts(request, fields=None, **kwargs):
""" Get List of hosts. Any parameters will be passed straight throught to pynag.Utils.grep()
Arguments:
fields -- If specified, a list of attributes to return. If unspecified, all fields are returned.
Any **kwargs will be treated as a pynag.Utils.grep()-style filter
"""
return adagios.status.utils.get_hosts(request=request, fields=fields, **kwargs)
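# Illustrative pynag.Utils.grep() style filters accepted by hosts() and services().
# The host names and fields below are hypothetical:
#
#   hosts(request, fields=['name', 'state'], state=1)      # only hosts that are down
#   hosts(request, name__contains='web')                   # substring match on host name
#   services(request, host_name='web01', state__isnot=0)   # non-ok services on one host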
def services(request, fields=None, **kwargs):
""" Similar to hosts(), is a wrapper around adagios.status.utils.get_services()
"""
return adagios.status.utils.get_services(request=request, fields=fields, **kwargs)
def services_dt(request, fields=None, **kwargs):
""" Similar to hosts(), is a wrapper around adagios.status.utils.get_services()
"""
services = adagios.status.utils.get_services(request=request, fields='host_name,description')
result = {
'sEcho': len(services),
'iTotalRecords': len(services),
'aaData': []
}
for service in services:
result['aaData'].append(service.values())
return result
def contacts(request, fields=None, *args, **kwargs):
""" Wrapper around pynag.Parsers.mk_livestatus.get_contacts()
"""
l = adagios.status.utils.livestatus(request)
return l.get_contacts(*args, **kwargs)
def emails(request, *args, **kwargs):
""" Returns a list of all emails of all contacts
"""
l = adagios.status.utils.livestatus(request)
return map(lambda x: x['email'], l.get_contacts('Filter: email !='))
def acknowledge_many(hostlist, servicelist, sticky=1, notify=1, persistent=0, author="adagios", comment="acknowledged by Adagios"):
""" Same as acknowledge, but for acknowledge on many hosts services at a time.
Arguments:
hostlist -- string in the format of host1;host2;host3
servicelist -- string in the format of host1,service1;host2,service2
"""
items = []
for i in hostlist.split(';'):
if not i: continue
items.append((i, None))
for i in servicelist.split(';'):
if not i: continue
host_name,service_description = i.split(',')
items.append((host_name, service_description))
for i in items:
acknowledge(
host_name=i[0],
service_description=i[1],
sticky=sticky,
notify=notify,
persistent=persistent,
author=author,
comment=comment
)
return _("Success")
def acknowledge(host_name, service_description=None, sticky=1, notify=1, persistent=0, author='adagios', comment='acknowledged by Adagios'):
""" Acknowledge one single host or service check """
if service_description in (None, '', u'', '_HOST_'):
pynag.Control.Command.acknowledge_host_problem(host_name=host_name,
sticky=sticky,
notify=notify,
persistent=persistent,
author=author,
comment=comment,
)
else:
pynag.Control.Command.acknowledge_svc_problem(host_name=host_name,
service_description=service_description,
sticky=sticky,
notify=notify,
persistent=persistent,
author=author,
comment=comment,
)
def downtime_many(hostlist, servicelist, hostgrouplist, start_time=None, end_time=None, fixed=1, trigger_id=0, duration=7200, author='adagios', comment='Downtime scheduled by adagios', all_services_on_host=False, hostgroup_name=None):
""" Same as downtime, but for acknowledge on many hosts services at a time.
Arguments:
hostlist -- string in the format of host1;host2;host3
hostgrouplist -- string in the format of hostgroup1;hostgroup2;hostgroup3
servicelist -- string in the format of host1,service1;host2,service2
"""
items = []
for i in hostlist.split(';'):
if not i: continue
items.append((i, None, None))
for i in hostgrouplist.split(';'):
if not i: continue
items.append((None, None, i))
for i in servicelist.split(';'):
if not i: continue
host_name, service_description = i.split(',')
items.append((host_name, service_description, None))
for i in items:
host_name = i[0]
service_description = i[1]
hostgroup_name = i[2]
downtime(
host_name=host_name,
service_description=service_description,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
all_services_on_host=all_services_on_host,
hostgroup_name=hostgroup_name
)
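# Illustrative call to downtime_many(), using the same semicolon separated formats.
# Names are hypothetical and duration is given in seconds:
#
#   downtime_many(hostlist='web01;web02',
#                 servicelist='db01,Disk Usage',
#                 hostgrouplist='production-web',
#                 duration=3600, author='admin',
#                 comment='scheduled maintenance')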
def downtime(host_name=None, service_description=None, start_time=None, end_time=None, fixed=1, trigger_id=0, duration=7200, author='adagios', comment='Downtime scheduled by adagios', all_services_on_host=False, hostgroup_name=None):
""" Schedule downtime for a host or a service """
if fixed in (1, '1') and start_time in (None, ''):
start_time = time.time()
if fixed in (1, '1') and end_time in (None, ''):
end_time = int(start_time) + int(duration)
if all_services_on_host == 'false':
all_services_on_host = False
elif all_services_on_host == 'true':
all_services_on_host = True
# Check if we are supposed to schedule downtime for a whole hostgroup:
if hostgroup_name:
result1 = pynag.Control.Command.schedule_hostgroup_host_downtime(
hostgroup_name=hostgroup_name,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
        )
result2 = pynag.Control.Command.schedule_hostgroup_svc_downtime(
hostgroup_name=hostgroup_name,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
)
return result1, result2
# Check if we are recursively scheduling downtime for host and all its services:
elif all_services_on_host:
result1 = pynag.Control.Command.schedule_host_svc_downtime(
host_name=host_name,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
        )
result2 = pynag.Control.Command.schedule_host_downtime(
host_name=host_name,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
)
return result1, result2
# Otherwise, if this is a host
elif service_description in (None, '', u'', '_HOST_'):
return pynag.Control.Command.schedule_host_downtime(
host_name=host_name,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
)
# otherwise it must be a service:
else:
return pynag.Control.Command.schedule_svc_downtime(
host_name=host_name,
service_description=service_description,
start_time=start_time,
end_time=end_time,
fixed=fixed,
trigger_id=trigger_id,
duration=duration,
author=author,
comment=comment,
)
import adagios.utils
def reschedule_many(request, hostlist, servicelist, check_time=None, **kwargs):
""" Same as reschedule() but takes a list of hosts/services as input
Arguments:
        hostlist -- semicolon separated list of hosts to schedule checks for. Same as multiple calls with host_name=
servicelist -- Same as hostlist but for services. Format is: host_name,service_description;host_name,service_description
"""
#task = adagios.utils.Task()
#WaitCondition = "last_check > %s" % int(time.time()- 1)
for i in hostlist.split(';'):
if not i: continue
reschedule(request, host_name=i, service_description=None, check_time=check_time)
#task.add(wait, 'hosts', i, WaitCondition)
for i in servicelist.split(';'):
if not i: continue
host_name,service_description = i.split(',')
reschedule(request, host_name=host_name, service_description=service_description, check_time=check_time)
#WaitObject = "{h};{s}".format(h=host_name, s=service_description)
#task.add(wait, 'services', WaitObject, WaitCondition)
return {'message': _("command sent successfully")}
def reschedule(request, host_name=None, service_description=None, check_time=None, wait=0, hostlist='', servicelist=''):
""" Reschedule a check of this service/host
Arguments:
host_name -- Name of the host
service_description -- Name of the service check. If left empty, host check will be rescheduled
check_time -- timestamp of when to execute this check, if left empty, execute right now
wait -- If set to 1, function will not return until check has been rescheduled
"""
    if check_time in (None, ''):
check_time = time.time()
if service_description in (None, '', u'', '_HOST_', 'undefined'):
service_description = ""
pynag.Control.Command.schedule_forced_host_check(
host_name=host_name, check_time=check_time)
if wait == "1":
livestatus = adagios.status.utils.livestatus(request)
livestatus.query("GET hosts",
"WaitObject: %s " % host_name,
"WaitCondition: last_check > %s" % check_time,
"WaitTrigger: check",
"Filter: host_name = %s" % host_name,
)
else:
pynag.Control.Command.schedule_forced_svc_check(
host_name=host_name, service_description=service_description, check_time=check_time)
if wait == "1":
livestatus = adagios.status.utils.livestatus(request)
livestatus.query("GET services",
"WaitObject: %s %s" % (
host_name, service_description),
"WaitCondition: last_check > %s" % check_time,
"WaitTrigger: check",
"Filter: host_name = %s" % host_name,
)
return "ok"
def comment(author, comment, host_name, service_description=None, persistent=1):
""" Adds a comment to a particular service.
If the "persistent" field is set to zero (0), the comment will be deleted the next time Nagios is restarted.
Otherwise, the comment will persist across program restarts until it is deleted manually. """
if service_description in (None, '', u'', '_HOST_'):
pynag.Control.Command.add_host_comment(
host_name=host_name, persistent=persistent, author=author, comment=comment)
else:
pynag.Control.Command.add_svc_comment(
host_name=host_name, service_description=service_description, persistent=persistent, author=author, comment=comment)
return "ok"
def delete_comment(comment_id, object_type=None, host_name=None, service_description=None):
"""
"""
if not host_name:
# TODO host_name is not used here, why do we need it ?
pass
if object_type == "host" or service_description in (None, '', u'', '_HOST_'):
pynag.Control.Command.del_host_comment(comment_id=comment_id)
else:
pynag.Control.Command.del_svc_comment(comment_id=comment_id)
return "ok"
def edit(object_type, short_name, attribute_name, new_value):
""" Change one single attribute for one single object.
Arguments:
object_type -- Type of object to change (i.e. "host","service", etc)
        short_name -- Short name of the object, e.g. the host_name of a host
        attribute_name -- Name of the attribute to change, e.g. 'address'
        new_value -- New value for the attribute, e.g. '127.0.0.1'
Examples:
edit('host','localhost','address','127.0.0.1')
edit('service', 'localhost/Ping', 'contactgroups', 'None')
"""
# TODO : MK Livestatus access acording to remote_user
c = pynag.Model.string_to_class[object_type]
my_obj = c.objects.get_by_shortname(short_name)
my_obj[attribute_name] = new_value
my_obj.save()
return str(my_obj)
def get_map_data(request, host_name=None):
""" Returns a list of (host_name,2d_coords). If host_name is provided, returns a list with only that host """
livestatus = adagios.status.utils.livestatus(request)
all_hosts = livestatus.query('GET hosts', )
hosts_with_coordinates = pynag.Model.Host.objects.filter(
**{'2d_coords__exists': True})
hosts = []
connections = []
for i in all_hosts:
name = i['name']
if host_name in (None, '', name):
            # Skip hosts that do not have any 2d coordinates defined
coords = None
for x in hosts_with_coordinates:
if x.host_name == name:
coords = x['2d_coords']
break
if coords is None:
continue
tmp = coords.split(',')
if len(tmp) != 2:
continue
x, y = tmp
host = {}
host['host_name'] = name
host['state'] = i['state']
i['x_coordinates'] = x
i['y_coordinates'] = y
hosts.append(i)
# For all hosts that have network parents, lets return a proper line for
# those two
for i in hosts:
# Loop through all network parents. If network parent is also in our hostlist
# Then create a connection between the two
for parent in i.get('parents'):
for x in hosts:
if x.get('name') == parent:
connection = {}
connection['parent_x_coordinates'] = x.get('x_coordinates')
connection['parent_y_coordinates'] = x.get('y_coordinates')
connection['child_x_coordinates'] = i.get('x_coordinates')
connection['child_y_coordinates'] = i.get('y_coordinates')
connection['state'] = i.get('state')
connection['description'] = i.get('name')
connections.append(connection)
result = {}
result['hosts'] = hosts
result['connections'] = connections
return result
def change_host_coordinates(host_name, latitude, longitude):
""" Updates longitude and latitude for one specific host """
host = pynag.Model.Host.objects.get_by_shortname(host_name)
coords = "%s,%s" % (latitude, longitude)
host['2d_coords'] = coords
host.save()
def autocomplete(request, q):
""" Returns a list of {'hosts':[], 'hostgroups':[],'services':[]} matching search query q
"""
if q is None:
q = ''
result = {}
hosts = adagios.status.utils.get_hosts(request, host_name__contains=q)
services = adagios.status.utils.get_services(request, service_description__contains=q)
hostgroups = adagios.status.utils.get_hostgroups(request, hostgroup_name__contains=q)
result['hosts'] = sorted(set(map(lambda x: x['name'], hosts)))
result['hostgroups'] = sorted(set(map(lambda x: x['name'], hostgroups)))
result['services'] = sorted(set(map(lambda x: x['description'], services)))
return result
def delete_downtime(downtime_id, is_service=True):
""" Delete one specific downtime with id that matches downtime_id.
Arguments:
downtime_id -- Id of the downtime to be deleted
is_service -- If set to True or 1, then this is assumed to be a service downtime, otherwise assume host downtime
"""
if is_service in (True, 1, '1'):
pynag.Control.Command.del_svc_downtime(downtime_id)
else:
pynag.Control.Command.del_host_downtime(downtime_id)
return "ok"
def top_alert_producers(limit=5, start_time=None, end_time=None):
""" Return a list of ["host_name",number_of_alerts]
Arguments:
limit -- Limit output to top n hosts (default 5)
start_time -- Search log starting with start_time (default since last log rotation)
"""
if start_time == '':
start_time = None
if end_time == '':
end_time = None
l = pynag.Parsers.LogFiles()
log = l.get_state_history(start_time=start_time, end_time=end_time)
top_alert_producers = collections.defaultdict(int)
for i in log:
if 'host_name' in i and 'state' in i and i['state'] > 0:
top_alert_producers[i['host_name']] += 1
top_alert_producers = top_alert_producers.items()
top_alert_producers.sort(cmp=lambda a, b: cmp(a[1], b[1]), reverse=True)
    if len(top_alert_producers) > int(limit):
        top_alert_producers = top_alert_producers[:int(limit)]
return top_alert_producers
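# Illustrative return value of top_alert_producers(limit=3), sorted by the number
# of alerts in descending order (host names and counts are hypothetical):
#
#   [('flaky-switch01', 42), ('web03', 17), ('db01', 9)]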
def log_entries(*args, **kwargs):
""" Same as pynag.Parsers.Logfiles().get_log_entries()
Arguments:
start_time -- unix timestamp. if None, return all entries from today
end_time -- If specified, only fetch log entries older than this (unix timestamp)
        strict -- If True, only return entries between start_time and end_time; if False,
               -- return entries from all log files that cover the given time range
        search -- If provided, only return log entries that contain this string (case insensitive)
        kwargs -- All extra arguments are used as filters on the log entries, e.g. host_name="localhost"
Returns:
List of dicts
"""
l = pynag.Parsers.LogFiles()
return l.get_log_entries(*args, **kwargs)
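# Illustrative usage of log_entries(); keyword arguments act as filters on the
# parsed log entries. The values below are hypothetical:
#
#   log_entries(start_time=time.time() - 3600, search='SERVICE ALERT', host_name='web01')
#   # -> list of dicts, one per matching entry in the relevant nagios log files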
def state_history(start_time=None, end_time=None, object_type=None, host_name=None, service_description=None, hostgroup_name=None):
""" Returns a list of dicts, with the state history of hosts and services. Parameters behaves similar to get_log_entries
"""
if start_time == '':
start_time = None
if end_time == '':
end_time = None
if host_name == '':
host_name = None
if service_description == '':
service_description = None
l = pynag.Parsers.LogFiles()
log_entries = l.get_state_history(start_time=start_time, end_time=end_time, host_name=host_name, service_description=service_description)
if object_type == 'host' or object_type == 'service':
pass
elif object_type == 'hostgroup':
hg = pynag.Model.Hostgroup.objects.get_by_shortname(hostgroup_name)
hosts = hg.get_effective_hosts()
hostnames = map(lambda x: x.host_name, hosts)
log_entries = filter(lambda x: x['host_name'] in hostnames, log_entries)
else:
raise Exception(_("Unsupported object type: %s") % object_type)
# Add some css-hints for and duration of each state history entry as percent of duration
# this is used by all views that have state history and on top of it a progress bar which shows
# Up/downtime totals.
c = {'log': log_entries }
if len(c['log']) > 0:
log = c['log']
c['start_time'] = start_time = log[0]['time']
c['end_time'] = log[-1]['time']
now = time.time()
total_duration = now - start_time
css_hint = {}
css_hint[0] = 'success'
css_hint[1] = 'warning'
css_hint[2] = 'danger'
css_hint[3] = 'info'
for i in log:
i['duration_percent'] = 100 * i['duration'] / total_duration
i['bootstrap_status'] = css_hint[i['state']]
return log_entries
def _get_service_model(host_name, service_description=None):
""" Return one pynag.Model.Service object for one specific service as seen
from status point of view. That means it will do its best to return a service
that was assigned to hostgroup but the caller requested a specific host.
Returns:
pynag.Model.Service object
Raises:
KeyError if not found
"""
try:
return pynag.Model.Service.objects.get_by_shortname("%s/%s" % (host_name, service_description))
except KeyError, e:
host = pynag.Model.Host.objects.get_by_shortname(host_name)
for i in host.get_effective_services():
if i.service_description == service_description:
return i
raise e
def command_line(host_name, service_description=None):
""" Returns effective command line for a host or a service (i.e. resolves check_command) """
try:
obj = _get_host_or_service(host_name, service_description)
return obj.get_effective_command_line(host_name=host_name)
except KeyError:
return _("Could not resolve commandline. Object not found")
def _get_host_or_service(host_name, service_description=None):
""" Return a pynag.Model.Host or pynag.Model.Service or raise exception if none are found """
host = pynag.Model.Host.objects.get_by_shortname(host_name)
if not service_description or service_description == '_HOST_':
return host
else:
search_result = pynag.Model.Service.objects.filter(host_name=host_name, service_description=service_description)
if search_result:
return search_result[0]
# If no services were found, the service might be applied to a hostgroup
for service in host.get_effective_services():
if service.service_description == service_description:
return service
raise KeyError(_("Object not found"))
def update_check_command(host_name, service_description=None, **kwargs):
""" Saves all custom variables of a given service
"""
try:
for k, v in kwargs.items():
if service_description is None or service_description == '':
obj = pynag.Model.Host.objects.get_by_shortname(host_name)
else:
obj = pynag.Model.Service.objects.get_by_shortname(
"%s/%s" % (host_name, service_description))
if k.startswith("$_SERVICE") or k.startswith('$ARG') or k.startswith('$_HOST'):
obj.set_macro(k, v)
obj.save()
return _("Object saved")
except KeyError:
raise Exception(_("Object not found"))
def get_business_process_names():
""" Returns all configured business processes
"""
import adagios.businessprocess
return map(lambda x: x.name, adagios.businessprocess.get_all_processes())
def get(request, object_type, *args, **kwargs):
livestatus_arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
if not object_type.endswith('s'):
object_type += 's'
if 'name__contains' in kwargs and object_type == 'services':
name = str(kwargs['name__contains'])
livestatus_arguments = filter(
lambda x: x.startswith('name'), livestatus_arguments)
livestatus_arguments.append('Filter: host_name ~ %s' % name)
livestatus_arguments.append('Filter: description ~ %s' % name)
livestatus_arguments.append('Or: 2')
livestatus = adagios.status.utils.livestatus(request)
results = livestatus.query('GET %s' % object_type, *livestatus_arguments)
    if object_type == 'services':
for i in results:
i['name'] = i.get('host_name') + "/" + i.get('description')
return results
def get_business_process(process_name=None, process_type=None):
""" Returns a list of all processes in json format.
If process_name is specified, return all sub processes.
"""
import adagios.bi
if not process_name:
processes = adagios.bi.get_all_processes()
else:
process = adagios.bi.get_business_process(str(process_name), process_type)
processes = process.get_processes()
result = []
# Turn processes into nice json
for i in processes:
json = {}
json['state'] = i.get_status()
json['name'] = i.name
json['display_name'] = i.display_name
json['subprocess_count'] = len(i.processes)
json['process_type'] = i.process_type
result.append(json)
return result
def remove_downtime(request, host_name, service_description=None, downtime_id=None):
""" Remove downtime for one specific host or service """
downtimes_to_remove = []
# If downtime_id is not provided, remove all downtimes of that service or host
if downtime_id:
downtimes_to_remove.append(downtime_id)
else:
livestatus = adagios.status.utils.livestatus(request)
query_parameters = list()
query_parameters.append('GET downtimes')
query_parameters.append('Filter: host_name = {host_name}'.format(**locals()))
if service_description:
query_parameters.append('Filter: service_description = {service_description}'.format(**locals()))
result = livestatus.query(*query_parameters)
for i in result:
downtime_id = i['id']
downtimes_to_remove.append(downtime_id)
if service_description:
for i in downtimes_to_remove:
pynag.Control.Command.del_svc_downtime(downtime_id=i)
else:
for i in downtimes_to_remove:
pynag.Control.Command.del_host_downtime(downtime_id=i)
return "ok"
def remove_acknowledgement(host_name, service_description=None):
""" Remove downtime for one specific host or service """
if not service_description:
pynag.Control.Command.remove_host_acknowledgement(host_name=host_name)
else:
pynag.Control.Command.remove_svc_acknowledgement(host_name=host_name, service_description=service_description)
return "ok"
def submit_check_result(request, host_name, service_description=None, autocreate=False, status_code=3, plugin_output=_("No message was entered"), performance_data=""):
""" Submit a passive check_result for a given host or a service
Arguments:
host_name -- Name of the host you want to submit check results for
        service_description -- If provided, submit a result for this service instead of a host
        autocreate -- If set to True and the host/service does not exist, it will be created
        status_code -- Nagios style status code for the check (0, 1, 2 or 3, meaning ok, warning, critical and unknown)
plugin_output -- The text output of the check to display in a web interface
performance_data -- Optional, If there are any performance metrics to display
"""
livestatus = adagios.status.utils.livestatus(request)
result = {}
output = plugin_output + " | " + performance_data
if not service_description:
object_type = 'host'
args = pynag.Utils.grep_to_livestatus(host_name=host_name)
objects = livestatus.get_hosts(*args)
else:
object_type = 'service'
args = pynag.Utils.grep_to_livestatus(host_name=host_name, service_description=service_description)
objects = livestatus.get_services(*args)
if not objects and autocreate is True:
raise Exception(_("Autocreate not implemented yet"))
elif not objects:
result['error'] = 'No %s with that name' % object_type
else:
if object_type == 'host':
pynag.Control.Command.process_host_check_result(host_name, status_code, output)
else:
pynag.Control.Command.process_service_check_result(host_name, service_description, status_code, output)
result['message'] = _("Command has been submitted.")
return result
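# Illustrative call to submit_check_result() for a hypothetical passive check:
#
#   submit_check_result(request,
#                       host_name='web01',
#                       service_description='Backup job',
#                       status_code=0,
#                       plugin_output='backup finished in 42 minutes',
#                       performance_data='duration=2520s')
#   # -> {'message': 'Command has been submitted.'} when the object exists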
def statistics(request, **kwargs):
""" Returns a dict with various statistics on status data. """
return adagios.status.utils.get_statistics(request, **kwargs)
def metrics(request, **kwargs):
""" Returns a list of dicts which contain service perfdata metrics
"""
result = []
fields = "host_name description perf_data state host_state".split()
services = adagios.status.utils.get_services(request, fields=fields, **kwargs)
for service in services:
metrics = pynag.Utils.PerfData(service['perf_data']).metrics
metrics = filter(lambda x: x.is_valid(), metrics)
for metric in metrics:
metric_dict = {
'host_name': service['host_name'],
'service_description': service['description'],
'state': service['state'],
'host_state': service['host_state'],
'label': metric.label,
'value': metric.value,
'uom': metric.uom,
'warn': metric.warn,
'crit': metric.crit,
'min': metric.min,
'max': metric.max,
}
result.append(metric_dict)
return result
def metric_names(request, **kwargs):
""" Returns the names of all perfdata metrics that match selected request """
metric_names = set()
fields = "host_name description perf_data state host_state".split()
services = adagios.status.utils.get_services(request, fields=fields, **kwargs)
for service in services:
metrics = pynag.Utils.PerfData(service['perf_data']).metrics
metrics = filter(lambda x: x.is_valid(), metrics)
for metric in metrics:
metric_names.add(metric.label)
result = {
'services that match filter': len(services),
'filter': kwargs,
'metric_names': sorted(list(metric_names)),
}
return result
def wait(table, WaitObject, WaitCondition=None, WaitTrigger='check', **kwargs):
print _("Lets wait for"), locals()
if not WaitCondition:
WaitCondition = "last_check > %s" % int(time.time()-1)
livestatus = adagios.status.utils.livestatus(None)
print _("livestatus ok")
result = livestatus.get(table, 'Stats: state != 999', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
print _("ok no more waiting for "), WaitObject
return result
def wait_many(hostlist, servicelist, WaitCondition=None, WaitTrigger='check', **kwargs):
if not WaitCondition:
WaitCondition = "last_check > %s" % int(time.time()-1)
livestatus = adagios.status.utils.livestatus(None)
for host in hostlist.split(';'):
if not host:
continue
WaitObject = host
livestatus.get('hosts', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
print WaitObject
for service in servicelist.split(';'):
if not service:
continue
WaitObject = service.replace(',', ';')
livestatus.get('services', WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
print WaitObject
def toggle_backend_visibility(request, backend_name):
""" Toggles a backend in user preferences.
Args:
request: a Django request
backend_name (str): The name of the backend.
"""
user = userdata.User(request)
if not user.disabled_backends:
user.disabled_backends = []
if backend_name in user.disabled_backends:
user.disabled_backends.remove(backend_name)
else:
user.disabled_backends.append(backend_name)
user.save()
########NEW FILE########
__FILENAME__ = adagiostags
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
from datetime import datetime, timedelta
from django import template
from django.utils.timesince import timesince
from django.utils.translation import ugettext as _
register = template.Library()
@register.filter("timestamp")
def timestamp(value):
try:
return datetime.fromtimestamp(value)
except AttributeError:
return ''
@register.filter("duration")
def duration(value):
""" Used as a filter, returns a human-readable duration.
'value' must be in seconds.
"""
zero = datetime.min
return timesince(zero, zero + timedelta(0, value))
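# Illustrative template usage of the filters above; "service.last_check" holds a unix
# timestamp and "service.duration_sec" is a hypothetical variable holding seconds:
#
#   {{ service.last_check|timestamp }}    -> datetime of the last check
#   {{ service.duration_sec|duration }}   -> human readable text such as "3 hours"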
@register.filter("hash")
def hash(h, key):
return h[key]
########NEW FILE########
__FILENAME__ = tests
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.utils import unittest
from django.test.client import Client
from django.utils.translation import ugettext as _
import pynag.Parsers
import os
from django.test.client import RequestFactory
import adagios.status
import adagios.status.utils
import adagios.status.graphite
import adagios.settings
import adagios.utils
class LiveStatusTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.nagios_config = adagios.settings.nagios_config
cls.environment = adagios.utils.FakeAdagiosEnvironment()
cls.environment.create_minimal_environment()
cls.environment.configure_livestatus()
cls.environment.update_adagios_global_variables()
cls.environment.start()
cls.livestatus = cls.environment.get_livestatus()
cls.factory = RequestFactory()
@classmethod
def tearDownClass(cls):
cls.environment.terminate()
def testLivestatusConnectivity(self):
requests = self.livestatus.query('GET status', 'Columns: requests')
self.assertEqual(
1, len(requests), _("Could not get status.requests from livestatus"))
def testLivestatusConfigured(self):
config = pynag.Parsers.config(cfg_file=self.nagios_config)
config.parse_maincfg()
for k, v in config.maincfg_values:
if k == "broker_module" and v.find('livestatus') > 1:
tmp = v.split()
self.assertFalse(
len(tmp) < 2, _(' We think livestatus is incorrectly configured. In nagios.cfg it looks like this: %s') % v)
module_file = tmp[0]
socket_file = tmp[1]
self.assertTrue(
os.path.exists(module_file), _(' Livestatus Broker module not found at "%s". Is nagios correctly configured?') % module_file)
self.assertTrue(
os.path.exists(socket_file), _(' Livestatus socket file was not found (%s). Make sure nagios is running and that livestatus module is loaded') % socket_file)
return
self.assertTrue(
False, _('Nagios Broker module not found. Is livestatus installed and configured?'))
def testPageLoad(self):
""" Loads a bunch of status pages, looking for a crash """
self.loadPage('/status/')
self.loadPage('/status/hosts')
self.loadPage('/status/services')
self.loadPage('/status/contacts')
self.loadPage('/status/parents')
self.loadPage('/status/state_history')
self.loadPage('/status/log')
self.loadPage('/status/comments')
self.loadPage('/status/downtimes')
self.loadPage('/status/hostgroups')
self.loadPage('/status/servicegroups')
self.loadPage('/status/map')
self.loadPage('/status/dashboard')
def test_status_detail(self):
""" Tests for /status/detail """
tmp = self.loadPage('/status/detail?contact_name=nagiosadmin')
self.assertTrue('nagiosadmin belongs to the following' in tmp.content)
tmp = self.loadPage('/status/detail?host_name=ok_host')
self.assertTrue('ok_host' in tmp.content)
tmp = self.loadPage('/status/detail?host_name=ok_host&service_description=ok%20service%201')
self.assertTrue('ok_host' in tmp.content)
tmp = self.loadPage('/status/detail?contactgroup_name=admins')
self.assertTrue('nagiosadmin' in tmp.content)
def testStateHistory(self):
request = self.factory.get('/status/state_history')
adagios.status.views.state_history(request)
def loadPage(self, url, expected_status_code=200):
""" Load one specific page, and assert if return code is not 200 """
c = Client()
response = c.get(url)
self.assertEqual(response.status_code, expected_status_code, _("Expected status code %(code)s for page %(url)s") % {'code': expected_status_code, 'url': url})
return response
def testSubmitCommand(self):
""" Test adagios.rest.status.submit_check_results
"""
c = Client()
data = {}
data['host_name'] = 'adagios test host'
data['service_description'] = 'nonexistant'
data['status_code'] = "0"
data['plugin_output'] = 'test message'
data['performance_data'] = ''
response = c.post('/rest/status/json/submit_check_result', data=data)
self.assertEqual(200, response.status_code)
class Graphite(unittest.TestCase):
def test__get_graphite_url(self):
""" Smoketest for adagios.status.graphite._get_graphite_url() """
base = "http://localhost/graphite"
host = "localhost"
service = "Ping"
metric = "packetloss"
from_ = "-1d"
parameters = locals()
parameters.pop('self', None)
result = adagios.status.graphite._get_graphite_url(**parameters)
self.assertTrue(result.startswith(base))
self.assertTrue(host in result)
self.assertTrue(service in result)
self.assertTrue(metric in result)
def test_get(self):
""" Smoketest for adagios.status.graphite.get() """
base = "http://localhost/graphite"
host = "localhost"
service = "Ping"
metrics = ["packetloss", "rta"]
units = [("test", "test", "-1d")]
parameters = locals()
parameters.pop('self', None)
result = adagios.status.graphite.get(**parameters)
self.assertTrue(result)
self.assertTrue(len(result) == 1)
self.assertTrue('rta' in result[0]['metrics'])
self.assertTrue('packetloss' in result[0]['metrics'])
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
urlpatterns = patterns('adagios',
url(r'^/?$', 'status.views.status_index'),
url(r'^/acknowledgements/?$', 'status.views.acknowledgement_list'),
url(r'^/error/?$', 'status.views.error_page'),
url(r'^/comments/?$', 'status.views.comment_list'),
url(r'^/contacts/?$', 'status.views.contact_list'),
url(r'^/contactgroups/?$', 'status.views.contactgroups'),
url(r'^/dashboard/?$', 'status.views.dashboard'),
url(r'^/detail/?$', 'status.views.detail'),
url(r'^/downtimes/?$', 'status.views.downtime_list'),
url(r'^/hostgroups/?$', 'status.views.status_hostgroups'),
url(r'^/hosts/?$', 'status.views.hosts'),
url(r'^/log/?$', 'status.views.log'),
url(r'^/map/?', 'status.views.map_view'),
url(r'^/parents/?$', 'status.views.network_parents'),
url(r'^/perfdata/?$', 'status.views.perfdata'),
url(r'^/perfdata2/?$', 'status.views.perfdata2'),
url(r'^/problems/?$', 'status.views.problems'),
url(r'^/servicegroups/?$', 'status.views.status_servicegroups'),
url(r'^/services/?$', 'status.views.services'),
url(r'^/state_history/?$', 'status.views.state_history'),
url(r'^/backends/?$', 'status.views.backends'),
# Misc snippets
url(r'^/snippets/log/?$', 'status.views.snippets_log'),
url(r'^/snippets/services/?$', 'status.views.snippets_services'),
url(r'^/snippets/hosts/?$', 'status.views.snippets_hosts'),
# Misc tests
url(r'^/test/services/?$', 'status.views.services_js'),
url(r'^/test/status_dt/?$', 'status.views.status_dt'),
url(r'^/test/livestatus/?$', 'status.views.test_livestatus'),
# Deprecated as of 2013-03-23
url(r'^/contacts/(?P<contact_name>.+)/?$', 'status.views.contact_detail'),
url(r'^/hostgroups/(?P<hostgroup_name>.+)/?$', 'status.views.status_hostgroup'),
url(r'^/contactgroups/(?P<contactgroup_name>.+)/?$', 'status.views.contactgroup_detail'),
url(r'^/servicegroups/(?P<servicegroup_name>.+)/?$', 'status.views.servicegroup_detail'),
url(r'^/services_old/?$', 'status.views.status'),
)
########NEW FILE########
__FILENAME__ = utils
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Utility functions for the status app. These are mostly used by
# adagios.status.views
import pynag.Utils
import pynag.Parsers
import adagios.settings
from adagios.misc.rest import add_notification, clear_notification
import simplejson as json
from collections import defaultdict
from adagios import userdata
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
def get_all_backends():
# TODO: Properly support multiple instances, using split here is not a good idea
backends = adagios.settings.livestatus_path or ''
backends = backends.split(',')
backends = map(lambda x: x.strip(), backends)
return backends
def livestatus(request):
""" Returns a new pynag.Parsers.mk_livestatus() object with authauser automatically set from request.META['remoteuser']
"""
if request is None:
authuser = None
elif adagios.settings.enable_authorization and not adagios.auth.has_role(request, 'administrators') and not adagios.auth.has_role(request, 'operators'):
authuser = request.META.get('REMOTE_USER', None)
else:
authuser = None
backends = get_all_backends()
# we remove the disabled backends
if backends is not None:
try:
user = userdata.User(request)
if user.disabled_backends is not None:
backends = filter(lambda x: x not in user.disabled_backends, backends)
clear_notification("userdata problem")
except Exception as e:
message = "%s: %s" % (type(e), str(e))
add_notification(level="warning", notification_id="userdata problem", message=message)
livestatus = pynag.Parsers.MultiSite(
nagios_cfg_file=adagios.settings.nagios_config,
livestatus_socket_path=adagios.settings.livestatus_path,
authuser=authuser)
for i in backends:
livestatus.add_backend(path=i, name=i)
return livestatus
def query(request, *args, **kwargs):
""" Wrapper around pynag.Parsers.mk_livestatus().query(). Any authorization logic should be performed here. """
l = livestatus(request)
return l.query(*args, **kwargs)
def get_hostgroups(request, *args, **kwargs):
""" Get a list of hostgroups from mk_livestatus
"""
l = livestatus(request)
return l.get_hostgroups(*args, **kwargs)
def get_hosts(request, tags=None, fields=None, *args, **kwargs):
""" Get a list of hosts from mk_livestatus
This is a wrapper around pynag.Parsers.mk_livestatus().query()
Arguments:
        request - Django request object; used to pick livestatus backends and the authorized user
tags - Not in use
fields - If fields=None, return all columns, otherwise return only the columns provided
Any *args will be passed directly to livestatus
        Any **kwargs will be converted to livestatus 'Filter:' style strings
Returns:
A list of dict (hosts)
"""
if 'q' in kwargs:
q = kwargs.get('q')
del kwargs['q']
if not isinstance(q, list):
q = [q]
else:
q = []
# Often search filters include description, which we will skip
kwargs.pop('description', None)
if 'host_state' in kwargs:
kwargs['state'] = kwargs.pop('host_state')
# If keyword "unhandled" is in kwargs, then we will fetch unhandled
# hosts only
if 'unhandled' in kwargs:
del kwargs['unhandled']
kwargs['state'] = 1
kwargs['acknowledged'] = 0
kwargs['scheduled_downtime_depth'] = 0
#kwargs['host_scheduled_downtime_depth'] = 0
#kwargs['host_acknowledged'] = 0
arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
# if "q" came in from the querystring, lets filter on host_name
for i in q:
arguments.append('Filter: name ~~ %s' % i)
arguments.append('Filter: address ~~ %s' % i)
arguments.append('Filter: plugin_output ~~ %s' % i)
arguments.append('Or: 3')
if fields is None:
fields = [
'name', 'plugin_output', 'last_check', 'state', 'services', 'services_with_info', 'services_with_state',
'parents', 'childs', 'address', 'last_state_change', 'acknowledged', 'downtimes', 'comments_with_info',
'scheduled_downtime_depth', 'num_services_crit', 'num_services_warn', 'num_services_unknown',
'num_services_ok', 'num_services_pending']
# fields should be a list, lets create a Column: query for livestatus
if isinstance(fields, (str, unicode)):
fields = fields.split(',')
if len(fields) > 0:
argument = 'Columns: %s' % (' '.join(fields))
arguments.append(argument)
l = livestatus(request)
result = l.get_hosts(*arguments)
# Add statistics to every hosts:
for host in result:
try:
host['num_problems'] = host['num_services_crit'] + \
host['num_services_warn'] + host['num_services_unknown']
host['children'] = host['services_with_state']
if host.get('last_state_change') == 0:
host['state'] = 3
host['status'] = state[host['state']]
ok = host.get('num_services_ok')
warn = host.get('num_services_warn')
crit = host.get('num_services_crit')
pending = host.get('num_services_pending')
unknown = host.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
host['total'] = total
host['problems'] = warn + crit + unknown
try:
total = float(total)
host['health'] = float(ok) / total * 100.0
host['percent_ok'] = ok / total * 100
host['percent_warn'] = warn / total * 100
host['percent_crit'] = crit / total * 100
host['percent_unknown'] = unknown / total * 100
host['percent_pending'] = pending / total * 100
except ZeroDivisionError:
host['health'] = 'n/a'
except Exception:
pass
# Sort by host and service status
result.sort(reverse=True, cmp=lambda a, b: cmp(a.get('num_problems'), b.get('num_problems')))
result.sort(reverse=True, cmp=lambda a, b: cmp(a.get('state'), b.get('state')))
return result
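# Illustrative calls to get_hosts(); keyword arguments are turned into livestatus
# 'Filter:' headers via pynag.Utils.grep_to_livestatus(). Values are hypothetical:
#
#   get_hosts(request, state=1)                # only hosts that are down
#   get_hosts(request, name__contains='web')   # substring filter on the host name
#   get_hosts(request, unhandled=1)            # down, unacknowledged, not in downtime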
def get_services(request=None, tags=None, fields=None, *args, **kwargs):
""" Get a list of services from mk_livestatus.
This is a wrapper around pynag.Parsers.mk_livestatus().query()
Arguments:
        request - Django request object; used to pick livestatus backends and the authorized user
        tags - List of 'tags' that will be passed on as a filter to the services.
               Examples of service tags are: problem, unhandled, ishandled
fields - If fields=None, return all columns, otherwise return only the columns provided.
                 fields can be either a list or a comma separated string
Any *args will be passed directly to livestatus
Any **kwargs passed in will be converted to livestatus 'Filter:' strings
Examples:
get_services(host_name='localhost') # same as livestatus.query('GET services','Filter: host_name = localhost')
get_services('Authuser: admin', host_name='localhost')
"""
if 'q' in kwargs:
q = kwargs.get('q')
del kwargs['q']
else:
q = []
if not isinstance(q, list):
q = [q]
# If keyword "unhandled" is in kwargs, then we will fetch unhandled
# services only
if 'unhandled' in kwargs:
del kwargs['unhandled']
kwargs['state__isnot'] = 0
kwargs['acknowledged'] = 0
kwargs['scheduled_downtime_depth'] = 0
kwargs['host_scheduled_downtime_depth'] = 0
kwargs['host_acknowledged'] = 0
kwargs['host_state'] = 0
arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
# If q was added, it is a fuzzy filter on services
for i in q:
arguments.append('Filter: host_name ~~ %s' % i)
arguments.append('Filter: description ~~ %s' % i)
arguments.append('Filter: plugin_output ~~ %s' % i)
arguments.append('Filter: host_address ~~ %s' % i)
arguments.append('Or: 4')
if fields is None:
fields = [
'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state', 'scheduled_downtime_depth',
'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
# fields should be a list, lets create a Column: query for livestatus
if isinstance(fields, (str, unicode)):
fields = fields.split(',')
if len(fields) > 0:
argument = 'Columns: %s' % (' '.join(fields))
arguments.append(argument)
l = livestatus(request)
result = l.get_services(*arguments)
# Add custom tags to our service list
try:
for service in result:
# Tag the service with tags such as problems and unhandled
service_tags = []
if service['state'] != 0:
service_tags.append('problem')
service_tags.append('problems')
if service['acknowledged'] == 0 and service['downtimes'] == [] and service['host_downtimes'] == []:
service_tags.append('unhandled')
service['unhandled'] = "unhandled"
else:
service_tags.append('ishandled')
service['handled'] = "handled"
elif service.get('last_state_change') == 0:
service['state'] = 3
service_tags.append('pending')
else:
service_tags.append('ok')
if service['acknowledged'] == 1:
service_tags.append('acknowledged')
if service['downtimes'] != []:
service_tags.append('downtime')
service['tags'] = ' '.join(service_tags)
service['status'] = state[service['state']]
if isinstance(tags, str):
tags = [tags]
if isinstance(tags, list):
result = pynag.Utils.grep(result, tags__contains=tags)
except Exception:
pass
return result
def get_contacts(request, *args, **kwargs):
l = livestatus(request)
return l.get_contacts(*args, **kwargs)
def get_contactgroups(request, *args, **kwargs):
l = livestatus(request)
return l.get_contactgroups(*args, **kwargs)
def get_statistics(request, *args, **kwargs):
""" Return a list of dict. That contains various statistics from mk_livestatus (like service totals and host totals)
"""
c = {}
l = livestatus(request)
arguments = pynag.Utils.grep_to_livestatus(*args, **kwargs)
# Get service totals as an array of [ok,warn,crit,unknown]
c['service_totals'] = l.get_services(
'Stats: state = 0',
'Stats: state = 1',
'Stats: state = 2',
'Stats: state = 3',
*arguments
) or [0, 0, 0, 0]
# Get host totals as an array of [up,down,unreachable]
c['host_totals'] = l.get_hosts(
'Stats: state = 0',
'Stats: state = 1',
'Stats: state = 2',
*arguments
) or [0, 0, 0]
# Get total number of host/ host_problems
c['total_hosts'] = sum(c['host_totals'])
c['total_host_problems'] = c['total_hosts'] - c['host_totals'][0]
# Get total number of services/ service_problems
c['total_services'] = sum(c['service_totals'])
c['total_service_problems'] = c['total_services'] - c['service_totals'][0]
# Calculate percentage of hosts/services that are "ok"
try:
c['service_totals_percent'] = map(lambda x: float(100.0 * x / c['total_services']), c['service_totals'])
except ZeroDivisionError:
c['service_totals_percent'] = [0, 0, 0, 0]
try:
c['host_totals_percent'] = map(lambda x: float(100.0 * x / c['total_hosts']), c['host_totals'])
except ZeroDivisionError:
        c['host_totals_percent'] = [0, 0, 0]
unhandled_services = l.get_services(
'Stats: state > 0',
acknowledged=0,
scheduled_downtime_depth=0,
host_state=0,
*arguments
) or [0]
unhandled_hosts = l.get_hosts(
'Stats: state = 1',
acknowledged=0,
scheduled_downtime_depth=0,
*arguments
) or [0]
c['unhandled_services'] = unhandled_services[0]
c['unhandled_hosts'] = unhandled_hosts[0]
total_unhandled_network_problems = l.get_hosts(
'Filter: childs != ',
'Stats: state = 1',
acknowledged=0,
scheduled_downtime_depth=0,
*arguments
) or [0]
c['total_unhandled_network_problems'] = total_unhandled_network_problems[0]
tmp = l.get_hosts(
'Filter: childs != ',
'Stats: state >= 0',
'Stats: state > 0',
*arguments
) or [0, 0]
c['total_network_parents'], c['total_network_problems'] = tmp
return c
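# Illustrative shape of the dict returned by get_statistics(); all numbers are
# hypothetical and a few keys (percentages, network totals) are omitted here:
#
#   {'service_totals': [180, 7, 3, 1],     # [ok, warning, critical, unknown]
#    'host_totals': [40, 2, 0],            # [up, down, unreachable]
#    'total_hosts': 42, 'total_host_problems': 2,
#    'total_services': 191, 'total_service_problems': 11,
#    'unhandled_services': 4, 'unhandled_hosts': 1, ...}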
def grep_to_livestatus(object_type, *args, **kwargs):
""" Take querystring parameters from django request object, and returns list of livestatus queries
Should support both hosts and services.
It does minimal support for views have hosts and services in same view and user wants to
enter some querystring parameters for both.
"""
result = []
for key in kwargs:
if hasattr(kwargs, 'getlist'):
values = kwargs.getlist(key)
else:
values = [kwargs.get(key)]
if object_type == 'host' and key.startswith('service_'):
continue
if object_type == 'host' and key == 'description':
continue
if object_type == 'host' and key in ('host_scheduled_downtime_depth', 'host_acknowledged', 'host_state'):
key = key[len('host_'):]
if object_type == 'service' and key in ('service_state', 'service_description'):
key = key[len('service_'):]
if object_type == 'service' and key == 'unhandled':
tmp = {}
tmp['state__isnot'] = 0
tmp['acknowledged'] = 0
tmp['scheduled_downtime_depth'] = 0
tmp['host_scheduled_downtime_depth'] = 0
tmp['host_acknowledged'] = 0
tmp['host_state'] = 0
            result += pynag.Utils.grep_to_livestatus(**tmp)
        elif object_type == 'host' and key == 'unhandled':
            tmp = {}
            tmp['state__isnot'] = 0
            tmp['acknowledged'] = 0
            tmp['scheduled_downtime_depth'] = 0
            result += pynag.Utils.grep_to_livestatus(**tmp)
elif object_type == 'host' and key == 'q':
for i in values:
result.append('Filter: name ~~ %s' % i)
result.append('Filter: address ~~ %s' % i)
result.append('Filter: plugin_output ~~ %s' % i)
result.append('Or: 3')
elif object_type == 'service' and key == 'q':
for i in values:
result.append('Filter: host_name ~~ %s' % i)
result.append('Filter: description ~~ %s' % i)
result.append('Filter: plugin_output ~~ %s' % i)
result.append('Filter: host_address ~~ %s' % i)
result.append('Or: 4')
else:
for value in values:
result += pynag.Utils.grep_to_livestatus(**{key: value})
return list(args) + result
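# Illustrative result of grep_to_livestatus() for a fuzzy service search, matching
# the 'q' handling above (the search string 'web' is hypothetical):
#
#   grep_to_livestatus('service', q='web')
#   # -> ['Filter: host_name ~~ web', 'Filter: description ~~ web',
#   #     'Filter: plugin_output ~~ web', 'Filter: host_address ~~ web', 'Or: 4']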
########NEW FILE########
__FILENAME__ = views
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2010, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponse
import time
from os.path import dirname
from collections import defaultdict
import json
import traceback
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.utils.encoding import smart_str
from django.core.context_processors import csrf
from django.utils.translation import ugettext as _
import pynag.Model
import pynag.Utils
import pynag.Control
import pynag.Plugins
import pynag.Model.EventHandlers
from pynag.Parsers import ParserError
import adagios.settings
from adagios.pnp.functions import run_pnp
from adagios.status import utils
import adagios.status.rest
import adagios.status.forms
import adagios.businessprocess
from django.core.urlresolvers import reverse
from adagios.status import graphite
state = defaultdict(lambda: "unknown")
state[0] = "ok"
state[1] = "warning"
state[2] = "critical"
from adagios.views import adagios_decorator, error_page
@adagios_decorator
def detail(request):
""" Return status detail view for a single given host, hostgroup,service, contact, etc """
host_name = request.GET.get('host_name')
service_description = request.GET.get('service_description')
contact_name = request.GET.get('contact_name')
hostgroup_name = request.GET.get('hostgroup_name')
contactgroup_name = request.GET.get('contactgroup_name')
servicegroup_name = request.GET.get('servicegroup_name')
if service_description:
return service_detail(request, host_name=host_name, service_description=service_description)
elif host_name:
return host_detail(request, host_name=host_name)
elif contact_name:
return contact_detail(request, contact_name=contact_name)
elif contactgroup_name:
return contactgroup_detail(request, contactgroup_name=contactgroup_name)
elif hostgroup_name:
return hostgroup_detail(request, hostgroup_name=hostgroup_name)
elif servicegroup_name:
return servicegroup_detail(request, servicegroup_name=servicegroup_name)
raise Exception(_("You have to provide an item via querystring so we know what to give you details for"))
@adagios_decorator
def status_parents(request):
""" Here for backwards compatibility """
return network_parents(request)
@adagios_decorator
def network_parents(request):
""" List of hosts that are network parents """
c = {}
c['messages'] = []
authuser = request.GET.get('contact_name', None)
livestatus = utils.livestatus(request)
fields = "name childs state scheduled_downtime_depth address last_check last_state_change acknowledged downtimes services services_with_info".split()
hosts = utils.get_hosts(request, 'Filter: childs !=', fields=fields, **request.GET)
host_dict = {}
map(lambda x: host_dict.__setitem__(x['name'], x), hosts)
c['hosts'] = []
for i in hosts:
if i['childs']:
c['hosts'].append(i)
ok = 0
crit = 0
i['child_hosts'] = []
for x in i['childs']:
i['child_hosts'].append(host_dict[x])
if host_dict[x]['state'] == 0:
ok += 1
else:
crit += 1
total = float(len(i['childs']))
i['health'] = float(ok) / total * 100.0
i['percent_ok'] = ok / total * 100
i['percent_crit'] = crit / total * 100
return render_to_response('status_parents.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status(request):
""" Compatibility layer around status.views.services
"""
# return render_to_response('status.html', c, context_instance=RequestContext(request))
# Left here for compatibility reasons:
return services(request)
@adagios_decorator
def services(request):
""" This view handles list of services """
c = {}
c['messages'] = []
c['errors'] = []
fields = [
'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
c['services'] = utils.get_services(request, fields=fields, **request.GET)
return render_to_response('status_services.html', c, context_instance=RequestContext(request))
@adagios_decorator
def services_js(request):
""" This view handles list of services """
c = {}
c['messages'] = []
c['errors'] = []
fields = [
'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
c['services'] = json.dumps(utils.get_services(request, fields=fields, **request.GET))
return render_to_response('status_services_js.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status_dt(request):
""" This view handles list of services """
c = {}
return render_to_response('status_dt.html', c, context_instance=RequestContext(request))
@adagios_decorator
def snippets_services(request):
""" Returns a html stub with only the services view """
c = {}
c['messages'] = []
c['errors'] = []
fields = [
'host_name', 'description', 'plugin_output', 'last_check', 'host_state', 'state',
'last_state_change', 'acknowledged', 'downtimes', 'host_downtimes', 'comments_with_info']
c['services'] = utils.get_services(request, fields=fields, **request.GET)
return render_to_response('snippets/status_servicelist_snippet.html', c, context_instance=RequestContext(request))
@adagios_decorator
def snippets_hosts(request):
c = {}
c['messages'] = []
c['errors'] = []
c['hosts'] = utils.get_hosts(request, **request.GET)
c['host_name'] = request.GET.get('detail', None)
return render_to_response('snippets/status_hostlist_snippet.html', c, context_instance=RequestContext(request))
@adagios_decorator
def snippets_log(request):
""" Returns a html stub with the snippet_statehistory_snippet.html
"""
host_name = request.GET.get('host_name')
service_description = request.GET.get('service_description')
hostgroup_name = request.GET.get('hostgroup_name')
if service_description == "_HOST_":
service_description = None
l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
log = l.get_state_history(host_name=host_name, service_description=service_description)
# If hostgroup_name was specified, lets get all log entries that belong to that hostgroup
if host_name and service_description:
object_type = 'service'
elif hostgroup_name:
object_type = "hostgroup"
hg = pynag.Model.Hostgroup.objects.get_by_shortname(hostgroup_name)
hosts = hg.get_effective_hosts()
hostnames = map(lambda x: x.host_name, hosts)
log = filter(lambda x: x['host_name'] in hostnames, log)
elif host_name:
object_type = "host"
else:
raise Exception(_("Need either a host_name or hostgroup_name parameter"))
c = {'log':log}
c['object_type'] = object_type
# Create some state history progress bar from our logs:
if len(c['log']) > 0:
log = c['log']
c['start_time'] = start_time = log[0]['time']
c['end_time'] = end_time = log[-1]['time']
now = time.time()
total_duration = now - start_time
state_hist = []
start = start_time
last_item = None
css_hint = {}
css_hint[0] = 'success'
css_hint[1] = 'warning'
css_hint[2] = 'danger'
css_hint[3] = 'unknown'
for i in log:
i['duration_percent'] = 100 * i['duration'] / total_duration
i['bootstrap_status'] = css_hint[i['state']]
return render_to_response('snippets/status_statehistory_snippet.html', locals(), context_instance=RequestContext(request))
@adagios_decorator
def host_detail(request, host_name):
""" Return status detail view for a single host """
return service_detail(request, host_name=host_name, service_description=None)
@adagios_decorator
def service_detail(request, host_name, service_description):
""" Displays status details for one host or service """
c = {}
c['messages'] = []
c['errors'] = []
livestatus = utils.livestatus(request)
backend = request.GET.get('backend')
c['pnp_url'] = adagios.settings.pnp_url
c['nagios_url'] = adagios.settings.nagios_url
c['request'] = request
now = time.time()
seconds_in_a_day = 60 * 60 * 24
seconds_passed_today = now % seconds_in_a_day
today = now - seconds_passed_today # midnight of today
try:
c['host'] = my_host = livestatus.get_host(host_name, backend)
my_host['object_type'] = 'host'
my_host['short_name'] = my_host['name']
except IndexError:
c['errors'].append(_("Could not find any host named '%s'") % host_name)
return error_page(request, c)
if service_description is None:
tmp = request.GET.get('service_description')
if tmp is not None:
return service_detail(request, host_name, service_description=tmp)
primary_object = my_host
c['service_description'] = '_HOST_'
#c['log'] = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config).get_state_history(
# host_name=host_name, service_description=None)
else:
try:
c['service'] = my_service = livestatus.get_service(
host_name, service_description, backend=backend)
my_service['object_type'] = 'service'
c['service_description'] = service_description
my_service['short_name'] = "%s/%s" % (
my_service['host_name'], my_service['description'])
primary_object = my_service
#c['log'] = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config).get_state_history(
# host_name=host_name, service_description=service_description)
except IndexError:
c['errors'].append(
_("Could not find any service named '%s'") % service_description)
return error_page(request, c)
c['my_object'] = primary_object
c['object_type'] = primary_object['object_type']
# Friendly statusname (i.e. turn 2 into "critical")
primary_object['status'] = state[primary_object['state']]
# Plugin longoutput comes to us with special characters escaped. lets undo
# that:
primary_object['long_plugin_output'] = primary_object[
'long_plugin_output'].replace('\\n', '\n')
# Service list on the sidebar should be sorted
my_host['services_with_info'] = sorted(
my_host.get('services_with_info', []))
c['host_name'] = host_name
perfdata = primary_object['perf_data']
perfdata = pynag.Utils.PerfData(perfdata)
for i, datum in enumerate(perfdata.metrics):
datum.i = i
try:
datum.status = state[datum.get_status()]
except pynag.Utils.PynagError:
datum.status = state[3]
c['perfdata'] = perfdata.metrics
# Get a complete list of network parents
try:
c['network_parents'] = reversed(_get_network_parents(request, host_name))
except Exception, e:
c['errors'].append(e)
# Lets get some graphs
try:
tmp = run_pnp("json", host=host_name)
tmp = json.loads(tmp)
except Exception, e:
tmp = []
c['pnp4nagios_error'] = e
c['graph_urls'] = tmp
if adagios.settings.enable_graphite:
metrics = [x.label for x in perfdata.metrics]
service = c['service_description'].replace(' ', '_')
c['graphite'] = graphite.get(adagios.settings.graphite_url,
c['host_name'],
service,
metrics,
adagios.settings.GRAPHITE_PERIODS,
)
# used in the General tab - preview
for graph in c['graphite']:
if graph['css_id'] == adagios.settings.GRAPHITE_DEFAULT_TAB:
default = {}
for k,v in graph['metrics'].items():
default[k] = v
c['graphite_default'] = default
return render_to_response('status_detail.html', c, context_instance=RequestContext(request))
def _get_network_parents(request, host_name):
""" Returns a list of hosts that are network parents (or grandparents) to host_name
Every item in the list is a host dictionary from mk_livestatus
Returns:
List of lists
Example:
_get_network_parents('remotehost.example.com')
[
['gateway.example.com', 'mod_gearman.example.com'],
['localhost'],
]
"""
result = []
backend = request.GET.get('backend', None)
livestatus = adagios.status.utils.livestatus(request)
if isinstance(host_name, unicode):
host_name = smart_str(host_name)
if isinstance(host_name, str):
host = livestatus.get_host(host_name, backend)
elif isinstance(host_name, dict):
host = host_name
else:
raise KeyError(
'host_name must be str or dict (got %s)' % type(host_name))
parent_names = host['parents']
while len(parent_names) > 0:
parents = map(lambda x: livestatus.get_host(x, backend), parent_names)
# generate a list of grandparent names:
grand_parents = set()
for i in parents:
map(lambda x: grand_parents.add(x), i.get('parents'))
result.append(parents)
parent_names = list(grand_parents)
return result
@adagios_decorator
def hostgroup_detail(request, hostgroup_name):
""" Status detail for one specific hostgroup """
c = {}
c['messages'] = []
c['errors'] = []
c['hostgroup_name'] = hostgroup_name
c['object_type'] = 'hostgroup'
livestatus = adagios.status.utils.livestatus(request)
my_hostgroup = pynag.Model.Hostgroup.objects.get_by_shortname(
hostgroup_name)
c['my_hostgroup'] = livestatus.get_hostgroups(
'Filter: name = %s' % hostgroup_name)[0]
_add_statistics_to_hostgroups([c['my_hostgroup']])
# Get information about child hostgroups
subgroups = my_hostgroup.hostgroup_members or ''
subgroups = subgroups.split(',')
if subgroups == ['']:
subgroups = []
c['hostgroups'] = map(lambda x: livestatus.get_hostgroups('Filter: name = %s' % x)[0], subgroups)
_add_statistics_to_hostgroups(c['hostgroups'])
return render_to_response('status_hostgroup.html', c, context_instance=RequestContext(request))
def _add_statistics_to_hostgroups(hostgroups):
""" Enriches a list of hostgroup dicts with information about subgroups and parentgroups
"""
# Lets establish a good list of all hostgroups and parentgroups
all_hostgroups = pynag.Model.Hostgroup.objects.all
all_subgroups = set() # all hostgroups that belong in some other hostgroup
# "subgroup":['master1','master2']
hostgroup_parentgroups = defaultdict(set)
hostgroup_childgroups = pynag.Model.ObjectRelations.hostgroup_hostgroups
for hostgroup, subgroups in hostgroup_childgroups.items():
map(lambda x: hostgroup_parentgroups[x].add(hostgroup), subgroups)
for i in hostgroups:
i['child_hostgroups'] = hostgroup_childgroups[i['name']]
i['parent_hostgroups'] = hostgroup_parentgroups[i['name']]
# Extra statistics for our hostgroups
for hg in hostgroups:
ok = hg.get('num_services_ok')
warn = hg.get('num_services_warn')
crit = hg.get('num_services_crit')
pending = hg.get('num_services_pending')
unknown = hg.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
hg['total'] = total
hg['problems'] = warn + crit + unknown
try:
total = float(total)
            hg['health'] = float(ok) / total * 100.0
hg['percent_ok'] = ok / total * 100
hg['percent_warn'] = warn / total * 100
hg['percent_crit'] = crit / total * 100
hg['percent_unknown'] = unknown / total * 100
hg['percent_pending'] = pending / total * 100
except ZeroDivisionError:
pass
@adagios_decorator
def status_servicegroups(request):
c = {}
c['messages'] = []
c['errors'] = []
servicegroup_name = None
livestatus = utils.livestatus(request)
servicegroups = livestatus.get_servicegroups()
c['servicegroup_name'] = servicegroup_name
c['request'] = request
c['servicegroups'] = servicegroups
return render_to_response('status_servicegroups.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status_hostgroups(request):
c = {}
c['messages'] = []
c['errors'] = []
hostgroup_name = None
livestatus = utils.livestatus(request)
hostgroups = livestatus.get_hostgroups()
c['hostgroup_name'] = hostgroup_name
c['request'] = request
# Lets establish a good list of all hostgroups and parentgroups
all_hostgroups = pynag.Model.Hostgroup.objects.all
all_subgroups = set() # all hostgroups that belong in some other hostgroup
# "subgroup":['master1','master2']
hostgroup_parentgroups = defaultdict(set)
hostgroup_childgroups = pynag.Model.ObjectRelations.hostgroup_hostgroups
for hostgroup, subgroups in hostgroup_childgroups.items():
map(lambda x: hostgroup_parentgroups[x].add(hostgroup), subgroups)
for i in hostgroups:
i['child_hostgroups'] = hostgroup_childgroups[i['name']]
i['parent_hostgroups'] = hostgroup_parentgroups[i['name']]
if hostgroup_name is None:
# If no hostgroup was specified. Lets only show "root hostgroups"
c['hosts'] = livestatus.get_hosts()
my_hostgroups = []
for i in hostgroups:
if len(i['parent_hostgroups']) == 0:
my_hostgroups.append(i)
my_hostgroups.sort()
c['hostgroups'] = my_hostgroups
else:
my_hostgroup = pynag.Model.Hostgroup.objects.get_by_shortname(
hostgroup_name)
subgroups = my_hostgroup.hostgroup_members or ''
subgroups = subgroups.split(',')
# Strip out any group that is not a subgroup of hostgroup_name
right_hostgroups = []
for group in hostgroups:
if group.get('name', '') in subgroups:
right_hostgroups.append(group)
c['hostgroups'] = right_hostgroups
# If a hostgroup was specified lets also get all the hosts for it
c['hosts'] = livestatus.query(
'GET hosts', 'Filter: host_groups >= %s' % hostgroup_name)
for host in c['hosts']:
ok = host.get('num_services_ok')
warn = host.get('num_services_warn')
crit = host.get('num_services_crit')
pending = host.get('num_services_pending')
unknown = host.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
host['total'] = total
host['problems'] = warn + crit + unknown
try:
total = float(total)
host['health'] = float(ok) / total * 100.0
host['percent_ok'] = ok / total * 100
host['percent_warn'] = warn / total * 100
host['percent_crit'] = crit / total * 100
host['percent_unknown'] = unknown / total * 100
host['percent_pending'] = pending / total * 100
except ZeroDivisionError:
host['health'] = 'n/a'
# Extra statistics for our hostgroups
for hg in c['hostgroups']:
ok = hg.get('num_services_ok')
warn = hg.get('num_services_warn')
crit = hg.get('num_services_crit')
pending = hg.get('num_services_pending')
unknown = hg.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
hg['total'] = total
hg['problems'] = warn + crit + unknown
try:
total = float(total)
            hg['health'] = float(ok) / total * 100.0
hg['percent_ok'] = ok / total * 100
hg['percent_warn'] = warn / total * 100
hg['percent_crit'] = crit / total * 100
hg['percent_unknown'] = unknown / total * 100
hg['percent_pending'] = pending / total * 100
except ZeroDivisionError:
pass
return render_to_response('status_hostgroups.html', c, context_instance=RequestContext(request))
@adagios_decorator
def status_host(request):
""" Here for backwards compatibility """
return hosts(request)
@adagios_decorator
def hosts(request):
c = {}
c['messages'] = []
c['errors'] = []
c['hosts'] = utils.get_hosts(request, **request.GET)
c['host_name'] = request.GET.get('detail', None)
return render_to_response('status_host.html', c, context_instance=RequestContext(request))
@adagios_decorator
def problems(request):
c = {}
c['messages'] = []
c['errors'] = []
search_filter = request.GET.copy()
if 'state__isnot' not in search_filter and 'state' not in search_filter:
search_filter['state__isnot'] = '0'
c['hosts'] = utils.get_hosts(request, **search_filter)
c['services'] = utils.get_services(request, **search_filter)
return render_to_response('status_problems.html', c, context_instance=RequestContext(request))
def get_related_objects(object_id):
my_object = pynag.Model.ObjectDefinition.objects.get_by_id(object_id)
result = []
if my_object.register == '0':
result += my_object.get_effective_children()
return result
if my_object.object_type == 'hostgroup':
result += my_object.get_effective_hostgroups()
result += my_object.get_effective_hosts()
if my_object.object_type == 'contactgroup':
result += my_object.get_effective_contactgroups()
result += my_object.get_effective_contacts()
if my_object.object_type == 'host':
result += my_object.get_effective_network_children()
result += my_object.get_effective_services()
return result
def _add_statistics_to_hosts(hosts):
""" Takes a list of dict hosts, and adds to the list statistics
Following is an example of attributes added to the dicts:
num_services_ok
num_services_warn
problems (number of problems)
health (percent of services ok)
percent_problems
"""
for host in hosts:
ok = host.get('num_services_ok')
warn = host.get('num_services_warn')
crit = host.get('num_services_crit')
pending = host.get('num_services_pending')
unknown = host.get('num_services_unknown')
total = ok + warn + crit + pending + unknown
host['total'] = total
host['problems'] = warn + crit + unknown
host['num_problems'] = warn + crit + unknown
try:
total = float(total)
host['health'] = float(ok) / total * 100.0
host['percent_ok'] = ok / total * 100
host['percent_warn'] = warn / total * 100
host['percent_crit'] = crit / total * 100
host['percent_unknown'] = unknown / total * 100
host['percent_pending'] = pending / total * 100
except ZeroDivisionError:
host['health'] = 'n/a'
host['percent_ok'] = 0
host['percent_warn'] = 0
host['percent_crit'] = 0
host['percent_unknown'] = 0
host['percent_pending'] = 0
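# Sketch (assumed sample data, not in the original source): given livestatus host
# dicts that carry num_services_* counters, the helper above annotates them in place:
#
#     hosts = [{'num_services_ok': 8, 'num_services_warn': 1,
#               'num_services_crit': 1, 'num_services_pending': 0,
#               'num_services_unknown': 0}]
#     _add_statistics_to_hosts(hosts)
#     # hosts[0]['total'] == 10, hosts[0]['problems'] == 2, hosts[0]['health'] == 80.0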
@adagios_decorator
def status_index(request):
c = adagios.status.utils.get_statistics(request)
c['services'] = adagios.status.utils.get_services(request, 'unhandled')
#c['top_alert_producers'] = adagios.status.rest.top_alert_producers(limit=5)
return render_to_response('status_index.html', c, context_instance=RequestContext(request))
@adagios_decorator
def test_livestatus(request):
""" This view is a test on top of mk_livestatus which allows you to enter your own queries """
c = {}
c['messages'] = []
c['table'] = table = request.GET.get('table')
livestatus = adagios.status.utils.livestatus(request)
if table is not None:
columns = livestatus.query('GET columns', 'Filter: table = %s' % table)
c['columns'] = columns
columns = ""
limit = request.GET.get('limit')
run_query = False
for k, v in request.GET.items():
if k == "submit":
run_query = True
if k.startswith('check_'):
columns += " " + k[len("check_"):]
# Any columns checked means we return a query
query = ['GET %s' % table]
if len(columns) > 0:
query.append("Columns: %s" % columns)
if limit != '' and limit > 0:
query.append("Limit: %s" % limit)
if run_query is True:
c['results'] = livestatus.query(*query)
c['query'] = livestatus.last_query
c['header'] = c['results'][0].keys()
return render_to_response('test_livestatus.html', c, context_instance=RequestContext(request))
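# Illustrative note (not in the original source): the query assembled above is a
# plain list of livestatus lines, e.g. ['GET services', 'Columns: host_name state',
# 'Limit: 10'], and livestatus.query(*query) submits them as one livestatus request.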
def _status_combined(request, optimized=False):
""" Returns a combined status of network outages, host problems and service problems
    If optimized is True, fewer attributes are loaded, which makes it run faster but with less data
"""
c = {}
livestatus = adagios.status.utils.livestatus(request)
if optimized == True:
hosts = livestatus.get_hosts(
'Columns: name state acknowledged downtimes childs parents')
services = livestatus.get_services(
'Columns: host_name description state acknowledged downtimes host_state')
else:
hosts = livestatus.get_hosts()
services = livestatus.get_services()
hosts_that_are_down = []
hostnames_that_are_down = []
service_status = [0, 0, 0, 0]
host_status = [0, 0, 0, 0]
parents = []
for host in hosts:
host_status[host["state"]] += 1
if len(host['childs']) > 0:
parents.append(host)
if host['state'] != 0 and host['acknowledged'] == 0 and host['downtimes'] == []:
hostnames_that_are_down.append(host['name'])
hosts_that_are_down.append(host)
network_problems = []
host_problems = []
service_problems = []
# Do nothing if host parent is also down.
for host in hosts_that_are_down:
for i in host['parents']:
if i in hostnames_that_are_down:
break
else:
if len(host['childs']) == 0:
host_problems.append(host)
else:
network_problems.append(host)
for service in services:
service_status[service["state"]] += 1
if service['state'] != 0 and service['acknowledged'] == 0 and len(service['downtimes']) == 0 and service['host_state'] == 0:
service_problems.append(service)
c['network_problems'] = network_problems
c['host_problems'] = host_problems
c['service_problems'] = service_problems
c['hosts'] = hosts
c['services'] = services
c['parents'] = parents
service_totals = float(sum(service_status))
host_totals = float(sum(host_status))
if service_totals == 0:
c['service_status'] = 0
else:
c['service_status'] = map(
lambda x: 100 * x / service_totals, service_status)
if host_totals == 0:
c['host_status'] = 0
else:
c['host_status'] = map(lambda x: 100 * x / host_totals, host_status)
#l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
#c['log'] = reversed(l.get_state_history())
return c
@adagios_decorator
def status_problems(request):
return dashboard(request)
@adagios_decorator
def dashboard(request):
# Get statistics
c = adagios.status.utils.get_statistics(request)
c['messages'] = []
c['errors'] = []
c['host_problems'] = utils.get_hosts(request, state='1', unhandled='', **request.GET)
# Service problems
c['service_problems'] = utils.get_services(request, host_state="0", unhandled='', **request.GET)
# Sort problems by state and last_check as secondary sort field
c['service_problems'].sort(
reverse=True, cmp=lambda a, b: cmp(a['last_check'], b['last_check']))
c['service_problems'].sort(
reverse=True, cmp=lambda a, b: cmp(a['state'], b['state']))
return render_to_response('status_dashboard.html', c, context_instance=RequestContext(request))
@adagios_decorator
def state_history(request):
c = {}
c['messages'] = []
c['errors'] = []
livestatus = adagios.status.utils.livestatus(request)
start_time = request.GET.get('start_time', None)
end_time = request.GET.get('end_time', None)
if end_time is None:
end_time = time.time()
end_time = int(float(end_time))
if start_time is None:
seconds_in_a_day = 60 * 60 * 24
seconds_today = end_time % seconds_in_a_day # midnight of today
start_time = end_time - seconds_today
start_time = int(start_time)
l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
c['log'] = log = l.get_state_history(start_time=start_time, end_time=end_time,strict=False)
total_duration = end_time - start_time
c['total_duration'] = total_duration
css_hint = {}
css_hint[0] = 'success'
css_hint[1] = 'warning'
css_hint[2] = 'danger'
css_hint[3] = 'info'
last_item = None
services = {}
search_filter = request.GET.copy()
search_filter.pop('start_time', None)
search_filter.pop('end_time', None)
search_filter.pop('start_time_picker', None)
search_filter.pop('start_hours', None)
search_filter.pop('end_time_picker', None)
search_filter.pop('end_hours', None)
search_filter.pop('submit', None)
log = pynag.Utils.grep(log, **search_filter)
for i in log:
short_name = "%s/%s" % (i['host_name'], i['service_description'])
if short_name not in services:
s = {}
s['host_name'] = i['host_name']
s['service_description'] = i['service_description']
s['log'] = []
s['worst_logfile_state'] = 0
#s['log'] = [{'time':start_time,'state':3, 'plugin_output':'Unknown value here'}]
services[short_name] = s
services[short_name]['log'].append(i)
services[short_name]['worst_logfile_state'] = max(
services[short_name]['worst_logfile_state'], i['state'])
for service in services.values():
last_item = None
service['sla'] = float(0)
service['num_problems'] = 0
service['duration'] = 0
for i in service['log']:
i['bootstrap_status'] = css_hint[i['state']]
if i['time'] < start_time:
i['time'] = start_time
if last_item is not None:
last_item['end_time'] = i['time']
#last_item['time'] = max(last_item['time'], start_time)
last_item['duration'] = duration = last_item[
'end_time'] - last_item['time']
last_item['duration_percent'] = 100 * float(
duration) / total_duration
service['duration'] += last_item['duration_percent']
if last_item['state'] == 0:
service['sla'] += last_item['duration_percent']
else:
service['num_problems'] += 1
last_item = i
if not last_item is None:
last_item['end_time'] = end_time
last_item['duration'] = duration = last_item[
'end_time'] - last_item['time']
            last_item['duration_percent'] = 100 * float(duration) / total_duration
service['duration'] += last_item['duration_percent']
if last_item['state'] == 0:
service['sla'] += last_item['duration_percent']
else:
service['num_problems'] += 1
c['services'] = services
c['start_time'] = start_time
c['end_time'] = end_time
return render_to_response('state_history.html', c, context_instance=RequestContext(request))
def _status_log(request):
""" Helper function to any status view that requires log access """
c = {}
c['messages'] = []
c['errors'] = []
start_time = request.GET.get('start_time', '')
end_time = request.GET.get('end_time', '')
host_name = request.GET.get('host_name', '')
service_description = request.GET.get('service_description', '')
limit = request.GET.get('limit', '')
if end_time == '':
end_time = None
else:
end_time = float(end_time)
if start_time == '':
now = time.time()
seconds_in_a_day = 60 * 60 * 24
seconds_today = now % seconds_in_a_day # midnight of today
start_time = now - seconds_today
else:
start_time = float(start_time)
if limit == '':
limit = 2000
else:
limit = int(limit)
# Any querystring parameters we will treat as a search string to get_log_entries, but we need to massage them
# a little bit first
kwargs = {}
for k, v in request.GET.items():
if k == 'search':
k = 'search'
elif k in (
'start_time', 'end_time', 'start_time_picker', 'end_time_picker', 'limit',
'start_hours', 'end_hours'):
continue
elif v is None or len(v) == 0:
continue
k = str(k)
v = str(v)
kwargs[k] = v
l = pynag.Parsers.LogFiles(maincfg=adagios.settings.nagios_config)
c['log'] = l.get_log_entries(
start_time=start_time, end_time=end_time, **kwargs)[-limit:]
c['log'].reverse()
c['logs'] = {'all': []}
for line in c['log']:
if line['class_name'] not in c['logs'].keys():
c['logs'][line['class_name']] = []
c['logs'][line['class_name']].append(line)
c['logs']['all'].append(line)
c['start_time'] = start_time
c['end_time'] = end_time
return c
@adagios_decorator
def log(request):
c = _status_log(request)
c['request'] = request
c['log'].reverse()
return render_to_response('status_log.html', c, context_instance=RequestContext(request))
@adagios_decorator
def comment_list(request):
""" Display a list of all comments """
c = {}
c['messages'] = []
c['errors'] = []
l = adagios.status.utils.livestatus(request)
args = pynag.Utils.grep_to_livestatus(**request.GET)
c['comments'] = l.query('GET comments', *args)
return render_to_response('status_comments.html', c, context_instance=RequestContext(request))
@adagios_decorator
def downtime_list(request):
""" Display a list of all comments """
c = {}
c['messages'] = []
c['errors'] = []
l = adagios.status.utils.livestatus(request)
args = pynag.Utils.grep_to_livestatus(**request.GET)
c['downtimes'] = l.query('GET downtimes', *args)
return render_to_response('status_downtimes.html', c, context_instance=RequestContext(request))
@adagios_decorator
def acknowledgement_list(request):
""" Display a list of all comments """
c = {}
c['messages'] = []
c['errors'] = []
l = adagios.status.utils.livestatus(request)
args = pynag.Utils.grep_to_livestatus(**request.GET)
c['acknowledgements'] = l.query('GET comments', 'Filter: entry_type = 4', *args)
return render_to_response('status_acknowledgements.html', c, context_instance=RequestContext(request))
@adagios_decorator
def perfdata(request):
""" Display a list of perfdata
"""
c = {}
c['messages'] = []
c['errors'] = []
fields = "host_name description perf_data state host_state scheduled_downtime_depth host_scheduled_downtime_depth host_acknowledged acknowledged downtimes host_downtimes".split()
perfdata = utils.get_services(request, fields=fields, **request.GET)
for i in perfdata:
metrics = pynag.Utils.PerfData(i['perf_data']).metrics
metrics = filter(lambda x: x.is_valid(), metrics)
i['metrics'] = metrics
c['perfdata'] = perfdata
return render_to_response('status_perfdata.html', c, context_instance=RequestContext(request))
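# Sketch (assumed sample data, not part of the original source): how a Nagios
# perf_data string is broken into metrics by pynag, as done in the view above.
def _example_parse_perfdata(perf_data="time=0.05s;1;5;0 size=512B;;;0"):
    """ Illustrative only: return the valid metrics found in perf_data. """
    metrics = pynag.Utils.PerfData(perf_data).metrics
    # each metric carries label/value/uom and the warn/crit thresholds parsed above
    return [m for m in metrics if m.is_valid()]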
@adagios_decorator
def contact_list(request):
""" Display a list of active contacts
"""
c = {}
c['messages'] = []
c['errors'] = []
c['contacts'] = adagios.status.utils.get_contacts(request, **request.GET)
return render_to_response('status_contacts.html', c, context_instance=RequestContext(request))
@adagios_decorator
def contact_detail(request, contact_name):
""" Detailed information for one specific contact
"""
c = {}
c['messages'] = []
c['errors'] = []
c['contact_name'] = contact_name
l = adagios.status.utils.livestatus(request)
backend = request.GET.get('backend', None)
# Fetch contact and basic information
try:
contact = l.get_contact(contact_name, backend)
c['contact'] = contact
except IndexError:
raise Exception("Contact named '%s' was not found." % contact_name)
# Active comments
c['comments'] = l.query(
'GET comments', 'Filter: comment ~ %s' % contact_name,)
for i in c['comments']:
if i.get('type') == 1:
i['state'] = i['host_state']
else:
i['state'] = i['service_state']
# Services this contact can see
c['services'] = l.query(
'GET services', "Filter: contacts >= %s" % contact_name)
# Activity log
c['log'] = pynag.Parsers.LogFiles(
maincfg=adagios.settings.nagios_config).get_log_entries(search=str(contact_name))
# Contact groups
c['groups'] = l.query(
'GET contactgroups', 'Filter: members >= %s' % contact_name)
# Git audit logs
nagiosdir = dirname(adagios.settings.nagios_config or pynag.Model.config.guess_cfg_file())
git = pynag.Utils.GitRepo(directory=nagiosdir)
c['gitlog'] = git.log(author_name=contact_name)
return render_to_response('status_contact.html', c, context_instance=RequestContext(request))
@adagios_decorator
def map_view(request):
c = {}
livestatus = adagios.status.utils.livestatus(request)
c['hosts'] = livestatus.get_hosts()
c['map_center'] = adagios.settings.map_center
c['map_zoom'] = adagios.settings.map_zoom
return render_to_response('status_map.html', c, context_instance=RequestContext(request))
@adagios_decorator
def servicegroup_detail(request, servicegroup_name):
""" Detailed information for one specific servicegroup """
c = {}
c['messages'] = []
c['errors'] = []
c['servicegroup_name'] = servicegroup_name
search_conditions = request.GET.copy()
search_conditions.pop('servicegroup_name')
c['services'] = adagios.status.utils.get_services(request, groups__has_field=servicegroup_name, **search_conditions)
return render_to_response('status_servicegroup.html', c, context_instance=RequestContext(request))
@adagios_decorator
def contactgroups(request):
""" Display a list of active contacts
"""
c = {}
c['messages'] = []
c['errors'] = []
l = adagios.status.utils.livestatus(request)
c['contactgroups'] = l.get_contactgroups(**request.GET)
return render_to_response('status_contactgroups.html', c, context_instance=RequestContext(request))
@adagios_decorator
def contactgroup_detail(request, contactgroup_name):
""" Detailed information for one specific contactgroup
"""
c = {}
c['messages'] = []
c['errors'] = []
c['contactgroup_name'] = contactgroup_name
l = adagios.status.utils.livestatus(request)
# Fetch contact and basic information
result = l.query("GET contactgroups", "Filter: name = %s" %
contactgroup_name)
if result == []:
c['errors'].append(
"Contactgroup named '%s' was not found." % contactgroup_name)
else:
contactgroup = result[0]
c['contactgroup'] = contactgroup
    # Services this contactgroup can see
    c['services'] = l.query(
        'GET services', "Filter: contact_groups >= %s" % contactgroup_name)
    # Hosts this contactgroup can see
    c['hosts'] = l.query(
        'GET hosts', "Filter: contact_groups >= %s" % contactgroup_name)
# Contact groups
#c['contacts'] = l.query('GET contacts', 'Filter: contactgroup_ >= %s' % contact_name)
return render_to_response('status_contactgroup.html', c, context_instance=RequestContext(request))
@adagios_decorator
def perfdata2(request):
""" Just a test method, feel free to remove it
"""
c = {}
c['messages'] = []
c['errors'] = []
columns = 'Columns: host_name description perf_data state host_state'
l = adagios.status.utils.livestatus(request)
# User can specify from querystring a filter of which services to fetch
# we convert querystring into livestatus filters.
# User can also specify specific metrics to watch, so we extract from
# querystring as well
querystring = request.GET.copy()
interesting_metrics = querystring.pop('metrics', [''])[0].strip(',')
arguments = pynag.Utils.grep_to_livestatus(**querystring)
if not arguments:
services = []
else:
services = l.query('GET services', columns, *arguments)
# If no metrics= was specified on querystring, we take the string
# from first service in our search result
if not interesting_metrics and services:
metric_set = set()
for i in services:
perfdata = pynag.Utils.PerfData(i.get('perf_data', ''))
map(lambda x: metric_set.add(x.label), perfdata.metrics)
interesting_metrics = sorted(list(metric_set))
else:
interesting_metrics = interesting_metrics.split(',')
# Iterate through all the services and parse perfdata
for service in services:
perfdata = pynag.Utils.PerfData(service['perf_data'])
null_metric = pynag.Utils.PerfDataMetric()
metrics = map(lambda x: perfdata.get_perfdatametric(
x) or null_metric, interesting_metrics)
#metrics = filter(lambda x: x.is_valid(), metrics)
service['metrics'] = metrics
c['metrics'] = interesting_metrics
c['services'] = services
return render_to_response('status_perfdata2.html', c, context_instance=RequestContext(request))
def acknowledge(request):
""" Acknowledge
"""
if request.method != 'POST':
raise Exception("Only use POST to this url")
sticky = request.POST.get('sticky', 1)
persistent = request.POST.get('persistent', 0)
author = request.META.get('REMOTE_USER', 'anonymous')
comment = request.POST.get('comment', 'acknowledged by Adagios')
hostlist = request.POST.getlist('host', [])
servicelist = request.POST.getlist('service', [])
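    # --- Completion sketch (not in the original source): as written, the view
    # collects the POST parameters above but never submits an acknowledgement nor
    # returns a response.  The hedged completion below uses pynag's external-command
    # helpers; it ASSUMES each servicelist entry is "host_name;service_description".
    for host_name in hostlist:
        pynag.Control.Command.acknowledge_host_problem(
            host_name=host_name, sticky=sticky, notify=1,
            persistent=persistent, author=author, comment=comment)
    for service in servicelist:
        host_name, service_description = service.split(';', 1)
        pynag.Control.Command.acknowledge_svc_problem(
            host_name=host_name, service_description=service_description,
            sticky=sticky, notify=1, persistent=persistent,
            author=author, comment=comment)
    return HttpResponse("ok")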
@adagios_decorator
def status_hostgroup(request, hostgroup_name):
""" Here for backwards compatibility """
return hostgroup_detail(request, hostgroup_name=hostgroup_name)
@adagios_decorator
def status_detail(request):
""" Here for backwards compatibility """
return detail(request)
@adagios_decorator
def backends(request):
""" Display a list of available backends and their connection status """
livestatus = adagios.status.utils.livestatus(request)
backends = livestatus.get_backends()
for i, v in backends.items():
v.test(raise_error=False)
return render_to_response('status_backends.html', locals(), context_instance=RequestContext(request))
########NEW FILE########
__FILENAME__ = urls
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls.defaults import *
from django.conf import settings
from django.views.static import serve
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
url(r'^$', 'adagios.views.index', name="home"),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}, name="media"),
url(r'^403', 'adagios.views.http_403'),
url(r'^objectbrowser', include('adagios.objectbrowser.urls')),
url(r'^misc', include('adagios.misc.urls')),
url(r'^pnp', include('adagios.pnp.urls')),
url(r'^media(?P<path>.*)$', serve, {'document_root': settings.MEDIA_ROOT }),
url(r'^rest', include('adagios.rest.urls')),
url(r'^contrib', include('adagios.contrib.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
# Internationalization
url(r'^jsi18n/$', 'django.views.i18n.javascript_catalog'),
)
########NEW FILE########
__FILENAME__ = userdata
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Matthieu Caneill <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import json
import collections
import settings
class User(object):
""" Handles authentified users, provides preferences management. """
def __init__(self, request, autosave=False):
""" Instantiates one user's preferences.
Args:
request (Request): The incoming Django request.
Kwargs:
autosave (bool): if True, preferences are automatically saved.
"""
self._request = request
self._autosave = autosave
try:
self._username = request.META.get('REMOTE_USER', 'anonymous')
except Exception:
self._username = 'anonymous'
self._conffile = self._get_prefs_location()
self._check_path(self._conffile)
# sets the preferences as attributes:
for k, v in self._get_conf().iteritems():
self.__dict__[k] = v
def _check_path(self, path):
""" Checks the userdata folder, try to create it if it doesn't
exist."""
folder = os.path.dirname(path)
# does the folder exist?
if not os.path.isdir(folder):
try:
os.makedirs(folder)
except:
raise Exception("Folder %s can't be created. Be sure Adagios "
"has write access on its parent." % folder)
def _get_prefs_location(self):
""" Returns the location of the preferences file of the
specified user. """
try:
user_prefs_path = settings.USER_PREFS_PATH
except:
raise Exception('You must define USER_PREFS_PATH in settings.py')
return os.path.join(user_prefs_path, self._username + '.json')
def _get_default_conf(self):
try:
d = settings.PREFS_DEFAULT
except:
d = dict()
return d
def _get_conf(self):
""" Returns the json preferences for the specified user. """
try:
with open(self._conffile) as f:
conf = json.loads(f.read())
except IOError:
conf = self._get_default_conf()
except ValueError:
conf = self._get_default_conf()
return conf
def __getattr__(self, name):
""" Provides None as a default value. """
if name not in self.__dict__.keys():
return None
return self.__dict__[name]
def __setattr__(self, name, value):
""" Saves the preferences if autosave is set. """
self.__dict__[name] = value
if self._autosave and not name.startswith('_'):
self.save()
def set_pref(self, name, value):
""" Explicitly sets a user preference. """
self.__dict__[name] = value
def to_dict(self):
d = {}
for k in filter(lambda x: not(x.startswith('_')), self.__dict__.keys()):
d[k] = self.__dict__[k]
return d
def save(self):
""" Saves the preferences in JSON format. """
d = self.to_dict()
try:
with open(self._conffile, 'w') as f:
f.write(json.dumps(d))
except IOError:
raise Exception("Couldn't write settings into file %s. Be sure to "
"have write permissions on the parent folder."
% self._conffile)
self.trigger_hooks()
def trigger_hooks(self):
""" Triggers the hooks when preferences are changed. """
# language preference
from django.utils import translation
try:
self._request.session['django_language'] = self.language
# newer versions of Django: s/django_language/_language
translation.activate(self.language)
except Exception as e:
pass
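# Sketch (not part of the original module): intended use of User inside a view.
def _example_user_prefs(request):
    """ Illustrative only: read and persist per-user preferences. """
    user = User(request, autosave=True)   # autosave writes on every change
    user.language = 'en'                  # persisted immediately via __setattr__
    return user.to_dict()                 # plain dict of all saved preferences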
########NEW FILE########
__FILENAME__ = utils
#!/usr/bin/env python
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import adagios.status.utils
import time
import adagios
import pynag.Model
import adagios.exceptions
import adagios.settings
import os
import pynag.Utils.misc
from django.utils.translation import ugettext as _
def wait(object_type, WaitObject, WaitCondition, WaitTrigger, **kwargs):
livestatus = adagios.status.utils.livestatus(None)
livestatus.get(object_type, WaitObject=WaitObject, WaitCondition=WaitCondition, WaitTrigger=WaitTrigger, **kwargs)
print WaitObject
def wait_for_objects(object_type, object_list, condition=None, trigger='check'):
if not condition:
condition = "last_check > %s" % int(0)
callback = lambda x: wait(object_type, WaitObject=x, WaitCondition=condition, WaitTrigger=trigger)
for WaitObject in object_list:
callback(WaitObject)
def wait_for_service(host_name, service_description, condition='last_check >= 0', trigger='check'):
livestatus = adagios.status.utils.livestatus(None)
waitobject = "%s;%s" % (host_name, service_description)
livestatus.get_services(
host_name=host_name,
service_description=service_description,
WaitCondition=condition,
WaitObject=waitobject
)
from multiprocessing.pool import ThreadPool
class Task(object):
def __init__(self, num_processes=5):
self._tasks = []
adagios.tasks.append(self)
self._pool = ThreadPool(processes=num_processes)
def add(self, function, *args, **kwargs):
print "Adding Task:", locals()
result = self._pool.apply_async(function, args, kwargs)
self._tasks.append(result)
#print result.get()
def status(self):
all_tasks = self._tasks
for i in all_tasks:
print i.ready()
completed_tasks = filter(lambda x: x.ready(), all_tasks)
return "{done}/{total} done.".format(done=len(completed_tasks), total=len(all_tasks))
def get_id(self):
return hash(self)
def ready(self):
""" Returns True if all the Tasks in this class have finished running. """
        return all(map(lambda x: x.ready(), self._tasks))
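# Sketch (not part of the original module): Task fans work out to a thread pool
# and can be polled for progress.
def _example_background_task():
    """ Illustrative only: run two sleeps in the background and poll them. """
    task = Task(num_processes=2)
    task.add(time.sleep, 1)
    task.add(time.sleep, 2)
    return task.status()   # e.g. "0/2 done." right after submission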
def update_eventhandlers(request):
""" Iterates through all pynag eventhandler and informs them who might be making a change
"""
remote_user = request.META.get('REMOTE_USER', 'anonymous')
for i in pynag.Model.eventhandlers:
i.modified_by = remote_user
# if okconfig is installed, make sure okconfig is notified of git
# settings
try:
from pynag.Utils import GitRepo
import okconfig
okconfig.git = GitRepo(directory=os.path.dirname(
adagios.settings.nagios_config), auto_init=False, author_name=remote_user)
except Exception:
pass
def get_available_themes():
""" Returns a tuple with the name of themes that are available in media/theme directory """
theme_dir = os.path.join(adagios.settings.MEDIA_ROOT, adagios.settings.THEMES_FOLDER)
result = []
for root, dirs, files in os.walk(theme_dir):
if adagios.settings.THEME_ENTRY_POINT in files:
result.append(os.path.basename(root))
return result
def reload_config_file(adagios_configfile=None):
""" Reloads adagios.conf and populates updates adagios.settings accordingly.
Args:
adagios_configfile: Full path to adagios.conf. If None then use settings.adagios_configfile
"""
if not adagios_configfile:
adagios_configfile = adagios.settings.adagios_configfile
# Using execfile might not be optimal outside strict settings.py usage, but
# lets do things exactly like settings.py does it.
execfile(adagios_configfile)
config_values = locals()
adagios.settings.__dict__.update(config_values)
class FakeAdagiosEnvironment(pynag.Utils.misc.FakeNagiosEnvironment):
_adagios_settings_copy = None
def __init__(self, *args, **kwargs):
super(FakeAdagiosEnvironment, self).__init__(*args, **kwargs)
def update_adagios_global_variables(self):
""" Updates common adagios.settings to point to a temp directory.
        If you are doing unit tests which require specific changes, feel free to update
adagios.settings manually after calling this method.
"""
self._adagios_settings_copy = adagios.settings.__dict__.copy()
adagios.settings.adagios_configfile = self.adagios_config_file
adagios.settings.USER_PREFS_PATH = self.adagios_config_dir + "/userdata"
adagios.settings.nagios_config = self.cfg_file
adagios.settings.livestatus_path = self.livestatus_socket_path
reload_config_file(self.adagios_config_file)
def restore_adagios_global_variables(self):
""" Restores adagios.settings so it looks like before update_adagios_global_variables() was called
"""
adagios.settings.__dict__.clear()
adagios.settings.__dict__.update(self._adagios_settings_copy)
def create_minimal_environment(self):
""" Behaves like FakeNagiosEnvironment except also creates adagios config directory """
super(FakeAdagiosEnvironment, self).create_minimal_environment()
self.adagios_config_dir = os.path.join(self.tempdir, 'adagios')
self.adagios_config_file = os.path.join(self.adagios_config_dir, 'adagios.conf')
os.makedirs(self.adagios_config_dir)
with open(self.adagios_config_file, 'w') as f:
f.write('')
def terminate(self):
""" Behaves like FakeNagiosEnvironment except also restores adagios.settings module """
if self._adagios_settings_copy:
self.restore_adagios_global_variables()
super(FakeAdagiosEnvironment, self).terminate()
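# Sketch (assumed workflow, not part of the original module): how the fake
# environment is meant to wrap a unit test run.
def _example_fake_environment():
    """ Illustrative only: spin up a temporary adagios/nagios environment. """
    env = FakeAdagiosEnvironment()
    env.create_minimal_environment()        # temp nagios + adagios config
    env.update_adagios_global_variables()   # point adagios.settings at it
    try:
        pass  # ... exercise adagios code against the fake environment ...
    finally:
        env.terminate()                     # restores adagios.settings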
########NEW FILE########
__FILENAME__ = views
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.http import HttpResponse
import traceback
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext, loader
from django import template
from django.utils.translation import ugettext as _
import time
import logging
import adagios.settings
import adagios.utils
from adagios.exceptions import AccessDenied
def adagios_decorator(view_func):
""" This is a python decorator intented for all views in the status module.
It catches all unhandled exceptions and displays them on a generic web page.
Kind of what the django exception page does when debug mode is on.
"""
def wrapper(request, *args, **kwargs):
start_time = time.time()
try:
if request.method == 'POST':
adagios.utils.update_eventhandlers(request)
result = view_func(request, *args, **kwargs)
end_time = time.time()
time_now = time.ctime()
duration = end_time - start_time
return result
except Exception, e:
c = {}
c['exception'] = str(e)
c['exception_type'] = str(type(e).__name__)
c['traceback'] = traceback.format_exc()
return error_page(request, context=c)
wrapper.__name__ = view_func.__name__
wrapper.__module__ = view_func.__module__
return wrapper
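# Illustrative note (not in the original source): any view wrapped with
# @adagios_decorator gets unhandled exceptions rendered through error_page()
# instead of a bare 500, e.g.
#
#     @adagios_decorator
#     def my_view(request):
#         raise ValueError("boom")   # shown via status_error.html with traceback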
def error_page(request, context=None):
if context is None:
context = {}
context['errors'] = []
        context['errors'].append('Error occurred, but no error messages provided, what happened?')
if request.META.get('CONTENT_TYPE') == 'application/json':
context.pop('request', None)
content = str(context)
response = HttpResponse(content=content, content_type='application/json')
else:
response = render_to_response('status_error.html', context, context_instance=RequestContext(request))
response.status_code = 500
return response
def index(request):
""" This view is our frontpage """
# If status view is enabled, redirect to frontpage of the status page:
if adagios.settings.enable_status_view:
return redirect('adagios.status.views.status_index', permanent=True)
else:
return redirect('objectbrowser', permanent=True)
def http_403(request, exception=None):
context = {}
context['exception'] = exception
if request.META.get('CONTENT_TYPE') == 'application/json':
c = {}
c['exception_type'] = exception.__class__
c['message'] = str(exception.message)
c['access_required'] = exception.access_required
response = HttpResponse(content=str(c), content_type='application/json')
else:
response = render_to_response('403.html', context, context_instance=RequestContext(request))
response.status_code = 403
return response
########NEW FILE########
__FILENAME__ = wsgi
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'adagios.settings'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
########NEW FILE########
__FILENAME__ = static_businessprocess
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Adagios is a web based Nagios configuration interface
#
# Copyright (C) 2014, Pall Sigurdsson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
static_businessprocess .. This script loads a business process and statically writes an HTML view for it
"""
#source_template = "/usr/lib/python2.6/site-packages/adagios/status/templates/business_process_view.html"
source_template = "/etc/adagios/pages.d/bi_process.html"
destination_directory = "/var/www/iceland.adagios.org"
pnp_parameters = "&graph_width=350&graph_height=30"
import os
os.environ['DJANGO_SETTINGS_MODULE'] = "adagios.settings"
import simplejson as json
from django.shortcuts import render
from django import template
from django.test.client import Client
from optparse import OptionParser
import adagios.bi
import django.http
from adagios.pnp.functions import run_pnp
# Start by parsing some arguments
parser = OptionParser(usage="usage: %prog [options]", version="%prog 1.0")
parser.add_option('--all', help="Parse all business processes", dest="all", action="store_true", default=False)
parser.add_option('--graphs', help="", dest="graphs", action="store_true", default=False)
parser.add_option('--destination', help="destination to write static html into", dest="destination", default=destination_directory)
parser.add_option('--source-template', help="Source template used to render business processes", dest="source", default=source_template)
parser.add_option('--verbose', help="verbose output", dest="verbose", action="store_true", default=False)
(options, args) = parser.parse_args()
def verbose(message):
if options.verbose:
print message
def businessprocess_to_html(process_name, process_type='businessprocess'):
bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
verbose("Rendering business process %s" % bp.name)
c = {}
c['bp'] = bp
c['csrf_token'] = ''
c['graphs_url'] = "graphs.json"
c['static'] = True
directory = "%s/%s" % (options.destination, bp.name)
if not os.path.exists(directory):
os.makedirs(directory)
if options.graphs:
graphs = bi_graphs_to_json(process_name, process_type)
for i in graphs:
url = i.get('image_url')
client = Client()
verbose("Saving image %s" % url)
image = client.get("/pnp/image?%s&%s" % (url, pnp_parameters)).content
graph_filename = "%s/%s.png" % (directory, url)
open(graph_filename, 'w').write(image)
graph_json_file = "%s/graphs.json" % (directory)
for i in graphs:
i['image_url'] = i['image_url'] + '.png'
graph_json = json.dumps(graphs, indent=4)
open(graph_json_file, 'w').write(graph_json)
content = open(options.source, 'r').read()
t = template.Template(content)
c = template.Context(c)
html = t.render(c)
destination_file = "%s/index.html" % directory
open(destination_file, 'w').write(html.encode('utf-8'))
def bi_graphs_to_json(process_name, process_type='businessprocess'):
c = {}
c['messages'] = []
c['errors'] = []
bp = adagios.bi.get_business_process(process_name=process_name, process_type=process_type)
graphs = []
if not bp.graphs:
return []
for graph in bp.graphs or []:
if graph.get('graph_type') == 'pnp':
host_name = graph.get('host_name')
service_description = graph.get('service_description')
metric_name = graph.get('metric_name')
pnp_result = run_pnp('json', host=graph.get('host_name'), srv=graph.get('service_description'))
json_data = json.loads(pnp_result)
for i in json_data:
if i.get('ds_name') == graph.get('metric_name'):
notes = graph.get('notes')
last_value = bp.get_pnp_last_value(host_name, service_description, metric_name)
i['last_value'] = last_value
i['notes'] = notes
graphs.append(i)
return graphs
if options.all:
processlist = adagios.bi.get_all_process_names()
else:
processlist = args
if not processlist:
parser.error("Either provide business process name or specify --all")
for i in processlist:
print "doing ", i
businessprocess_to_html(i)
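# Illustrative note (assumed invocation, filename taken from __FILENAME__ above):
# the script is meant to be run from the command line, e.g.
#
#     python static_businessprocess.py --all --graphs --destination /var/www/bi
#
# which renders every business process into <destination>/<process name>/index.html.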
########NEW FILE########
# testing/test_collection.py (geraldoandradee/pytest, MIT license)
import pytest, py
from _pytest.main import Session
class TestCollector:
def test_collect_versus_item(self):
from pytest import Collector, Item
assert not issubclass(Collector, Item)
assert not issubclass(Item, Collector)
def test_compat_attributes(self, testdir, recwarn):
modcol = testdir.getmodulecol("""
def test_pass(): pass
def test_fail(): assert 0
""")
recwarn.clear()
assert modcol.Module == pytest.Module
assert modcol.Class == pytest.Class
assert modcol.Item == pytest.Item
assert modcol.File == pytest.File
assert modcol.Function == pytest.Function
def test_check_equality(self, testdir):
modcol = testdir.getmodulecol("""
def test_pass(): pass
def test_fail(): assert 0
""")
fn1 = testdir.collect_by_name(modcol, "test_pass")
assert isinstance(fn1, pytest.Function)
fn2 = testdir.collect_by_name(modcol, "test_pass")
assert isinstance(fn2, pytest.Function)
assert fn1 == fn2
assert fn1 != modcol
if py.std.sys.version_info < (3, 0):
assert cmp(fn1, fn2) == 0
assert hash(fn1) == hash(fn2)
fn3 = testdir.collect_by_name(modcol, "test_fail")
assert isinstance(fn3, pytest.Function)
assert not (fn1 == fn3)
assert fn1 != fn3
for fn in fn1,fn2,fn3:
assert fn != 3
assert fn != modcol
assert fn != [1,2,3]
assert [1,2,3] != fn
assert modcol != fn
def test_getparent(self, testdir):
modcol = testdir.getmodulecol("""
class TestClass:
def test_foo():
pass
""")
cls = testdir.collect_by_name(modcol, "TestClass")
fn = testdir.collect_by_name(
testdir.collect_by_name(cls, "()"), "test_foo")
parent = fn.getparent(pytest.Module)
assert parent is modcol
parent = fn.getparent(pytest.Function)
assert parent is fn
parent = fn.getparent(pytest.Class)
assert parent is cls
def test_getcustomfile_roundtrip(self, testdir):
hello = testdir.makefile(".xxx", hello="world")
testdir.makepyfile(conftest="""
import pytest
class CustomFile(pytest.File):
pass
def pytest_collect_file(path, parent):
if path.ext == ".xxx":
return CustomFile(path, parent=parent)
""")
node = testdir.getpathnode(hello)
assert isinstance(node, pytest.File)
assert node.name == "hello.xxx"
nodes = node.session.perform_collect([node.nodeid], genitems=False)
assert len(nodes) == 1
assert isinstance(nodes[0], pytest.File)
class TestCollectFS:
def test_ignored_certain_directories(self, testdir):
tmpdir = testdir.tmpdir
tmpdir.ensure("_darcs", 'test_notfound.py')
tmpdir.ensure("CVS", 'test_notfound.py')
tmpdir.ensure("{arch}", 'test_notfound.py')
tmpdir.ensure(".whatever", 'test_notfound.py')
tmpdir.ensure(".bzr", 'test_notfound.py')
tmpdir.ensure("normal", 'test_found.py')
for x in tmpdir.visit("test_*.py"):
x.write("def test_hello(): pass")
result = testdir.runpytest("--collect-only")
s = result.stdout.str()
assert "test_notfound" not in s
assert "test_found" in s
def test_custom_norecursedirs(self, testdir):
testdir.makeini("""
[pytest]
norecursedirs = mydir xyz*
""")
tmpdir = testdir.tmpdir
tmpdir.ensure("mydir", "test_hello.py").write("def test_1(): pass")
tmpdir.ensure("xyz123", "test_2.py").write("def test_2(): 0/0")
tmpdir.ensure("xy", "test_ok.py").write("def test_3(): pass")
rec = testdir.inline_run()
rec.assertoutcome(passed=1)
rec = testdir.inline_run("xyz123/test_2.py")
rec.assertoutcome(failed=1)
class TestCollectPluginHookRelay:
def test_pytest_collect_file(self, testdir):
wascalled = []
class Plugin:
def pytest_collect_file(self, path, parent):
wascalled.append(path)
testdir.makefile(".abc", "xyz")
pytest.main([testdir.tmpdir], plugins=[Plugin()])
assert len(wascalled) == 1
assert wascalled[0].ext == '.abc'
def test_pytest_collect_directory(self, testdir):
wascalled = []
class Plugin:
def pytest_collect_directory(self, path, parent):
wascalled.append(path.basename)
testdir.mkdir("hello")
testdir.mkdir("world")
pytest.main(testdir.tmpdir, plugins=[Plugin()])
assert "hello" in wascalled
assert "world" in wascalled
class TestPrunetraceback:
def test_collection_error(self, testdir):
p = testdir.makepyfile("""
import not_exists
""")
result = testdir.runpytest(p)
assert "__import__" not in result.stdout.str(), "too long traceback"
result.stdout.fnmatch_lines([
"*ERROR collecting*",
"*mport*not_exists*"
])
def test_custom_repr_failure(self, testdir):
p = testdir.makepyfile("""
import not_exists
""")
testdir.makeconftest("""
import pytest
def pytest_collect_file(path, parent):
return MyFile(path, parent)
class MyError(Exception):
pass
class MyFile(pytest.File):
def collect(self):
raise MyError()
def repr_failure(self, excinfo):
if excinfo.errisinstance(MyError):
return "hello world"
return pytest.File.repr_failure(self, excinfo)
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*ERROR collecting*",
"*hello world*",
])
@pytest.mark.xfail(reason="other mechanism for adding to reporting needed")
def test_collect_report_postprocessing(self, testdir):
p = testdir.makepyfile("""
import not_exists
""")
testdir.makeconftest("""
import pytest
def pytest_make_collect_report(__multicall__):
rep = __multicall__.execute()
rep.headerlines += ["header1"]
return rep
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*ERROR collecting*",
"*header1*",
])
class TestCustomConftests:
def test_ignore_collect_path(self, testdir):
testdir.makeconftest("""
def pytest_ignore_collect(path, config):
return path.basename.startswith("x") or \
path.basename == "test_one.py"
""")
sub = testdir.mkdir("xy123")
sub.ensure("test_hello.py").write("syntax error")
sub.join("conftest.py").write("syntax error")
testdir.makepyfile("def test_hello(): pass")
testdir.makepyfile(test_one="syntax error")
result = testdir.runpytest("--fulltrace")
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])
def test_ignore_collect_not_called_on_argument(self, testdir):
testdir.makeconftest("""
def pytest_ignore_collect(path, config):
return True
""")
p = testdir.makepyfile("def test_hello(): pass")
result = testdir.runpytest(p)
assert result.ret == 0
assert "1 passed" in result.stdout.str()
result = testdir.runpytest()
assert result.ret == 0
assert "1 passed" not in result.stdout.str()
def test_collectignore_exclude_on_option(self, testdir):
testdir.makeconftest("""
collect_ignore = ['hello', 'test_world.py']
def pytest_addoption(parser):
parser.addoption("--XX", action="store_true", default=False)
def pytest_configure(config):
if config.getvalue("XX"):
collect_ignore[:] = []
""")
testdir.mkdir("hello")
testdir.makepyfile(test_world="def test_hello(): pass")
result = testdir.runpytest()
assert result.ret == 0
assert "passed" not in result.stdout.str()
result = testdir.runpytest("--XX")
assert result.ret == 0
assert "passed" in result.stdout.str()
def test_pytest_fs_collect_hooks_are_seen(self, testdir):
conf = testdir.makeconftest("""
import pytest
class MyModule(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule(path, parent)
""")
sub = testdir.mkdir("sub")
p = testdir.makepyfile("def test_x(): pass")
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*MyModule*",
"*test_x*"
])
def test_pytest_collect_file_from_sister_dir(self, testdir):
sub1 = testdir.mkpydir("sub1")
sub2 = testdir.mkpydir("sub2")
conf1 = testdir.makeconftest("""
import pytest
class MyModule1(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule1(path, parent)
""")
conf1.move(sub1.join(conf1.basename))
conf2 = testdir.makeconftest("""
import pytest
class MyModule2(pytest.Module):
pass
def pytest_collect_file(path, parent):
if path.ext == ".py":
return MyModule2(path, parent)
""")
conf2.move(sub2.join(conf2.basename))
p = testdir.makepyfile("def test_x(): pass")
p.copy(sub1.join(p.basename))
p.copy(sub2.join(p.basename))
result = testdir.runpytest("--collect-only")
result.stdout.fnmatch_lines([
"*MyModule1*",
"*MyModule2*",
"*test_x*"
])
class TestSession:
def test_parsearg(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
subdir = testdir.mkdir("sub")
subdir.ensure("__init__.py")
target = subdir.join(p.basename)
p.move(target)
testdir.chdir()
subdir.chdir()
config = testdir.parseconfig(p.basename)
rcol = Session(config=config)
assert rcol.fspath == subdir
parts = rcol._parsearg(p.basename)
assert parts[0] == target
assert len(parts) == 1
parts = rcol._parsearg(p.basename + "::test_func")
assert parts[0] == target
assert parts[1] == "test_func"
assert len(parts) == 2
def test_collect_topdir(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
id = "::".join([p.basename, "test_func"])
# XXX migrate to inline_genitems? (see below)
config = testdir.parseconfig(id)
topdir = testdir.tmpdir
rcol = Session(config)
assert topdir == rcol.fspath
rootid = rcol.nodeid
#root2 = rcol.perform_collect([rcol.nodeid], genitems=False)[0]
#assert root2 == rcol, rootid
colitems = rcol.perform_collect([rcol.nodeid], genitems=False)
assert len(colitems) == 1
assert colitems[0].fspath == p
def test_collect_protocol_single_function(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
id = "::".join([p.basename, "test_func"])
topdir = testdir.tmpdir
items, hookrec = testdir.inline_genitems(id)
item, = items
assert item.name == "test_func"
newid = item.nodeid
assert newid == id
py.std.pprint.pprint(hookrec.hookrecorder.calls)
hookrec.hookrecorder.contains([
("pytest_collectstart", "collector.fspath == topdir"),
("pytest_make_collect_report", "collector.fspath == topdir"),
("pytest_collectstart", "collector.fspath == p"),
("pytest_make_collect_report", "collector.fspath == p"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith(p.basename)"),
("pytest_collectreport", "report.nodeid == '.'")
])
def test_collect_protocol_method(self, testdir):
p = testdir.makepyfile("""
class TestClass:
def test_method(self):
pass
""")
normid = p.basename + "::TestClass::()::test_method"
for id in [p.basename,
p.basename + "::TestClass",
p.basename + "::TestClass::()",
normid,
]:
items, hookrec = testdir.inline_genitems(id)
assert len(items) == 1
assert items[0].name == "test_method"
newid = items[0].nodeid
assert newid == normid
def test_collect_custom_nodes_multi_id(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
testdir.makeconftest("""
import pytest
class SpecialItem(pytest.Item):
def runtest(self):
return # ok
class SpecialFile(pytest.File):
def collect(self):
return [SpecialItem(name="check", parent=self)]
def pytest_collect_file(path, parent):
if path.basename == %r:
return SpecialFile(fspath=path, parent=parent)
""" % p.basename)
id = p.basename
items, hookrec = testdir.inline_genitems(id)
py.std.pprint.pprint(hookrec.hookrecorder.calls)
assert len(items) == 2
hookrec.hookrecorder.contains([
("pytest_collectstart",
"collector.fspath == collector.session.fspath"),
("pytest_collectstart",
"collector.__class__.__name__ == 'SpecialFile'"),
("pytest_collectstart",
"collector.__class__.__name__ == 'Module'"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid.startswith(p.basename)"),
#("pytest_collectreport",
# "report.fspath == %r" % str(rcol.fspath)),
])
def test_collect_subdir_event_ordering(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
aaa = testdir.mkpydir("aaa")
test_aaa = aaa.join("test_aaa.py")
p.move(test_aaa)
items, hookrec = testdir.inline_genitems()
assert len(items) == 1
py.std.pprint.pprint(hookrec.hookrecorder.calls)
hookrec.hookrecorder.contains([
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport",
"report.nodeid.startswith('aaa/test_aaa.py')"),
])
def test_collect_two_commandline_args(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
aaa = testdir.mkpydir("aaa")
bbb = testdir.mkpydir("bbb")
test_aaa = aaa.join("test_aaa.py")
p.copy(test_aaa)
test_bbb = bbb.join("test_bbb.py")
p.move(test_bbb)
id = "."
items, hookrec = testdir.inline_genitems(id)
assert len(items) == 2
py.std.pprint.pprint(hookrec.hookrecorder.calls)
hookrec.hookrecorder.contains([
("pytest_collectstart", "collector.fspath == test_aaa"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'aaa/test_aaa.py'"),
("pytest_collectstart", "collector.fspath == test_bbb"),
("pytest_pycollect_makeitem", "name == 'test_func'"),
("pytest_collectreport", "report.nodeid == 'bbb/test_bbb.py'"),
])
def test_serialization_byid(self, testdir):
p = testdir.makepyfile("def test_func(): pass")
items, hookrec = testdir.inline_genitems()
assert len(items) == 1
item, = items
items2, hookrec = testdir.inline_genitems(item.nodeid)
item2, = items2
assert item2.name == item.name
assert item2.fspath == item.fspath
def test_find_byid_without_instance_parents(self, testdir):
p = testdir.makepyfile("""
class TestClass:
def test_method(self):
pass
""")
arg = p.basename + ("::TestClass::test_method")
items, hookrec = testdir.inline_genitems(arg)
assert len(items) == 1
item, = items
assert item.nodeid.endswith("TestClass::()::test_method")
class Test_getinitialnodes:
def test_global_file(self, testdir, tmpdir):
x = tmpdir.ensure("x.py")
config = testdir.parseconfigure(x)
col = testdir.getnode(config, x)
assert isinstance(col, pytest.Module)
assert col.name == 'x.py'
assert col.parent.name == testdir.tmpdir.basename
assert col.parent.parent is None
for col in col.listchain():
assert col.config is config
def test_pkgfile(self, testdir):
testdir.chdir()
tmpdir = testdir.tmpdir
subdir = tmpdir.join("subdir")
x = subdir.ensure("x.py")
subdir.ensure("__init__.py")
config = testdir.parseconfigure(x)
col = testdir.getnode(config, x)
assert isinstance(col, pytest.Module)
assert col.name == 'subdir/x.py'
assert col.parent.parent is None
for col in col.listchain():
assert col.config is config
class Test_genitems:
def test_check_collect_hashes(self, testdir):
p = testdir.makepyfile("""
def test_1():
pass
def test_2():
pass
""")
p.copy(p.dirpath(p.purebasename + "2" + ".py"))
items, reprec = testdir.inline_genitems(p.dirpath())
assert len(items) == 4
for numi, i in enumerate(items):
for numj, j in enumerate(items):
if numj != numi:
assert hash(i) != hash(j)
assert i != j
def test_example_items1(self, testdir):
p = testdir.makepyfile('''
def testone():
pass
class TestX:
def testmethod_one(self):
pass
class TestY(TestX):
pass
''')
items, reprec = testdir.inline_genitems(p)
assert len(items) == 3
assert items[0].name == 'testone'
assert items[1].name == 'testmethod_one'
assert items[2].name == 'testmethod_one'
# let's also test getmodpath here
assert items[0].getmodpath() == "testone"
assert items[1].getmodpath() == "TestX.testmethod_one"
assert items[2].getmodpath() == "TestY.testmethod_one"
s = items[0].getmodpath(stopatmodule=False)
assert s.endswith("test_example_items1.testone")
print(s)
def test_matchnodes_two_collections_same_file(testdir):
testdir.makeconftest("""
import pytest
def pytest_configure(config):
config.pluginmanager.register(Plugin2())
class Plugin2:
def pytest_collect_file(self, path, parent):
if path.ext == ".abc":
return MyFile2(path, parent)
def pytest_collect_file(path, parent):
if path.ext == ".abc":
return MyFile1(path, parent)
class MyFile1(pytest.Item, pytest.File):
def runtest(self):
pass
class MyFile2(pytest.File):
def collect(self):
return [Item2("hello", parent=self)]
class Item2(pytest.Item):
def runtest(self):
pass
""")
p = testdir.makefile(".abc", "")
result = testdir.runpytest()
assert result.ret == 0
result.stdout.fnmatch_lines([
"*2 passed*",
])
res = testdir.runpytest("%s::hello" % p.basename)
res.stdout.fnmatch_lines([
"*1 passed*",
])
class TestNodekeywords:
def test_no_under(self, testdir):
modcol = testdir.getmodulecol("""
def test_pass(): pass
def test_fail(): assert 0
""")
l = list(modcol.keywords)
assert modcol.name in l
for x in l:
assert not x.startswith("_")
assert modcol.name in repr(modcol.keywords)
def test_issue345(self, testdir):
testdir.makepyfile("""
def test_should_not_be_selected():
assert False, 'I should not have been selected to run'
def test___repr__():
pass
""")
reprec = testdir.inline_run("-k repr")
reprec.assertoutcome(passed=1, failed=0)
| [
"[email protected]"
]
| |
14b7d15f64f419181184d6af5c739890b8d7acaf | 12a72da6848ae461b995ec2fc6c4e1827be82803 | /common/monitor_bak.py | 69ab77e717e2eebdea993ff2d36e9666824bb3cb | []
| no_license | lim1942/coin_helper | f3ed40c07a049a00f052dfa3e59cee7eefe969cf | d34ce363371fd964d8c46d5dd04ca7c5eb7d35b4 | refs/heads/main | 2023-04-30T10:46:03.231440 | 2021-05-25T12:15:49 | 2021-05-25T12:15:49 | 366,247,314 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,442 | py | import json
import redis
import requests
import traceback
from threading import Thread,Lock
from datetime import datetime,timedelta
from coin_helper.settings import REDIS_URL
class RedisTool(object):
def __init__(self):
self.R = redis.Redis.from_url(REDIS_URL,decode_responses=True)
self.P = self.R.pipeline(transaction=False)
def set(self,k,v,ex):
self.R.set(k,v,ex=ex)
def get(self,k):
return self.R.get(k)
class Monitor:
redis_obj = RedisTool()
def __init__(self,**kwargs):
self.kwargs = kwargs
self.notify_lock = Lock()
self.last_notify_time = {}
self.long_before_time = datetime.now() - timedelta(days=1)
def record(self,k,v,ex=10):
try:
return self.redis_obj.set(k,v,ex)
except:
traceback.print_exc()
def compare(self,k,v,k2):
pass
def notify(self,k,message):
Thread(target=self._notify,args=(k,message)).start()
def _notify(self,k,message):
notify_time = datetime.now()
with self.notify_lock:
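            # Throttle: send at most one DingTalk alert per key every 6 hours.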
if notify_time - timedelta(hours=6) >= self.last_notify_time.get(k,self.long_before_time):
webhook='https://oapi.dingtalk.com/robot/send?access_token=494a793fe8aa1146b93baeef9aba96cbfa725e2ce6230c0eaa37bb682e06eea8'
header = {
"Content-Type": "application/json",
"Charset": "UTF-8"}
                data ={
                    "msgtype": "text",
                    "text": {
                        "content": f"Price-spread signal triggered: {message}"
                    },
"at": {
"atMobiles":[
"13750872274"
],
"isAtAll": False
}}
ret = requests.post(url=webhook,data=json.dumps(data),headers=header).text
self.last_notify_time[k] = notify_time
return ret
class OkexMonitor(Monitor):
def __init__(self,**kwargs):
super(OkexMonitor, self).__init__(**kwargs)
self.variance_threshold = 0.05
def compare(self,k,v,k2):
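        """Compare price v (recorded under key k) with the cached price under key k2 and notify when the relative difference exceeds variance_threshold."""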
try:
v = float(v)
v2 = float(self.redis_obj.get(k2))
variance = abs(v - v2)
variance_rate = variance/v
if variance_rate > self.variance_threshold:
                message = f"[{k}:{v}] vs [{k2}:{v2}] difference rate above {self.variance_threshold}, difference {round(variance,6)}, rate {round(variance_rate,6)}"
self.notify(k,message)
except:
pass
# print(k,k2)
def okex_record(self,item):
self.record(item['instrument_id'],item['price'])
    def okex_compare_1(self,item):
        """Pairwise comparison of the three OKEx markets: coin-margined perpetual, USDT perpetual and spot."""
try:
instrument_id = item['instrument_id']
if instrument_id.endswith('USDT-SWAP'):
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USDT')
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USD-SWAP')
            # compare the coin-margined perpetual with the spot market
elif instrument_id.endswith('USD-SWAP'):
self.compare(instrument_id,item['price'],item['instrument_id'].split('-')[0]+'-USDT')
except:
traceback.print_exc()
| [
"[email protected]"
]
| |
05b60a337fe7a12315b91c9f03f05cbc27accb90 | 5e48579f65ab45469469a5cf0cbef82bf2323585 | /CovIdMX withREact/Covid19AcuteMx_Support_App/account_app/forms.py | 015094a334d599f574668d60ee456db90449a046 | []
| no_license | jayquake/React-Applications | 9e5786e4402dfe9f4e33d4daef657adb40bae1c8 | b9f27872846e7e8b7da94f77c2120755909db572 | refs/heads/master | 2022-12-30T04:45:11.591814 | 2020-10-11T13:29:05 | 2020-10-11T13:29:05 | 284,446,444 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,164 | py | from django import forms
from django.db import transaction
from .models import User, Subject, DoctorProfile, PatientProfile, EMTProfile
from django.contrib.auth.forms import UserCreationForm
from . import models
class PatientRegisterForm(UserCreationForm):
interests = forms.ModelMultipleChoiceField(
queryset=Subject.objects.all(),
widget=forms.CheckboxSelectMultiple,
required=False
)
class Meta(UserCreationForm.Meta):
model = User
@transaction.atomic
def save(self):
user = super().save(commit=False)
user.is_patient = True
user.save()
patient = PatientProfile.objects.create(user=user)
patient.interests.add(*self.cleaned_data.get('interests'))
return user
class DoctorRegisterForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = User
def save(self, commit=True):
user = super().save(commit=False)
user.is_doctor = True
if commit:
user.save()
return user
class PatientUpdateFrom(forms.ModelForm):
class Meta:
model = User
fields = ['username', 'email']
class PatientProfileUpdateForm(forms.ModelForm):
class Meta:
model = PatientProfile
fields = ['image']
class DoctorUpdateFrom(forms.ModelForm):
class Meta:
model = User
fields = ['first_name', 'last_name', 'email']
class DoctorProfileUpdateForm(forms.ModelForm):
class Meta:
model = models.DoctorProfile
        fields = ['image', 'about_me', 'resume']
        # The related User fields (first name, last name, email) are edited via
        # DoctorUpdateFrom; here only the profile fields' widgets are customized.
        widgets = {
            'about_me': forms.Textarea(),
            'resume': forms.FileInput(),
        }
# class patientRegisterForm(UserCreationForm):
# USER_SCHOOL_CHOICES = ((1, 'High School'),
# (2, 'Some College'),
# (3, 'Associates Degree'),
# (4, 'Bachelors Degree'),
# (5, 'Masters Degree'),
# (6, 'Other'),
# )
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# academics = forms.Select(choices=USER_SCHOOL_CHOICES)
#
# class Meta:
# model = User
# fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2']
#
#
# class DoctorRegisterForm(UserCreationForm):
# USER_Grade_Taught_CHOICES = ((1, 'Kindergarten'),
# (2, 'first grade '),
# (3, 'second grade '),
# (4, 'third grade'),
# (5, 'Fourth Grade'),
# (6, 'Fifth Grade'),
# (7, 'Sixth Grade'),
# (8, 'Seventh Grade'),
# (9, 'Eighth Grade'),
# (10, 'Ninth Grade'),
# (11, ' Grade'),
# )
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# highest_education_level = forms.Select()
# grade_taught = forms.SelectMultiple(USER_Grade_Taught_CHOICES)
#
# class Meta:
# model = User
# form_class = DoctorRegisterForm
# template_name = 'registration/signup_form.html'
# fields = ['first_name', 'last_name', 'username', 'email', 'password1', 'password2',]
#
#
# def get_context_data(self, **kwargs):
# kwargs['user_type'] = 'Doctor'
# return super().get_context_data(**kwargs)
#
#
#
#
#
#
# email = forms.EmailField()
# first_name = forms.CharField(max_length=50)
# last_name = forms.CharField(max_length=50)
# about_me = forms.Textarea(max_length=150)
# resume = forms.FileInput
# job_title = forms.ChoiceField
# languages = forms.LanguageField(max_length=8, blank=True)
# region = forms.RegionField(blank=True) | [
"[email protected]"
]
| |
1a7048886021c154c279d665d513e857da759255 | 95e9ec4b3b0d86063da53a0e62e138cf794cce3a | /webroot/dqb/dqb/base/myjson.py | e6843b72d13a46c5fb705787579293028f384d2f | []
| no_license | wjl626nice/1902 | c3d350d91925a01628c9402cbceb32ebf812e43c | 5a1a6dd59cdd903563389fa7c73a283e8657d731 | refs/heads/master | 2023-01-05T23:51:47.667675 | 2019-08-19T06:42:09 | 2019-08-19T06:42:09 | 180,686,044 | 4 | 1 | null | 2023-01-04T07:35:24 | 2019-04-11T00:46:43 | Python | UTF-8 | Python | false | false | 1,822 | py | from django.shortcuts import HttpResponse
from rest_framework.renderers import JSONRenderer
from base.errcode import err_number
class JSONResponse(HttpResponse):
"""
    Used to return JSON data.
"""
def __init__(self,code,data='',total=1,count=-1,**kwargs):
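        """Build the JSON body: a non-zero code yields {"code", "msg", "data": []};
        code 0 wraps the rendered data with "total" (and "count" when count >= 0)."""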
kwargs['content_type'] = 'application/json'
try:
content = JSONRenderer().render(data)
if code:
content = '{"code":' \
+ str(code) \
+ ',"msg":"' \
+ err_number[str(code)] \
+ '","data":[]}'
else:
if count < 0:
content = '{"code":'\
+str(code)\
+',"msg":"'\
+err_number[str(code)]\
+'","total":'\
+str(total)\
+',"data":'\
+str(content,encoding="utf-8")\
+'}'
else:
content = '{"code":' \
+ str(code) \
+ ',"msg":"' \
+ err_number[str(code)] \
+ '","total":' \
+ str(total) \
+ ',"count":' \
+ str(count) \
+ ',"data":' \
+ str(content, encoding="utf-8") \
+ '}'
        except:
            content = '{"code":' \
                      + '-1' \
                      + ',"msg":"invalid response data","data":[]}'
super(JSONResponse, self).__init__(content, **kwargs) | [
"[email protected]"
]
| |
f64e6334a50348abd20c1e2b1141f25c1a15d653 | 38bd99c72ca2521489ce1eb02b7604095b02b585 | /src/1680-ConcatenationOfConsecutiveBinaryNumbers.py | 67fc18efbe6b891b864fd59abb68a2db2a44bdad | [
"MIT"
]
| permissive | Jiezhi/myleetcode | eadbd7d9f1f0ea6a0ee15c2da9040dcfbd28b522 | 4dd1e54d8d08f7e6590bc76abd08ecaacaf775e5 | refs/heads/master | 2023-03-16T15:52:21.833622 | 2023-03-09T14:33:03 | 2023-03-09T14:33:03 | 139,965,948 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | #!/usr/bin/env python3
"""
CREATED AT: 2022-09-23
URL: https://leetcode.com/problems/concatenation-of-consecutive-binary-numbers/
GITHUB: https://github.com/Jiezhi/myleetcode
FileName: 1680-ConcatenationOfConsecutiveBinaryNumbers
Difficulty: Medium
Desc: Concatenate the binary representations of 1 to n, in order, and return the decimal value of the result modulo 10^9 + 7.
Tag:
See: https://leetcode.cn/problems/concatenation-of-consecutive-binary-numbers/solution/lian-jie-lian-xu-er-jin-zhi-shu-zi-by-ze-t40j/
"""
class Solution:
def concatenatedBinary(self, n: int) -> int:
"""
Runtime: 2060 ms, faster than 66.93%
Memory Usage: 13.9 MB, less than 80.31%
1 <= n <= 10^5
"""
module = 10 ** 9 + 7
ret, shift = 0, 0
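        # shift tracks the bit length of i: it grows by one exactly when i is a
        # power of two, so (ret << shift) makes room for i's binary digits.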
for i in range(1, n + 1):
if i & (i - 1) == 0:
shift += 1
ret = ((ret << shift) + i) % module
return ret
def test():
assert Solution().concatenatedBinary(n=1) == 1
assert Solution().concatenatedBinary(n=3) == 27
assert Solution().concatenatedBinary(n=12) == 505379714
if __name__ == '__main__':
test()
| [
"[email protected]"
]
| |
9a6be77d3f1ab6b5515bb83d0b6a6eee5e09b43b | eda7fbf7bbc0614e6fc448d2f6e3fd1918dadcbe | /new-api-tests/applications/create-surface-caps-from-centerlines/create_surface_caps.py | e61f1afbfba81befc17a2e58529183112bb6877e | []
| no_license | SimVascular/SimVascular-Tests | e97c136ad3bf3a7275d40c0323abca7817eb2eca | 55018e1edcd070bce77ae5af4caf2105353d3697 | refs/heads/master | 2023-02-11T02:19:06.755815 | 2023-02-02T18:26:31 | 2023-02-02T18:26:31 | 42,211,398 | 2 | 10 | null | 2023-02-02T18:26:32 | 2015-09-10T00:06:14 | Python | UTF-8 | Python | false | false | 3,496 | py | #!/usr/bin/env python
"""This script is used to create an SV model from a closed segmentation surface.

The segmentation surface is read in, centerlines are computed between selected
source and target points, the surface is clipped near the ends of the centerline
branches, and the resulting openings are capped to create the model.
"""
import argparse
import os
import sys
from centerlines import Centerlines
from surface import Surface
sys.path.insert(1, '../../graphics/')
import graphics as gr
def parse_args():
'''Parse command-line arguments.
'''
parser = argparse.ArgumentParser()
parser.add_argument("--clip-distance", type=float, default=0.0,
help="The distance from the end of a centerline branch to clip a surface.")
parser.add_argument("--clip-width-scale", type=float, default=1.0,
help="The width multiplied by the centerline branch end radius to define the width of the box used to clip a surface.")
parser.add_argument("--surface-file", required=True, help="Input surface (.vtp or .vtk) file.")
parser.add_argument("--mesh-scale", type=float, default=1.0,
help="The factor used to scale the fe volume meshing edge size. A larger scale creates a coarser mesh. The initial edge size is determined from the largest surface triangle.")
parser.add_argument("--remesh-scale", type=float, default=1.0,
help="The factor used to scale the surface remeshing edge size. A larger scale creates a coarser suface mesh. The initial edge size is determined from the largest surface triangle.")
args = parser.parse_args()
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return args
def main():
# Get command-line arguments.
args = parse_args()
## Create renderer and graphics window.
win_width = 500
win_height = 500
renderer, renderer_window = gr.init_graphics(win_width, win_height)
## Read in the segmentation surface.
surface_file_name = args.surface_file
surface = Surface(gr, renderer_window, renderer)
surface.read(surface_file_name)
gr_geom = gr.add_geometry(renderer, surface.geometry, color=[0.8, 0.8, 1.0])
surface.vtk_actor = gr_geom
#gr_geom.GetProperty().SetOpacity(0.5)
## Create a Centerlines object used to clip the surface.
centerlines = Centerlines()
centerlines.graphics = gr
centerlines.surface = surface
centerlines.window = renderer_window
centerlines.renderer = renderer
centerlines.clip_distance = args.clip_distance
centerlines.clip_width_scale = args.clip_width_scale
centerlines.remesh_scale = args.remesh_scale
centerlines.mesh_scale = args.mesh_scale
print("---------- Alphanumeric Keys ----------")
print("a - Compute model automatically for a three vessel surface with flat ends.")
print("c - Compute centerlines.")
print("m - Create a model from the surface and centerlines.")
print("q - Quit")
print("s - Select a centerline source point.")
print("t - Select a centerline target point.")
print("u - Undo the selection of a centerline source or target point.")
## Create a mouse interactor for selecting centerline points.
picking_keys = ['s', 't']
event_table = {
'a': (surface.create_model_automatically, centerlines),
'c': (surface.compute_centerlines, surface),
'm': (centerlines.create_model, surface),
's': surface.add_centerlines_source_node,
't': surface.add_centerlines_target_node
}
interactor = gr.init_picking(renderer_window, renderer, surface.geometry, picking_keys, event_table)
## Display window.
interactor.Start()
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
7d0eac6bc6769a63f609d726e612586ed47b6af8 | e1ae535d8613aae44e8f9eaa4daf50c1e63665b7 | /multimedia/south_migrations/0026_auto__chg_field_remotestorage_media.py | e4f8b05c6dae4836b6317150e40ea7eda035d2ed | []
| no_license | teury/django-multimedia | 48b8fba9abc101286990b1306d85967bd197f08e | 4ddd5e6d9f4f680e2f4f68cc3616ced8f0fc2a43 | refs/heads/master | 2021-01-16T20:50:24.573686 | 2015-04-23T21:22:38 | 2015-04-23T21:22:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,388 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'RemoteStorage.media'
db.alter_column(u'multimedia_remotestorage', 'media_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['multimedia.Media'], null=True, on_delete=models.SET_NULL))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'RemoteStorage.media'
raise RuntimeError("Cannot reverse this migration. 'RemoteStorage.media' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'RemoteStorage.media'
db.alter_column(u'multimedia_remotestorage', 'media_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['multimedia.Media']))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'multimedia.encodeprofile': {
'Meta': {'object_name': 'EncodeProfile'},
'command': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'container': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'file_type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'multimedia.media': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'Media'},
'created': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'profiles': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['multimedia.EncodeProfile']", 'symmetrical': 'False'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'multimedia.remotestorage': {
'Meta': {'object_name': 'RemoteStorage'},
'content_hash': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['multimedia.Media']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'profile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['multimedia.EncodeProfile']", 'on_delete': 'models.PROTECT'})
}
}
complete_apps = ['multimedia'] | [
"[email protected]"
]
| |
8af8855e074aad7b7515f888ec0f24f85164debb | 5e6d8b9989247801718dd1f10009f0f7f54c1eb4 | /sdk/python/pulumi_azure_native/web/v20210115/web_app_relay_service_connection_slot.py | dda61e329470f348eb6bc50714d28126c870113f | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | vivimouret29/pulumi-azure-native | d238a8f91688c9bf09d745a7280b9bf2dd6d44e0 | 1cbd988bcb2aa75a83e220cb5abeb805d6484fce | refs/heads/master | 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,719 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = ['WebAppRelayServiceConnectionSlotArgs', 'WebAppRelayServiceConnectionSlot']
@pulumi.input_type
class WebAppRelayServiceConnectionSlotArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
slot: pulumi.Input[str],
biztalk_uri: Optional[pulumi.Input[str]] = None,
entity_connection_string: Optional[pulumi.Input[str]] = None,
entity_name: Optional[pulumi.Input[str]] = None,
hostname: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
resource_connection_string: Optional[pulumi.Input[str]] = None,
resource_type: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a WebAppRelayServiceConnectionSlot resource.
:param pulumi.Input[str] name: Name of the app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] slot: Name of the deployment slot. If a slot is not specified, the API will create or update a hybrid connection for the production slot.
:param pulumi.Input[str] kind: Kind of resource.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "slot", slot)
if biztalk_uri is not None:
pulumi.set(__self__, "biztalk_uri", biztalk_uri)
if entity_connection_string is not None:
pulumi.set(__self__, "entity_connection_string", entity_connection_string)
if entity_name is not None:
pulumi.set(__self__, "entity_name", entity_name)
if hostname is not None:
pulumi.set(__self__, "hostname", hostname)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if port is not None:
pulumi.set(__self__, "port", port)
if resource_connection_string is not None:
pulumi.set(__self__, "resource_connection_string", resource_connection_string)
if resource_type is not None:
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of the app.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group to which the resource belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def slot(self) -> pulumi.Input[str]:
"""
Name of the deployment slot. If a slot is not specified, the API will create or update a hybrid connection for the production slot.
"""
return pulumi.get(self, "slot")
@slot.setter
def slot(self, value: pulumi.Input[str]):
pulumi.set(self, "slot", value)
@property
@pulumi.getter(name="biztalkUri")
def biztalk_uri(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "biztalk_uri")
@biztalk_uri.setter
def biztalk_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "biztalk_uri", value)
@property
@pulumi.getter(name="entityConnectionString")
def entity_connection_string(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "entity_connection_string")
@entity_connection_string.setter
def entity_connection_string(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "entity_connection_string", value)
@property
@pulumi.getter(name="entityName")
def entity_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "entity_name")
@entity_name.setter
def entity_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "entity_name", value)
@property
@pulumi.getter
def hostname(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "hostname")
@hostname.setter
def hostname(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "hostname", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="resourceConnectionString")
def resource_connection_string(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "resource_connection_string")
@resource_connection_string.setter
def resource_connection_string(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_connection_string", value)
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "resource_type")
@resource_type.setter
def resource_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_type", value)
class WebAppRelayServiceConnectionSlot(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
biztalk_uri: Optional[pulumi.Input[str]] = None,
entity_connection_string: Optional[pulumi.Input[str]] = None,
entity_name: Optional[pulumi.Input[str]] = None,
hostname: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
resource_connection_string: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_type: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Hybrid Connection for an App Service app.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] name: Name of the app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] slot: Name of the deployment slot. If a slot is not specified, the API will create or update a hybrid connection for the production slot.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebAppRelayServiceConnectionSlotArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Hybrid Connection for an App Service app.
:param str resource_name: The name of the resource.
:param WebAppRelayServiceConnectionSlotArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebAppRelayServiceConnectionSlotArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
biztalk_uri: Optional[pulumi.Input[str]] = None,
entity_connection_string: Optional[pulumi.Input[str]] = None,
entity_name: Optional[pulumi.Input[str]] = None,
hostname: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
port: Optional[pulumi.Input[int]] = None,
resource_connection_string: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_type: Optional[pulumi.Input[str]] = None,
slot: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WebAppRelayServiceConnectionSlotArgs.__new__(WebAppRelayServiceConnectionSlotArgs)
__props__.__dict__["biztalk_uri"] = biztalk_uri
__props__.__dict__["entity_connection_string"] = entity_connection_string
__props__.__dict__["entity_name"] = entity_name
__props__.__dict__["hostname"] = hostname
__props__.__dict__["kind"] = kind
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__.__dict__["name"] = name
__props__.__dict__["port"] = port
__props__.__dict__["resource_connection_string"] = resource_connection_string
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["resource_type"] = resource_type
if slot is None and not opts.urn:
raise TypeError("Missing required property 'slot'")
__props__.__dict__["slot"] = slot
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:web/v20210115:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20150801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20150801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20160801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20180201:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20181101:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20190801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20190801:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20200601:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20200901:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20200901:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20201001:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201001:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20201201:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20201201:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20210101:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210101:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-native:web/v20210201:WebAppRelayServiceConnectionSlot"), pulumi.Alias(type_="azure-nextgen:web/v20210201:WebAppRelayServiceConnectionSlot")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppRelayServiceConnectionSlot, __self__).__init__(
'azure-native:web/v20210115:WebAppRelayServiceConnectionSlot',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppRelayServiceConnectionSlot':
"""
Get an existing WebAppRelayServiceConnectionSlot resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = WebAppRelayServiceConnectionSlotArgs.__new__(WebAppRelayServiceConnectionSlotArgs)
__props__.__dict__["biztalk_uri"] = None
__props__.__dict__["entity_connection_string"] = None
__props__.__dict__["entity_name"] = None
__props__.__dict__["hostname"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["name"] = None
__props__.__dict__["port"] = None
__props__.__dict__["resource_connection_string"] = None
__props__.__dict__["resource_type"] = None
__props__.__dict__["type"] = None
return WebAppRelayServiceConnectionSlot(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="biztalkUri")
def biztalk_uri(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "biztalk_uri")
@property
@pulumi.getter(name="entityConnectionString")
def entity_connection_string(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "entity_connection_string")
@property
@pulumi.getter(name="entityName")
def entity_name(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "entity_name")
@property
@pulumi.getter
def hostname(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "hostname")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "port")
@property
@pulumi.getter(name="resourceConnectionString")
def resource_connection_string(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "resource_connection_string")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "resource_type")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| [
"[email protected]"
]
| |
c6ecf3c59e8d315c1650c67532864af71b386c05 | 4e8b37ca121be19cd3b4e73a6592be2659d8134c | /backend/Techfesia2019/accounts/migrations/0005_auto_20190701_1708.py | a7113d24504a420a0d91b930fb768ac3673981f3 | [
"MIT"
]
| permissive | masterashu/Techfesia2019 | 365b9b8dc1cb0bc6b613c72632e8b7a2a2a70905 | 8fd82c4867c8d870b82a936fc0f9e80f11ae03e7 | refs/heads/backend-event-registrations | 2020-06-10T20:58:40.850415 | 2019-07-27T23:00:21 | 2019-07-27T23:00:21 | 193,744,800 | 1 | 1 | MIT | 2019-06-29T17:12:31 | 2019-06-25T16:29:12 | Python | UTF-8 | Python | false | false | 466 | py | # Generated by Django 2.2.2 on 2019-07-01 11:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0004_auto_20190701_0956'),
]
operations = [
migrations.AlterField(
model_name='institute',
name='name',
field=models.CharField(default='Indian Institute of Information Technology, Sri City', max_length=200, unique=True),
),
]
| [
"[email protected]"
]
| |
a9d2eeab18066cbc76789aba31dd51329d4f3780 | 9f0b9a8fe27336b8a231a33c6f693ed019a61b6e | /blacklinetest.py | f6eb1fa445e64a1ab1daa8cf7cc3bd44fcadc93b | []
| no_license | Duong-NVH/tool-set | e2647cf74fa085eab42fe3f19c852634629e956e | e7c5f7f4522e75eefe74e808a07ecf6575c4ebf5 | refs/heads/main | 2023-06-15T07:37:30.783287 | 2021-07-09T15:58:12 | 2021-07-09T15:58:12 | 382,987,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | import cv2
import numpy as np
img = cv2.imread('blacklinetest.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.Canny(gray, 50, 150, apertureSize=3)
lines = cv2.HoughLines(edges, 1, np.pi/180, 500)
for rho, theta in lines[0]:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
cv2.line(img, (x1, y1), (x2, y2), (0, 0, 255), 2)
cv2.imwrite('houghlines3.jpg', img)
| [
"[email protected]"
]
| |
d7aab2532f25c287a63c8bd8d282163103684f29 | d7567ee75e48bd7872a1c332d471ff3ce7433cb9 | /checkout/urls.py | 233bfb99df176d4ab47c4bae44affd20f8155e9c | []
| no_license | sarahbarron/ecommerce | 30cd0ff26afa5ec9031165b63ecde8c0f7f6086f | aba5370fd731e7ec9e677041504f6c3457b0d405 | refs/heads/master | 2020-03-17T21:10:56.385918 | 2020-01-17T18:35:28 | 2020-01-17T18:35:28 | 133,947,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 131 | py | from django.conf.urls import url
from .views import checkout
urlpatterns = [
url(r'^$', checkout, name='checkout'),
] | [
"[email protected]"
]
| |
ab932c024897c581d9bb5dd95eef2ee759d421c2 | bac5ecb5eef06dfe76b9b7bff80faee7485c67dd | /.history/django_vuejs_tutorial/django_vuejs/dataiku/models_20200829125121.py | c7e0a2d229648bf8a2326333ab23d5a72731658d | []
| no_license | MChrys/dataiku | fb1e48401d544cbcc5a80a0a27668dc9d2d196e5 | 6091b24f565224260a89246e29c0a1cbb72f58ed | refs/heads/master | 2022-12-16T11:06:13.896643 | 2020-09-12T19:03:51 | 2020-09-12T19:03:51 | 293,287,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,147 | py | from django.db import models
from django.utils import timezone
class Task(models.Model):
'''
All task that could be apply to a specific account
'''
name = models.CharField(max_length=60, primary_key=True)
description = models.CharField(max_length=510, null=True, blank=True)
supertask = models.ForeignKey('self',null=True, blank=True, on_delete=models.SET_NULL)
class Dataiku_account(models.Model):
STATUS = (
('in operation', 'in operation'),
('avaible', 'avaible')
)
email = models.CharField(max_length=60, primary_key=True)
password = models.CharField(max_length=255, null=True, blank=True)
#task = models.CharField(max_length=255, null=True, blank=True)
status = models.CharField(max_length=255, null=True, blank=True, choices = STATUS)
def __str__(self):
return self.email
class Operation(models.Model):
'''
A running Task like : validate this course or take this QCM
'''
creation = models.DateTimeField( editable = False)
STATUS = (
('pending', 'pending'),
('running', 'running'),
('done', 'done')
)
task = models.OneToOneField(Task, null=True, blank=True,on_delete=models.SET_NULL)
account = models.ForeignKey(Dataiku_account, on_delete=models.CASCADE)
statut = models.CharField(max_length=255, null=True, blank=True, choices = STATUS)
def save(self, *args, **kwargs):
if not self.id:
self.creation = timezone.now()
        return super(Operation, self).save(*args, **kwargs)
class QCM(models.Model):
LearningPathUrl = models.CharField(max_length=255, null=True, blank=True)
LearningPathName = models.CharField(max_length=255, null=True, blank=True)
CourseUrl = models.CharField(max_length=255, null=True, blank=True)
CourseName = models.CharField(max_length=255, null=True, blank=True)
QcmUrl = models.CharField(max_length=255, null=True, blank=True)
QcmName = models.CharField(max_length=255, null=True, blank=True)
Lenght = models.IntegerField(default =0)
Verif = models.IntegerField(default =0)
status = models.BooleanField(default = False)
def __str__(self):
return "{}_{}_{}".format(self.LearningPathName, self.CourseName,self.QcmName)
class Session(models.Model):
STATUS = (
('running','running'),
('finish','finish')
)
email = models.ForeignKey(Dataiku_account , on_delete=models.CASCADE)
start = models.DateTimeField(editable =False)
countdown = models.CharField(max_length=10, blank=True, null=True, default= '59:59')
score = models.IntegerField(default=0)
lenght = models.IntegerField(default=0)
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.start = timezone.now()
        return super(Session, self).save(*args, **kwargs)
# Create your models here.
class Question(models.Model):
STATUS = (
('pending', 'pending'),
('check', 'check')
)
CHOICES_TYPE = (
('checkbox', 'checkbox'),
('radio', 'radio')
)
text = models.CharField(max_length=255, primary_key=True)
#session = models.ForeignKey(Session, null=True, blank=True,on_delete = models.SET_NULL)
status = models.CharField(max_length=255, null=True, blank=True, choices = STATUS)
choice_type = models.CharField(max_length=255, null=True, blank=True, default= "radio" ,choices = CHOICES_TYPE)
max_choices = models.IntegerField(default = 0)
cursor = models.IntegerField(default = 1)
# cursor is the number of answer currently explored
qcm_link = models.ForeignKey(QCM, null=True, blank=True, on_delete=models.CASCADE)
def __str__(self):
return self.text
class Run(models.Model):
'''
A Run is a try
'''
STATUS = (
(True, 'True'),
(False, 'False')
)
id = models.AutoField(primary_key=True)
#creation = models.DateTimeField(editable =False)
question_link = models.ForeignKey(Question, null=True, blank=True, on_delete=models.CASCADE)
session_link = models.ForeignKey(Session, null=True, blank=True, on_delete=models.CASCADE)
status = models.BooleanField(default = False)
class Posibility(models.Model):
CHOICES = (
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4'),
)
rank = models.IntegerField( null=True, default= 1, blank=True, choices = CHOICES)
question_link = models.ForeignKey(Question, null=True, blank=True,on_delete=models.CASCADE)
text = models.CharField(max_length=255, null=True, blank=True)
#rank = models.ForeignKey(Answer, null=True, blank=True,on_delete= models.SET_NULL)
def __str__(self):
return self.text
class Answer(models.Model):
#choice = models.IntegerField( null=True, default= 1, blank=True, choices = CHOICES)
choice = models.ForeignKey(Posibility,to_field='rank',blank=True,null=True,on_delete= models.SET_NULL)
connected_run = models.ForeignKey(Run,to_field='id',blank=True,null=True,on_delete= models.SET_NULL)
def __str__(self):
        return str(self.choice)
| [
"[email protected]"
]
| |
18b985fd2a25b161ab12d7f4f4e09fc83c30cc2e | 3b21cbe5320137a3d8f7da40558294081211f63f | /Chapter04/AutoencMnist.py | daebd29ec15d7b88a838e6b5aa4a4d8016f69927 | [
"MIT"
]
| permissive | Evelynatrocks/Python-Machine-Learning-Cookbook-Second-Edition | d06812bba0a32a9bd6e5e8d788769a07d28084cd | 99d8b799dbfe1d9a82f0bcc3648aaeb147b7298f | refs/heads/master | 2023-04-06T20:23:05.384943 | 2021-01-18T12:06:36 | 2021-01-18T12:06:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,181 | py | from keras.datasets import mnist
(XTrain, YTrain), (XTest, YTest) = mnist.load_data()
print('XTrain shape = ',XTrain.shape)
print('XTest shape = ',XTest.shape)
print('YTrain shape = ',YTrain.shape)
print('YTest shape = ',YTest.shape)
import numpy as np
print('YTrain values = ',np.unique(YTrain))
print('YTest values = ',np.unique(YTest))
unique, counts = np.unique(YTrain, return_counts=True)
print('YTrain distribution = ',dict(zip(unique, counts)))
unique, counts = np.unique(YTest, return_counts=True)
print('YTrain distribution = ',dict(zip(unique, counts)))
import matplotlib.pyplot as plt
plt.figure(1)
plt.subplot(121)
plt.hist(YTrain, alpha=0.8, ec='black')
plt.xlabel("Classes")
plt.ylabel("Number of occurrences")
plt.title("YTrain data")
plt.subplot(122)
plt.hist(YTest, alpha=0.8, ec='black')
plt.xlabel("Classes")
plt.ylabel("Number of occurrences")
plt.title("YTest data")
plt.show()
XTrain = XTrain.astype('float32') / 255
XTest = XTest.astype('float32') / 255
XTrain = XTrain.reshape((len(XTrain), np.prod(XTrain.shape[1:])))
XTest = XTest.reshape((len(XTest), np.prod(XTest.shape[1:])))
from keras.layers import Input
from keras.layers import Dense
from keras.models import Model
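# A minimal dense autoencoder: 784-pixel input -> 32-unit encoding -> 784-pixel reconstruction.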
InputModel = Input(shape=(784,))
EncodedLayer = Dense(32, activation='relu')(InputModel)
DecodedLayer = Dense(784, activation='sigmoid')(EncodedLayer)
AutoencoderModel = Model(InputModel, DecodedLayer)
AutoencoderModel.summary()
AutoencoderModel.compile(optimizer='adadelta', loss='binary_crossentropy')
history = AutoencoderModel.fit(XTrain, XTrain,
batch_size=256,
epochs=100,
shuffle=True,
validation_data=(XTest, XTest))
DecodedDigits = AutoencoderModel.predict(XTest)
n=5
plt.figure(figsize=(20, 4))
for i in range(n):
ax = plt.subplot(2, n, i + 1)
plt.imshow(XTest[i+10].reshape(28, 28))
plt.gray()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
ax = plt.subplot(2, n, i + 1 + n)
plt.imshow(DecodedDigits[i+10].reshape(28, 28))
plt.gray()
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
plt.show() | [
"[email protected]"
]
| |
be0eb741b4aaaad5085131454dec219bdd1c93dd | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_Anscombe/trend_LinearTrend/cycle_30/ar_/test_artificial_1024_Anscombe_LinearTrend_30__100.py | 70d9b6daa1932fc44ee8f23227fa9317aea8fd0d | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 268 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 30, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"[email protected]"
]
| |
e333c381e106259eee7a3f4e11f26674dd3a3594 | 30a8b69bd2e0a3f3c2c1c88fb3bd8a28e6fc4cd0 | /Part1/auth_foursquare.py | dc09d963b40958ce2c5e3b9030a232e3dd9ca643 | []
| no_license | llord1/Mining-Georeferenced-Data | d49108f443922f02b90431ad7a9626ea17fd0554 | c71f2e151ccfc4a1a9c07b5fcf4e95b7f7ba70e9 | refs/heads/master | 2021-05-30T13:27:57.663015 | 2015-12-29T09:10:08 | 2015-12-29T09:10:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | #!/usr/bin/env python
import foursquare
from foursquare_accounts import accounts
app = accounts["tutorial"]
client = foursquare.Foursquare(client_id=app["client_id"],
client_secret=app["client_secret"])
client.set_access_token(app["access_token"])
| [
"[email protected]"
]
| |
c35024eb1eed9b0da1bdde17899977fd5b9b5c96 | 0201ac814d825cac1030dfe1ccdb7ef1657c205b | /__init__.py | a403709aa7de47dca868813496d90679f83afbc3 | [
"BSD-3-Clause"
]
| permissive | karttur/geoimagine03-timeseries | c99be449dccaab767d470cfaa2b71d9dae339fba | aa8e1642fd4a8bc196ad6fce9f90b80066d54dac | refs/heads/main | 2023-08-22T14:12:50.791746 | 2021-10-20T10:54:37 | 2021-10-20T10:54:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | """
timeseries
==========================================
Package belonging to Karttur´s GeoImagine Framework.
Author
------
Thomas Gumbricht ([email protected])
"""
from .version import __version__, VERSION, metadataD
from .timeseries import ProcessTimeSeries
from .numbautil import TimeSeriesNumba
#from .tsgraph import ProcessTimeSeriesGraph
| [
"[email protected]"
]
| |
0eb944d3d4b625dd58953dcd5ad39efa5bcaeaa1 | 9c14bd53c8629262b1310962c1663a3c503ba3a0 | /projects/golem/tests/project/add_directory_to_pages.py | 7b7d283d6d17e02f6e630f4b7d7aad6a000fea95 | []
| no_license | ShubhamAnand/golem-demo | b083d44b5d2d5db79eae96aa5bb1f3307272d64b | a40ced5500b3bfdb54351393eeb8ccba19a50564 | refs/heads/master | 2021-07-16T00:44:57.663282 | 2017-10-22T22:56:25 | 2017-10-22T22:56:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py |
description = 'Verify that the user can add a directory in the pages section by appending \'\\\' at the end'
pages = ['login',
'index',
'project']
def setup(data):
navigate(data.env.url)
login.do_login('admin', 'admin')
index.create_access_project('test')
def test(data):
store('directory_name', random('ccccc'))
project.add_page_directory(data.directory_name)
project.verify_page_directory_exists(data.directory_name)
def teardown(data):
close()
| [
"[email protected]"
]
| |
6f25ab872e193cdb10b2e82ee3a0967273771d8c | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startQiskit_noisy99.py | 63b18b1983cf2ea49aadbaea3bb289271ef74cae | [
"BSD-3-Clause"
]
| permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,183 | py | # qubit number=3
# total number=18
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_noisy99.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
]
| |
4034bde7a9e06c5d7487997a7acb9e10b85cca2b | 0f1001169c4f229c253a6f1dc1c9aff51c797cca | /docs/markdown_to_html.py | ffacda661ea31a8286a001a77d5178f08b9a1fd3 | [
"Apache-2.0"
]
| permissive | alencon/dash-bootstrap-components | f40e360787c96a1d9f7827cf042872b2f9cffcac | 4f39856c13f66730512c57ed6dc0a819e8629293 | refs/heads/master | 2023-01-22T13:07:05.880865 | 2020-12-03T21:25:50 | 2020-12-03T21:25:50 | 318,998,227 | 1 | 0 | Apache-2.0 | 2020-12-06T09:42:13 | 2020-12-06T09:42:13 | null | UTF-8 | Python | false | false | 1,541 | py | from pathlib import Path
import markdown
from markdown.extensions.fenced_code import FencedBlockPreprocessor
# highlightJS expects the class "language-*" but markdown default is "*"
FencedBlockPreprocessor.LANG_TAG = ' class="language-%s"'
CONTENT = Path(__file__).parent / "content"
DEST = Path(__file__).parent / "templates" / "generated"
DOCS_HTML_TEMPLATE = """{% extends "docs.html" %}
{% block title %}<title><TITLE></title>{% endblock %}
{% block content %}<CONTENT>{% endblock %}
"""
CHANGELOG_HTML_TEMPLATE = """{% extends "changelog.html" %}
{% block title %}<title><TITLE></title>{% endblock %}
{% block content %}<CONTENT>{% endblock %}
"""
def convert_all_markdown_files():
for path in CONTENT.glob("docs/*.md"):
template = template_from_markdown(path, title_suffix=" - dbc docs")
with open(DEST / "docs" / path.name.replace(".md", ".html"), "w") as f:
f.write(template)
for path in CONTENT.glob("*.md"):
template = template_from_markdown(
path, template=CHANGELOG_HTML_TEMPLATE
)
with open(DEST / path.name.replace(".md", ".html"), "w") as f:
f.write(template)
def template_from_markdown(path, title_suffix="", template=DOCS_HTML_TEMPLATE):
md = markdown.Markdown(extensions=["fenced_code", "meta"])
text = path.read_text()
template = template.replace("<CONTENT>", md.convert(text))
    return template.replace("<TITLE>", f"{md.Meta['title'][0]}{title_suffix}")
if __name__ == "__main__":
convert_all_markdown_files()
| [
"[email protected]"
]
| |
2400de35f3a6c6902ae173e097d54b31040a551a | 2cbf3aaad62f4922d827af658fb5dbb7ac651bef | /teledusite/teledu/models/conceptAttribute.py | fc12d964e90614a6ff7813077017d177a3c7fecb | []
| no_license | tctimmeh/teledu | 0266240aa864cd2eed75857e66eaeb8270f44c1a | 04135ffb04f397f29152ca48f868a957b18d504a | refs/heads/master | 2021-01-23T08:52:32.817693 | 2013-10-29T01:34:41 | 2013-10-29T01:34:41 | 2,566,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | from django.db import models
from concept import Concept
from attribute import Attribute
class ConceptAttribute(Attribute):
concept = models.ForeignKey(Concept, related_name = 'attributes')
class Meta:
app_label = 'teledu'
unique_together = (('concept', 'name'))
def __unicode__(self):
return '%s - %s' % (self.concept.name, self.name)
def conceptName(self):
return self.concept.name
def gameSystem(self):
return self.concept.gameSystem
def getAttributeValuesForInstance(self, instance):
from conceptAttributeValue import ConceptAttributeValue
return ConceptAttributeValue.objects.filter(attribute = self, instance = instance)
| [
"[email protected]"
]
| |
ccc456d17a7c5c5b509e388397e01ad74e2f0559 | 00a9295409b78a53ce790f7ab44931939f42c0e0 | /FPGA/apio/iCEBreaker/FIR_Filter/sympy/venv/lib/python3.8/site-packages/sympy/solvers/tests/test_pde.py | 1b43eb0b0886235a8a8b1b4a593e4e7d486fcfae | [
"Apache-2.0"
]
| permissive | klei22/Tech-OnBoarding-Class | c21f0762d2d640d5e9cb124659cded5c865b32d4 | 960e962322c37be9117e0523641f8b582a2beceb | refs/heads/master | 2022-11-10T13:17:39.128342 | 2022-10-25T08:59:48 | 2022-10-25T08:59:48 | 172,292,871 | 2 | 3 | Apache-2.0 | 2019-05-19T00:26:32 | 2019-02-24T03:50:35 | C | UTF-8 | Python | false | false | 9,057 | py | from sympy import (Derivative as D, Eq, exp, sin,
Function, Symbol, symbols, cos, log)
from sympy.core import S
from sympy.solvers.pde import (pde_separate, pde_separate_add, pde_separate_mul,
pdsolve, classify_pde, checkpdesol)
from sympy.testing.pytest import raises
a, b, c, x, y = symbols('a b c x y')
def test_pde_separate_add():
x, y, z, t = symbols("x,y,z,t")
F, T, X, Y, Z, u = map(Function, 'FTXYZu')
eq = Eq(D(u(x, t), x), D(u(x, t), t)*exp(u(x, t)))
res = pde_separate_add(eq, u(x, t), [X(x), T(t)])
assert res == [D(X(x), x)*exp(-X(x)), D(T(t), t)*exp(T(t))]
def test_pde_separate():
x, y, z, t = symbols("x,y,z,t")
F, T, X, Y, Z, u = map(Function, 'FTXYZu')
eq = Eq(D(u(x, t), x), D(u(x, t), t)*exp(u(x, t)))
raises(ValueError, lambda: pde_separate(eq, u(x, t), [X(x), T(t)], 'div'))
def test_pde_separate_mul():
x, y, z, t = symbols("x,y,z,t")
c = Symbol("C", real=True)
Phi = Function('Phi')
F, R, T, X, Y, Z, u = map(Function, 'FRTXYZu')
r, theta, z = symbols('r,theta,z')
# Something simple :)
eq = Eq(D(F(x, y, z), x) + D(F(x, y, z), y) + D(F(x, y, z), z), 0)
# Duplicate arguments in functions
raises(
ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(x), u(z, z)]))
# Wrong number of arguments
raises(ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(x), Y(y)]))
# Wrong variables: [x, y] -> [x, z]
raises(
ValueError, lambda: pde_separate_mul(eq, F(x, y, z), [X(t), Y(x, y)]))
assert pde_separate_mul(eq, F(x, y, z), [Y(y), u(x, z)]) == \
[D(Y(y), y)/Y(y), -D(u(x, z), x)/u(x, z) - D(u(x, z), z)/u(x, z)]
assert pde_separate_mul(eq, F(x, y, z), [X(x), Y(y), Z(z)]) == \
[D(X(x), x)/X(x), -D(Z(z), z)/Z(z) - D(Y(y), y)/Y(y)]
# wave equation
wave = Eq(D(u(x, t), t, t), c**2*D(u(x, t), x, x))
res = pde_separate_mul(wave, u(x, t), [X(x), T(t)])
assert res == [D(X(x), x, x)/X(x), D(T(t), t, t)/(c**2*T(t))]
# Laplace equation in cylindrical coords
eq = Eq(1/r * D(Phi(r, theta, z), r) + D(Phi(r, theta, z), r, 2) +
1/r**2 * D(Phi(r, theta, z), theta, 2) + D(Phi(r, theta, z), z, 2), 0)
# Separate z
res = pde_separate_mul(eq, Phi(r, theta, z), [Z(z), u(theta, r)])
assert res == [D(Z(z), z, z)/Z(z),
-D(u(theta, r), r, r)/u(theta, r) -
D(u(theta, r), r)/(r*u(theta, r)) -
D(u(theta, r), theta, theta)/(r**2*u(theta, r))]
# Lets use the result to create a new equation...
eq = Eq(res[1], c)
# ...and separate theta...
res = pde_separate_mul(eq, u(theta, r), [T(theta), R(r)])
assert res == [D(T(theta), theta, theta)/T(theta),
-r*D(R(r), r)/R(r) - r**2*D(R(r), r, r)/R(r) - c*r**2]
# ...or r...
res = pde_separate_mul(eq, u(theta, r), [R(r), T(theta)])
assert res == [r*D(R(r), r)/R(r) + r**2*D(R(r), r, r)/R(r) + c*r**2,
-D(T(theta), theta, theta)/T(theta)]
def test_issue_11726():
x, t = symbols("x t")
f = symbols("f", cls=Function)
X, T = symbols("X T", cls=Function)
u = f(x, t)
eq = u.diff(x, 2) - u.diff(t, 2)
res = pde_separate(eq, u, [T(x), X(t)])
assert res == [D(T(x), x, x)/T(x),D(X(t), t, t)/X(t)]
def test_pde_classify():
# When more number of hints are added, add tests for classifying here.
f = Function('f')
eq1 = a*f(x,y) + b*f(x,y).diff(x) + c*f(x,y).diff(y)
eq2 = 3*f(x,y) + 2*f(x,y).diff(x) + f(x,y).diff(y)
eq3 = a*f(x,y) + b*f(x,y).diff(x) + 2*f(x,y).diff(y)
eq4 = x*f(x,y) + f(x,y).diff(x) + 3*f(x,y).diff(y)
eq5 = x**2*f(x,y) + x*f(x,y).diff(x) + x*y*f(x,y).diff(y)
eq6 = y*x**2*f(x,y) + y*f(x,y).diff(x) + f(x,y).diff(y)
for eq in [eq1, eq2, eq3]:
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
for eq in [eq4, eq5, eq6]:
assert classify_pde(eq) == ('1st_linear_variable_coeff',)
def test_checkpdesol():
f, F = map(Function, ['f', 'F'])
eq1 = a*f(x,y) + b*f(x,y).diff(x) + c*f(x,y).diff(y)
eq2 = 3*f(x,y) + 2*f(x,y).diff(x) + f(x,y).diff(y)
eq3 = a*f(x,y) + b*f(x,y).diff(x) + 2*f(x,y).diff(y)
for eq in [eq1, eq2, eq3]:
assert checkpdesol(eq, pdsolve(eq))[0]
eq4 = x*f(x,y) + f(x,y).diff(x) + 3*f(x,y).diff(y)
eq5 = 2*f(x,y) + 1*f(x,y).diff(x) + 3*f(x,y).diff(y)
eq6 = f(x,y) + 1*f(x,y).diff(x) + 3*f(x,y).diff(y)
assert checkpdesol(eq4, [pdsolve(eq5), pdsolve(eq6)]) == [
(False, (x - 2)*F(3*x - y)*exp(-x/S(5) - 3*y/S(5))),
(False, (x - 1)*F(3*x - y)*exp(-x/S(10) - 3*y/S(10)))]
for eq in [eq4, eq5, eq6]:
assert checkpdesol(eq, pdsolve(eq))[0]
sol = pdsolve(eq4)
sol4 = Eq(sol.lhs - sol.rhs, 0)
raises(NotImplementedError, lambda:
checkpdesol(eq4, sol4, solve_for_func=False))
def test_solvefun():
f, F, G, H = map(Function, ['f', 'F', 'G', 'H'])
eq1 = f(x,y) + f(x,y).diff(x) + f(x,y).diff(y)
assert pdsolve(eq1) == Eq(f(x, y), F(x - y)*exp(-x/2 - y/2))
assert pdsolve(eq1, solvefun=G) == Eq(f(x, y), G(x - y)*exp(-x/2 - y/2))
assert pdsolve(eq1, solvefun=H) == Eq(f(x, y), H(x - y)*exp(-x/2 - y/2))
def test_pde_1st_linear_constant_coeff_homogeneous():
f, F = map(Function, ['f', 'F'])
u = f(x, y)
eq = 2*u + u.diff(x) + u.diff(y)
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(x - y)*exp(-x - y))
assert checkpdesol(eq, sol)[0]
eq = 4 + (3*u.diff(x)/u) + (2*u.diff(y)/u)
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(2*x - 3*y)*exp(-S(12)*x/13 - S(8)*y/13))
assert checkpdesol(eq, sol)[0]
eq = u + (6*u.diff(x)) + (7*u.diff(y))
assert classify_pde(eq) == ('1st_linear_constant_coeff_homogeneous',)
sol = pdsolve(eq)
assert sol == Eq(u, F(7*x - 6*y)*exp(-6*x/S(85) - 7*y/S(85)))
assert checkpdesol(eq, sol)[0]
eq = a*u + b*u.diff(x) + c*u.diff(y)
sol = pdsolve(eq)
assert checkpdesol(eq, sol)[0]
def test_pde_1st_linear_constant_coeff():
f, F = map(Function, ['f', 'F'])
u = f(x,y)
eq = -2*u.diff(x) + 4*u.diff(y) + 5*u - exp(x + 3*y)
sol = pdsolve(eq)
assert sol == Eq(f(x,y),
(F(4*x + 2*y) + exp(x/S(2) + 4*y)/S(15))*exp(x/S(2) - y))
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = (u.diff(x)/u) + (u.diff(y)/u) + 1 - (exp(x + y)/u)
sol = pdsolve(eq)
assert sol == Eq(f(x, y), F(x - y)*exp(-x/2 - y/2) + exp(x + y)/S(3))
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = 2*u + -u.diff(x) + 3*u.diff(y) + sin(x)
sol = pdsolve(eq)
assert sol == Eq(f(x, y),
F(3*x + y)*exp(x/S(5) - 3*y/S(5)) - 2*sin(x)/S(5) - cos(x)/S(5))
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = u + u.diff(x) + u.diff(y) + x*y
sol = pdsolve(eq)
assert sol == Eq(f(x, y),
-x*y + x + y + F(x - y)*exp(-x/S(2) - y/S(2)) - 2)
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
assert checkpdesol(eq, sol)[0]
eq = u + u.diff(x) + u.diff(y) + log(x)
assert classify_pde(eq) == ('1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral')
def test_pdsolve_all():
f, F = map(Function, ['f', 'F'])
u = f(x,y)
eq = u + u.diff(x) + u.diff(y) + x**2*y
sol = pdsolve(eq, hint = 'all')
keys = ['1st_linear_constant_coeff',
'1st_linear_constant_coeff_Integral', 'default', 'order']
assert sorted(sol.keys()) == keys
assert sol['order'] == 1
assert sol['default'] == '1st_linear_constant_coeff'
assert sol['1st_linear_constant_coeff'] == Eq(f(x, y),
-x**2*y + x**2 + 2*x*y - 4*x - 2*y + F(x - y)*exp(-x/S(2) - y/S(2)) + 6)
def test_pdsolve_variable_coeff():
f, F = map(Function, ['f', 'F'])
u = f(x, y)
eq = x*(u.diff(x)) - y*(u.diff(y)) + y**2*u - y**2
sol = pdsolve(eq, hint="1st_linear_variable_coeff")
assert sol == Eq(u, F(x*y)*exp(y**2/2) + 1)
assert checkpdesol(eq, sol)[0]
eq = x**2*u + x*u.diff(x) + x*y*u.diff(y)
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, F(y*exp(-x))*exp(-x**2/2))
assert checkpdesol(eq, sol)[0]
eq = y*x**2*u + y*u.diff(x) + u.diff(y)
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, F(-2*x + y**2)*exp(-x**3/3))
assert checkpdesol(eq, sol)[0]
eq = exp(x)**2*(u.diff(x)) + y
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, y*exp(-2*x)/2 + F(y))
assert checkpdesol(eq, sol)[0]
eq = exp(2*x)*(u.diff(y)) + y*u - u
sol = pdsolve(eq, hint='1st_linear_variable_coeff')
assert sol == Eq(u, exp((-y**2 + 2*y + 2*F(x))*exp(-2*x)/2))
| [
"[email protected]"
]
| |
a45dacabb65a8b878d1cb07374fde8bc5ac07d6d | f305f84ea6f721c2391300f0a60e21d2ce14f2a5 | /7_graph/bfs求无权图的最短路径/广义的bfs/488. 祖玛游戏-bfs剪枝.py | 12413155d1f8a0da0d66c30102d92f4f104f18a7 | []
| no_license | 981377660LMT/algorithm-study | f2ada3e6959338ae1bc21934a84f7314a8ecff82 | 7e79e26bb8f641868561b186e34c1127ed63c9e0 | refs/heads/master | 2023-09-01T18:26:16.525579 | 2023-09-01T12:21:58 | 2023-09-01T12:21:58 | 385,861,235 | 225 | 24 | null | null | null | null | UTF-8 | Python | false | false | 2,038 | py | # 1 <= board.length <= 16
# 1 <= hand.length <= 5
from collections import deque
from functools import lru_cache
import re
# Why breadth-first search?
# We only need the plan that uses the fewest rounds, so BFS can return the result as soon
# as it reaches a state where every ball on the board has been cleared, without exploring
# plans that would need more rounds.
class Solution:
def findMinStep(self, board: str, hand: str) -> int:
"""请你按上述操作步骤移除掉桌上所有球,计算并返回所需的 最少 球数。如果不能移除桌上所有的球,返回 -1 。"""
@lru_cache(None)
def clean(s: str) -> str:
"""碰到三个就删除整个"""
count = 1
while count:
s, count = re.subn(r'(\w)\1{2,}', '', s)
return s
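        # e.g. clean("WWRRRWW") -> "WWWW" -> "" : runs of 3+ equal letters are removed until none remain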
hand = ''.join(sorted(hand))
queue = deque([(board, hand, 0)])
visited = set([(board, hand)])
while queue:
b, h, step = queue.popleft()
if not b:
return step
            # try every insertion position i on the board
for i in range(len(b)):
                # try every ball j from the hand (that ball is removed from the hand if used)
for j in range(len(h)):
                    # key pruning: when the hand ball h[j] and the board balls around position i (b[i-1], b[i]) are all different, inserting here is unnecessary:
sequence = [b[i - 1], b[i], h[j]] if i else [b[i], h[j]]
if len(set(sequence)) < len(sequence):
nextB = clean(b[:i] + h[j] + b[i:])
nextH = h[:j] + h[j + 1 :]
if (nextB, nextH) not in visited:
visited.add((nextB, nextH))
queue.append((nextB, nextH, step + 1))
return -1
print(Solution().findMinStep(board="WRRBBW", hand="RB"))
print(Solution().findMinStep(board="WWRRBBWW", hand="WRBRW"))
# re.subn returns a tuple (new_string, number_of_substitutions)
| [
"[email protected]"
]
| |
c560a98412f1f79c8b28518349b9281f419d3cd1 | 5f313d8fce26a8ecfff8817ff566b7e1810fcba7 | /timethings.py | 4d68c2cbbbfce64ba5da5943421cca52b094884d | []
| no_license | luispedro/mahotas-paper | cd2769a264149cac74ce8c694ca4f02e3f4a6c93 | 698f2a8640feba4e285318e2cd866db3705ec2c3 | refs/heads/master | 2020-03-30T16:26:20.362126 | 2013-08-26T09:03:17 | 2013-08-26T09:03:17 | 4,877,058 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,733 | py | import skimage.morphology
import skimage.filter
import skimage.feature
import numpy as np
import timeit
import mahotas
import cv2
from os import path
luispedro_image = path.join(
path.dirname(mahotas.__file__),
'demos',
'data',
'luispedro.jpg')
f = mahotas.imread(luispedro_image, as_grey=True)
markers = np.zeros_like(f)
markers[100,100] = 1
markers[200,200] = 2
f = f.astype(np.uint8)
markers = markers.astype(int)
otsu = mahotas.otsu(f.astype(np.uint8))
fbin = f > otsu
fbin8 = fbin.astype(np.uint8)
Bc = np.eye(3)
Bc = Bc.astype(bool)
Bc8 = Bc.astype(np.uint8)
f3 = np.dstack([f,f,f])
f3 = f3.astype(np.uint8)
f3 = f3.copy()
filt = np.array([
[1,0,-1,0],
[2,2,3,-2],
[-1,0,0,1]
])
markers32 = markers.astype(np.int32)
def octagon(r):
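    # Build a (2r+1) x (2r+1) boolean octagonal structuring element by cutting the four corners off a square.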
octagon = np.ones((r*2+1, r*2+1), dtype=np.bool)
lim = r//2
for y in xrange(lim):
octagon[y,:lim-y] = 0
octagon &= octagon[::-1]
octagon &= octagon[:,::-1]
return octagon
pre ='''
import skimage.filter
import skimage.morphology
import skimage.feature
import skimage.measure
import numpy as np
import mahotas
import pymorph
import cv2
import timethings
octagon = timethings.octagon
f = timethings.f
f3 = timethings.f3
fbin = timethings.fbin
fbin8 = timethings.fbin8
f64 = f.astype(np.float64)
Bc = timethings.Bc
Bc8 = timethings.Bc8
markers = timethings.markers
markers32 = timethings.markers32
filt = timethings.filt
'''
def t(s):
return min(timeit.timeit(s, setup=pre, number=24) for i in xrange(3))
tests = [
('convolve', [
'mahotas.convolve(f, filt)',
None,
None,
None,
]),
('erode', [
'mahotas.erode(fbin, Bc)',
'pymorph.erode(fbin, Bc)',
'skimage.morphology.erosion(fbin8, Bc8)',
'cv2.erode(fbin8, Bc8)',
]),
('dilate', [
'mahotas.dilate(fbin, Bc)',
'pymorph.dilate(fbin, Bc)',
'skimage.morphology.dilation(fbin8, Bc8)',
'cv2.dilate(fbin8, Bc8)',
]),
('open', [
'mahotas.open(fbin, Bc)',
'pymorph.open(fbin, Bc)',
'skimage.morphology.opening(fbin8, Bc8)',
None,
]),
('median filter (2)', [
'mahotas.median_filter(f, octagon(2))',
None,
'skimage.filter.median_filter(f, 2)',
None,
]),
('median filter (10)', [
'mahotas.median_filter(f, octagon(10))',
None,
'skimage.filter.median_filter(f, 10)',
None,
]),
('center mass', [
'mahotas.center_of_mass(f)',
None,
'skimage.measure.regionprops(np.ones(f.shape, np.intc), ["WeightedCentroid"], intensity_image=f)',
None,
]),
('sobel', [
'mahotas.sobel(f, just_filter=True)',
None,
'skimage.filter.sobel(f)',
'cv2.Sobel(f, cv2.CV_32F, 1, 1)',
]),
('cwatershed', [
'mahotas.cwatershed(f, markers)',
'pymorph.cwatershed(f, markers)',
'skimage.morphology.watershed(f, markers)',
'cv2.watershed(f3, markers32.copy())',
]),
('daubechies', [
'mahotas.daubechies(f, "D4")',
None,
None,
None,
]),
('haralick', [
'mahotas.features.haralick(f)',
None,
'skimage.feature.greycoprops(skimage.feature.greycomatrix(f, [1], [0]))',
None,
]),
]
if __name__ == '__main__':
base = t('np.max(f)')
for name,statements in tests:
print r'{0:<20} &'.format(name),
for st in statements:
if st is None:
result = 'NA'
else:
result = '{:.1f}'.format( t(st)/base )
print '{0:>8} &'.format(result),
print r'\\'
| [
"[email protected]"
]
| |
286c6510e842c109cd1fabfbe090e84a978c9b28 | fab14fae2b494068aa793901d76464afb965df7e | /benchmarks/f3_wrong_hints/scaling_ltl_timed_transition_system/19-sender_receiver_7.py | 46038b17bbf43428b545dfc234384f2bb5c2c34d | [
"MIT"
]
| permissive | teodorov/F3 | 673f6f9ccc25acdfdecbfc180f439253474ba250 | c863215c318d7d5f258eb9be38c6962cf6863b52 | refs/heads/master | 2023-08-04T17:37:38.771863 | 2021-09-16T07:38:28 | 2021-09-16T07:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,406 | py | from typing import FrozenSet
from collections import Iterable
from math import log, ceil
from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or, msat_make_iff
from mathsat import msat_make_leq, msat_make_equal, msat_make_true
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location
delta_name = "delta"
def decl_consts(menv: msat_env, name: str, c_type) -> tuple:
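    # Declare one model variable as a pair of mathsat constants: (current-state symbol, next-state symbol).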
assert not name.startswith("_"), name
s = msat_declare_function(menv, name, c_type)
s = msat_make_constant(menv, s)
x_s = msat_declare_function(menv, name_next(name), c_type)
x_s = msat_make_constant(menv, x_s)
return s, x_s
def make_enum(menv, v_name: str, enum_size: int):
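    # Encode an enum with `enum_size` values over ceil(log2(enum_size)) boolean state
    # variables; returns the (curr, next) bit symbols plus, for every enum value, the
    # predicate identifying that value in the current and in the next state.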
bool_type = msat_get_bool_type(menv)
num_bits = ceil(log(enum_size, 2))
b_vars = []
for idx in range(num_bits):
c_name = "{}{}".format(v_name, idx)
b_vars.append(tuple(decl_consts(menv, c_name, bool_type)))
vals = []
x_vals = []
for enum_val in range(enum_size):
bit_val = format(enum_val, '0{}b'.format(num_bits))
assert len(bit_val) == num_bits
assert all(c in {'0', '1'} for c in bit_val)
assign = [b_vars[idx] if c == '1' else
(msat_make_not(menv, b_vars[idx][0]),
msat_make_not(menv, b_vars[idx][1]))
for idx, c in enumerate(reversed(bit_val))]
pred = assign[0][0]
x_pred = assign[0][1]
for it in assign[1:]:
pred = msat_make_and(menv, pred, it[0])
x_pred = msat_make_and(menv, x_pred, it[1])
vals.append(pred)
x_vals.append(x_pred)
assert len(vals) == enum_size
assert len(x_vals) == enum_size
return b_vars, vals, x_vals
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
m_one = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, m_one)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def diverging_symbs(menv: msat_env) -> frozenset:
real_type = msat_get_rational_type(menv)
delta = msat_declare_function(menv, delta_name, real_type)
delta = msat_make_constant(menv, delta)
return frozenset([delta])
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
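    # Build the timed sender/receiver transition system and the LTL property
    # (G F !s.stutter) -> G (s.wait_ack -> F s.send); returns (symbol map, init, trans, ltl).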
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
r2s, x_r2s = decl_consts(menv, "r2s", int_type)
s2r, x_s2r = decl_consts(menv, "s2r", int_type)
delta, x_delta = decl_consts(menv, delta_name, real_type)
sender = Sender("s", menv, enc, r2s, x_r2s, s2r, x_s2r, delta)
receiver = Receiver("r", menv, enc, s2r, x_s2r, r2s, x_r2s, delta)
curr2next = {r2s: x_r2s, s2r: x_s2r, delta: x_delta}
for comp in [sender, receiver]:
for s, x_s in comp.symb2next.items():
curr2next[s] = x_s
zero = msat_make_number(menv, "0")
init = msat_make_and(menv, receiver.init, sender.init)
trans = msat_make_and(menv, receiver.trans, sender.trans)
# invar delta >= 0
init = msat_make_and(menv, init,
msat_make_geq(menv, delta, zero))
trans = msat_make_and(menv, trans,
msat_make_geq(menv, x_delta, zero))
# delta > 0 -> (r2s' = r2s & s2r' = s2r)
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_equal(menv, x_r2s, r2s),
msat_make_equal(menv, x_s2r, s2r))
trans = msat_make_and(menv, trans,
msat_make_impl(menv, lhs, rhs))
# (G F !s.stutter) -> G (s.wait_ack -> F s.send)
lhs = enc.make_G(enc.make_F(msat_make_not(menv, sender.stutter)))
rhs = enc.make_G(msat_make_impl(menv, sender.wait_ack,
enc.make_F(sender.send)))
ltl = msat_make_impl(menv, lhs, rhs)
return TermMap(curr2next), init, trans, ltl
class Module:
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
*args, **kwargs):
self.name = name
self.menv = menv
self.enc = enc
self.symb2next = {}
true = msat_make_true(menv)
self.init = true
self.trans = true
def _symb(self, v_name, v_type):
v_name = "{}_{}".format(self.name, v_name)
return decl_consts(self.menv, v_name, v_type)
def _enum(self, v_name: str, enum_size: int):
c_name = "{}_{}".format(self.name, v_name)
return make_enum(self.menv, c_name, enum_size)
class Sender(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
int_type = msat_get_integer_type(menv)
real_type = msat_get_rational_type(menv)
loc, x_loc = self._symb("l", bool_type)
evt, x_evt = self._symb("evt", bool_type)
msg_id, x_msg_id = self._symb("msg_id", int_type)
timeout, x_timeout = self._symb("timeout", real_type)
c, x_c = self._symb("c", real_type)
self.move = evt
self.stutter = msat_make_not(menv, evt)
self.x_move = x_evt
self.x_stutter = msat_make_not(menv, x_evt)
self.send = loc
self.wait_ack = msat_make_not(menv, loc)
self.x_send = x_loc
self.x_wait_ack = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc, evt: x_evt, msg_id: x_msg_id,
timeout: x_timeout, c: x_c}
zero = msat_make_number(menv, "0")
one = msat_make_number(menv, "1")
base_timeout = one
# send & c = 0 & msg_id = 0
self.init = msat_make_and(menv,
msat_make_and(menv, self.send,
msat_make_equal(menv, c,
zero)),
msat_make_equal(menv, msg_id, zero))
# invar: wait_ack -> c <= timeout
self.init = msat_make_and(
menv, self.init,
msat_make_impl(menv, self.wait_ack,
msat_make_leq(menv, c, timeout)))
self.trans = msat_make_impl(menv, self.x_wait_ack,
msat_make_leq(menv, x_c, x_timeout))
# delta > 0 | stutter -> l' = l & msg_id' = msg_id & timeout' = timeout &
# c' = c + delta & out_c' = out_c
lhs = msat_make_or(menv, msat_make_gt(menv, delta, zero), self.stutter)
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_msg_id, msg_id)),
msat_make_and(menv,
msat_make_equal(menv, x_timeout, timeout),
msat_make_equal(menv, x_c,
msat_make_plus(menv, c, delta))))
rhs = msat_make_and(menv, rhs,
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
disc_t = msat_make_and(menv, self.move,
msat_make_equal(menv, delta, zero))
# (send & send') ->
# (msg_id' = msg_id & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_send))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id, msg_id),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (send & wait_ack') ->
# (msg_id' = msg_id + 1 & timeout' = base_timeout & c' = 0 & out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.send, self.x_wait_ack))
rhs = msat_make_and(
menv,
msat_make_and(menv,
msat_make_equal(menv, x_msg_id,
msat_make_plus(menv, msg_id, one)),
msat_make_equal(menv, x_timeout, base_timeout)),
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c, out_c)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (c' = 0 & out_c' = out_c &
# (wait_ack' <-> (in_c != msg_id & c > timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs_iff = msat_make_and(menv,
msat_make_not(menv,
msat_make_equal(menv, in_c,
msg_id)),
msat_make_geq(menv, c, timeout))
rhs_iff = msat_make_iff(menv, self.x_wait_ack, rhs_iff)
rhs = msat_make_and(menv,
msat_make_and(menv,
msat_make_equal(menv, x_c, zero),
msat_make_equal(menv, x_out_c,
out_c)),
rhs_iff)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & wait_ack') -> (timeout' > timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack,
self.x_wait_ack))
rhs = msat_make_gt(menv, x_timeout, timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack) -> (send' <-> (in_c = msg_id & c < timeout))
lhs = msat_make_and(menv, disc_t, self.wait_ack)
rhs = msat_make_iff(menv, self.x_send,
msat_make_and(menv,
msat_make_equal(menv, in_c, msg_id),
msat_make_lt(menv, c, timeout)))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait_ack & send') -> (timeout' = base_timeout)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait_ack, self.x_send))
rhs = msat_make_equal(menv, x_timeout, base_timeout)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
class Receiver(Module):
def __init__(self, name: str, menv: msat_env, enc: LTLEncoder,
in_c, x_in_c, out_c, x_out_c, delta):
super().__init__(name, menv, enc)
bool_type = msat_get_bool_type(menv)
loc, x_loc = self._symb("l", bool_type)
self.wait = loc
self.work = msat_make_not(menv, loc)
self.x_wait = x_loc
self.x_work = msat_make_not(menv, x_loc)
self.symb2next = {loc: x_loc}
zero = msat_make_number(menv, "0")
# wait
self.init = self.wait
# delta > 0 -> loc' = loc & out_c' = out_c
lhs = msat_make_gt(menv, delta, zero)
rhs = msat_make_and(menv,
msat_make_iff(menv, x_loc, loc),
msat_make_equal(menv, x_out_c, out_c))
self.trans = msat_make_impl(menv, lhs, rhs)
disc_t = msat_make_equal(menv, delta, zero)
# wait -> (wait' <-> in_c = out_c)
lhs = msat_make_and(menv, disc_t, self.wait)
rhs = msat_make_iff(menv, self.x_wait,
msat_make_equal(menv, in_c, out_c))
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & wait') -> (out_c' = out_c)
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_wait))
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# (wait & work') -> out_c' = in_c
lhs = msat_make_and(menv, disc_t,
msat_make_and(menv, self.wait, self.x_work))
rhs = msat_make_equal(menv, x_out_c, in_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
# work -> out_c' = out_c
lhs = msat_make_and(menv, disc_t, self.work)
rhs = msat_make_equal(menv, x_out_c, out_c)
self.trans = msat_make_and(menv, self.trans,
msat_make_impl(menv, lhs, rhs))
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
delta = mgr.Symbol(delta_name, types.REAL)
r2s = mgr.Symbol("r2s", types.INT)
s2r = mgr.Symbol("r2s", types.INT)
s_l = mgr.Symbol("s_l", types.BOOL)
s_evt = mgr.Symbol("s_evt", types.BOOL)
s_msg_id = mgr.Symbol("s_msg_id", types.INT)
s_timeout = mgr.Symbol("s_timeout", types.REAL)
s_c = mgr.Symbol("s_c", types.REAL)
r_l = mgr.Symbol("r_l", types.BOOL)
symbs = frozenset([delta, r2s, s2r, s_l, s_evt, s_msg_id, s_timeout, s_c,
r_l])
x_delta = symb_to_next(mgr, delta)
x_r2s = symb_to_next(mgr, r2s)
x_s2r = symb_to_next(mgr, s2r)
x_s_l = symb_to_next(mgr, s_l)
x_s_evt = symb_to_next(mgr, s_evt)
x_s_msg_id = symb_to_next(mgr, s_msg_id)
x_s_timeout = symb_to_next(mgr, s_timeout)
x_s_c = symb_to_next(mgr, s_c)
x_r_l = symb_to_next(mgr, r_l)
res = []
r0 = mgr.Real(0)
r1 = mgr.Real(1)
i0 = mgr.Int(0)
i1 = mgr.Int(1)
loc0 = Location(env, mgr.Equals(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, r0))
hint = Hint("h_delta0", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, i0))
hint = Hint("h_s2r0", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(r2s, i0))
loc0.set_progress(0, mgr.Equals(x_r2s, i0))
hint = Hint("h_r2s0", env, frozenset([r2s]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_l)
loc0.set_progress(0, x_s_l)
hint = Hint("h_s_l0", env, frozenset([s_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_evt)
loc0.set_progress(0, x_s_evt)
hint = Hint("h_s_evt0", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_msg_id, i0))
loc0.set_progress(0, mgr.Equals(x_s_msg_id, i0))
hint = Hint("h_s_msg_id0", env, frozenset([s_msg_id]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_timeout, r0))
loc0.set_progress(0, mgr.Equals(x_s_timeout, r0))
hint = Hint("h_s_timeout0", env, frozenset([s_timeout]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.Equals(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, r0))
hint = Hint("h_s_c0", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(0, x_r_l)
hint = Hint("h_r_l0", env, frozenset([r_l]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, r1))
hint = Hint("h_delta1", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, i1))
hint = Hint("h_s2r1", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(r2s, i0))
loc0.set_progress(0, mgr.Equals(x_r2s, i1))
hint = Hint("h_r2s1", env, frozenset([r2s]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, s_evt)
loc0.set_progress(1, mgr.Not(x_s_evt))
loc1 = Location(env, mgr.Not(s_evt))
loc1.set_progress(0, x_s_evt)
hint = Hint("h_s_evt1", env, frozenset([s_evt]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, mgr.GE(s_msg_id, i0))
loc0.set_progress(0, mgr.Equals(x_s_msg_id, mgr.Plus(s_msg_id, i1)))
hint = Hint("h_s_msg_id1", env, frozenset([s_msg_id]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s_timeout, r0))
loc0.set_progress(0, mgr.Equals(x_s_timeout, mgr.Plus(s_timeout, r1)))
hint = Hint("h_s_timeout1", env, frozenset([s_timeout]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s_c, r0))
loc0.set_progress(0, mgr.Equals(x_s_c, mgr.Plus(s_c, r1)))
hint = Hint("h_s_c1", env, frozenset([s_c]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, r_l)
loc0.set_progress(1, mgr.Not(x_r_l))
loc1 = Location(env, mgr.Not(r_l))
loc1.set_progress(0, x_r_l)
hint = Hint("h_r_l1", env, frozenset([r_l]), symbs)
hint.set_locs([loc0, loc1])
res.append(hint)
loc0 = Location(env, mgr.GE(delta, r0))
loc0.set_progress(0, mgr.Equals(x_delta, mgr.Plus(delta, r1)))
hint = Hint("h_delta2", env, frozenset([delta]), symbs)
hint.set_locs([loc0])
res.append(hint)
loc0 = Location(env, mgr.GE(s2r, i0))
loc0.set_progress(0, mgr.Equals(x_s2r, mgr.Plus(s2r, i1)))
hint = Hint("h_s2r2", env, frozenset([s2r]), symbs)
hint.set_locs([loc0])
res.append(hint)
return frozenset(res)
| [
"[email protected]"
]
| |
32a624033c4fcd4b0dab2f56ea427738fac85532 | 0fd5793e78e39adbfe9dcd733ef5e42390b8cc9a | /python3/16_Web_Services/k_Projects/b_twitter/twitter_scrapping.py | e22fe2652cae147f89fc3a8955b3336f6f812e4b | []
| no_license | udhayprakash/PythonMaterial | 3ea282ceb4492d94d401e3bc8bad9bf6e9cfa156 | e72f44e147141ebc9bf9ec126b70a5fcdbfbd076 | refs/heads/develop | 2023-07-08T21:07:33.154577 | 2023-07-03T10:53:25 | 2023-07-03T10:53:25 | 73,196,374 | 8 | 5 | null | 2023-05-26T09:59:17 | 2016-11-08T14:55:51 | Jupyter Notebook | UTF-8 | Python | false | false | 2,215 | py | #!/usr/bin/python
"""
Purpose: Twitter data scraping
"""
import tweepy
class TwitterLogin:
def __init__(self):
consumer_key = "xxxxxxxxxxxxxxxxxxxxx"
consumer_secret = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
access_token = "00000-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
access_token_secret = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
self.api = tweepy.API(
auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True
)
def credentials_verification(self):
result = vars(self.api.verify_credentials())["_json"]
print(
f"""Account
User : {result['name']}
Screen Name : {result['screen_name']}
Location : {result['location']}
Profile description : {result['description']}
Account Created at : {result['created_at']}
Display URL : {result['entities']['url']['urls'][0]['display_url']}
"""
)
class TwitterScrapping(TwitterLogin):
def __init__(self):
TwitterLogin.__init__(self)
# twtr = TwitterLogin()
# twtr.credentials_verification()
twrt_scrp = TwitterScrapping()
twrt_scrp.credentials_verification()
# Latest Public Timeline
tweet = twrt_scrp.api.home_timeline()[0]
print(
f"""
tweet.text : {tweet.text}
tweet.contributors : {tweet.contributors}
tweet.created_at : {tweet.created_at}
tweet.lang : {tweet.lang}
tweet.source : {tweet.source}
tweet.source_url : {tweet.source_url}
tweet.truncated : {tweet.truncated}
tweet.retweet_count : {tweet.retweet_count}
tweet.retweeted : {tweet.retweeted}
tweet.retweet : {tweet.retweet}
tweet.retweets : {tweet.retweets}
tweet.possibly_sensitive : {tweet.possibly_sensitive}
tweet.possibly_sensitive_appealable : {tweet.possibly_sensitive_appealable}
"""
)
| [
"[email protected]"
]
| |
6ebf11b3f019ebe0338ba4e09bbe5dcd2b7fbd4f | ec4e153f3bf1b335bc1b31b85e6f9db4a6c4faa9 | /wd_extractor/Document.py | 6ed52824a553446bd88f07562c5ca97fb6fb3529 | [
"Apache-2.0",
"CC-BY-3.0"
]
| permissive | DuaneNielsen/wd_extractor | 7936ac29ae97972cfe74973108aaad1efa5054b6 | 128a189bacd0cd2d7f1fa598202b9c4e55f48e2f | refs/heads/master | 2021-01-19T14:13:42.441554 | 2017-09-19T02:16:08 | 2017-09-19T02:16:08 | 100,887,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 903 | py | from .Graminator import Graminator
class Document:
def __init__(self, corpus, path, grams):
self.corpus = corpus
self.grams = grams
self.graminator = None
self.path = path
self.tokens = corpus.tokenizer.tokens(self)
def getText(self):
if self.path is not None:
handle = open(self.path, "r")
text = handle.read()
return text
def length(self):
return len(self.tokens)
def nGrams(self, gramsize):
return Graminator(self, gramsize)
def hasNext(self, index):
index += 1
return (index > 0) and index < len(self.tokens)
def nextToken(self, index):
return self.tokens[index + 1]
def hasPrev(self, index):
index -= 1
return (index > 0) and index < len(self.tokens)
def prevToken(self, index):
return self.tokens[index-1]
| [
"[email protected]"
]
| |
12eca4b3e8ae4bf6f27c07a03bbc58a313b36f5f | d668209e9951d249020765c011a836f193004c01 | /tools/pnnx/tests/test_torch_fft_irfft.py | 8f92dd551a1f5c2f0b5ff9c8894b75b1b122d362 | [
"BSD-3-Clause",
"Zlib",
"BSD-2-Clause"
]
| permissive | Tencent/ncnn | d8371746c00439304c279041647362a723330a79 | 14b000d2b739bd0f169a9ccfeb042da06fa0a84a | refs/heads/master | 2023-08-31T14:04:36.635201 | 2023-08-31T04:19:23 | 2023-08-31T04:19:23 | 95,879,426 | 18,818 | 4,491 | NOASSERTION | 2023-09-14T15:44:56 | 2017-06-30T10:55:37 | C++ | UTF-8 | Python | false | false | 1,804 | py | # Tencent is pleased to support the open source community by making ncnn available.
#
# Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import torch
import torch.nn as nn
import torch.nn.functional as F
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
def forward(self, x, y, z):
x = torch.fft.irfft(x, norm="backward")
y = torch.fft.irfft(y, dim=(1), norm="forward")
z = torch.fft.irfft(z, norm="ortho")
return x, y, z
def test():
net = Model()
net.eval()
torch.manual_seed(0)
x = torch.rand(1, 3, 120, 120)
y = torch.rand(1, 100, 2, 120)
z = torch.rand(1, 20, 20)
a = net(x, y, z)
# export torchscript
mod = torch.jit.trace(net, (x, y, z))
mod.save("test_torch_fft_irfft.pt")
# torchscript to pnnx
import os
os.system("../src/pnnx test_torch_fft_irfft.pt inputshape=[1,3,120,120],[1,100,2,120],[1,20,20]")
# pnnx inference
import test_torch_fft_irfft_pnnx
b = test_torch_fft_irfft_pnnx.test_inference()
for a0, b0 in zip(a, b):
if not torch.equal(a0, b0):
return False
return True
if __name__ == "__main__":
if test():
exit(0)
else:
exit(1)
| [
"[email protected]"
]
| |
fff29da02d95309713cc9a0f7a86f69832ba5220 | 83a506a501561602ad3b259341225ddfbddab160 | /GameServer/matchGames/Match_PK_DouDiZhu/redis_instance.py | 3fe50de16f52f543bb74fc19e6b8dcc7b80828c3 | []
| no_license | daxingyou/SouYouJi_Game | 9dc5f02eb28b910efb229653a8d0bffe425a7911 | 7311a994c9aba15b7234331709975ebc37e8453d | refs/heads/master | 2023-03-28T01:36:48.955107 | 2020-04-05T01:24:17 | 2020-04-05T01:24:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | # -*- coding:utf-8 -*-
# !/bin/python
"""
Author: Pipo
Date: $Date$
Revision: $Revision$
Description: Redis
"""
import redis
from configs import CONFIGS
redisdb = None
def getInst(dbNum=CONFIGS['redis']['db']):
global redisdb
redisdb = redis.ConnectionPool(
host=CONFIGS['redis']['host'],
port=CONFIGS['redis']['port'],
db=dbNum,
password=CONFIGS['redis']['password']
)
redisData = redis.Redis(connection_pool=redisdb)
return redisData
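# Usage sketch (the db number below is an assumption; real values come from CONFIGS):
#   default_db = getInst()         # pool on the default db from CONFIGS['redis']['db']
#   other_db = getInst(dbNum=1)    # pool on an explicitly chosen db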
| [
"[email protected]"
]
| |
95e9f1d292ccffad970294b2b502147429f71198 | 23b5337bf410415b7b150e3ad60cafc1578a0441 | /07-User-Authentication/01-Flask-Login/myproject/__init__.py | 54b954d72924a39c7987de9eda326bbc04bd3512 | []
| no_license | VerdantFox/flask_course | b8de13ad312c14229f0c3bc2af70e8609a3b00fb | 47b167b54bc580734fa69fc1a2d7e724adfb9610 | refs/heads/master | 2021-09-10T05:01:47.385859 | 2020-02-24T21:07:05 | 2020-02-24T21:07:05 | 241,973,705 | 0 | 0 | null | 2021-09-08T01:40:59 | 2020-02-20T19:40:42 | Python | UTF-8 | Python | false | false | 761 | py | import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
# Create a login manager object
login_manager = LoginManager()
app = Flask(__name__)
# Often people will also separate these into a separate config.py file
app.config["SECRET_KEY"] = "mysecretkey"
basedir = os.path.abspath(os.path.dirname(__file__))
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + os.path.join(
basedir, "data.sqlite"
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
db = SQLAlchemy(app)
Migrate(app, db)
# We can now pass in our app to the login manager
login_manager.init_app(app)
# Tell users what view to go to when they need to login.
login_manager.login_view = "login"
| [
"[email protected]"
]
| |
51f55bc16f6ed44f56ff1aebecc74e8ef660f3e9 | 222b17dacb95640499ebd484697ead32e83b9ac1 | /find_defining_class.py | 3bf7302beb99684035cd35f6b235fee80a90520b | []
| no_license | cicekozkan/python-examples | 08330ef0fb1678cace17716ac2f490a3c5b95dd2 | 01b0e654c884946f8353995333a6946062c9c158 | refs/heads/master | 2021-01-14T14:06:37.585963 | 2014-12-26T07:55:13 | 2014-12-26T07:55:13 | 25,510,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 17 11:46:43 2014
@author: ocicek
"""
def find_defining_class(obj, meth_name):
"""takes an object and a method name (as a string) and returns
the class that provides the definition of the method"""
for ty in type(obj).mro():
if meth_name in ty.__dict__:
return ty | [
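# Minimal usage sketch (hypothetical classes, only executed when run directly):
if __name__ == "__main__":
    class A(object):
        def greet(self):
            return "hi"

    class B(A):
        pass

    # greet is inherited, so the defining class is A
    print(find_defining_class(B(), 'greet'))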
"[email protected]"
]
| |
663f935d7eb0b3d622d212ba615d6a7387719c88 | c4cb90afb658a822c4ab867eec979227c0a25a6d | /testdemo/settings.py | 752c0a3676d4faf49f9a97caa9ee3abc5b89683d | []
| no_license | Contraz/demosys-py-test | 81afb3dd801c0deb6046ddb0e7836de61182a36f | 2aa760cb94ea34e3fb610ca8c43f1549ba9b53de | refs/heads/master | 2021-01-19T16:58:33.608630 | 2018-07-13T07:59:34 | 2018-07-13T07:59:34 | 88,294,443 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,823 | py | import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
DEBUG = False
SCREENSHOT_PATH = os.path.join(PROJECT_DIR, 'screenshots')
# Profile: any, core, compat
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"size": (1280, 720),
"vsync": True,
"resizable": True,
"fullscreen": False,
"title": "demosys-py",
"cursor": True,
}
# MUSIC = os.path.join(PROJECT_DIR, 'resources/music/tg2035.mp3')
TIMER = 'demosys.timers.Timer'
# TIMER = 'demosys.timers.RocketTimer'
# TIMER = 'demosys.timers.RocketMusicTimer'
# TIMER = 'demosys.timers.MusicTimer'
ROCKET = {
'mode': 'project',
# 'mode': 'editor',
'rps': 60,
'project': os.path.join(PROJECT_DIR, 'resources', 'cube.xml'),
'files': os.path.join(PROJECT_DIR, 'resources', 'tracks'),
}
# What effects to load
EFFECTS = (
# 'testdemo.plain',
# 'testdemo.bouncingcubes',
# 'testdemo.bouncingcubes_instanced',
# 'testdemo.cube',
# 'testdemo.deferred',
# 'demosys.deferred',
'testdemo.feedback',
# 'testdemo.multilayer',
# 'testdemo.rockettest',
)
SHADER_DIRS = (
os.path.join(PROJECT_DIR, 'resources/shaders'),
)
SHADER_FINDERS = (
'demosys.core.shaderfiles.finders.FileSystemFinder',
'demosys.core.shaderfiles.finders.EffectDirectoriesFinder',
)
# Hardcoded paths to texture dirs
TEXTURE_DIRS = (
os.path.join(PROJECT_DIR, 'resource/textures'),
)
# Finder classes
TEXTURE_FINDERS = (
'demosys.core.texturefiles.finders.FileSystemFinder',
'demosys.core.texturefiles.finders.EffectDirectoriesFinder'
)
# Tell demosys how to find shaders split into multiple files
SHADERS = {
'vertex_shader_suffix': ('vert', '_vs.glsl', '.glslv'),
'fragment_shader_suffix': ('frag', '_fs.glsl', '.glslf'),
'geometry_shader_suffix': ('geom', '_gs.glsl', '.glslg'),
}
| [
"[email protected]"
]
| |
f1ef29d00b9e612458bdb8429ac6cc2833dcfeb1 | cd58faaffc84a4b1194fa55206ecce3458289edb | /setup.py | 00f05e0c3c24ac0059253c0b709c8ccd9fd0b61a | [
"MIT"
]
| permissive | danieleteti/revelation | 89327833d896c7350d41a7983d4781d980134a79 | de4f8221e6c78aca174600dd333b0f9a5f62baa2 | refs/heads/master | 2020-03-21T08:10:47.420032 | 2018-07-17T18:05:17 | 2018-07-17T18:05:17 | 138,326,204 | 0 | 0 | MIT | 2018-06-22T16:43:33 | 2018-06-22T16:43:33 | null | UTF-8 | Python | false | false | 2,222 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""revelation setup file"""
import os
import re
from setuptools import find_packages, setup
PACKAGE = "revelation"
REQUIREMENTS = [
"Jinja2==2.10",
"Werkzeug==0.14.1",
"click==6.7",
"gevent-websocket==0.10.1",
"gevent==1.3.4",
"watchdog==0.8.3",
]
TEST_REQUIREMENTS = [
"coverage==4.5.1",
"coveralls==1.3.0",
"flake8==3.5.0",
"mock",
"nose==1.3.7",
]
with open("README.md", "r") as f:
README = f.read()
with open(os.path.join(PACKAGE, "__init__.py")) as init_file:
INIT = init_file.read()
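# Pull __version__, __author__ and __email__ out of the package's __init__.py with
# regexes so setup.py never has to import the package itself.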
VERSION = re.search(
"^__version__ = ['\"]([^'\"]+)['\"]", INIT, re.MULTILINE
).group(1)
AUTHOR = re.search(
"^__author__ = ['\"]([^'\"]+)['\"]", INIT, re.MULTILINE
).group(1)
EMAIL = re.search(
"^__email__ = ['\"]([^'\"]+)['\"]", INIT, re.MULTILINE
).group(1)
setup(
name=PACKAGE,
version=VERSION,
description="Make awesome reveal.js presentations with revelation",
long_description=README,
long_description_content_type="text/markdown",
author=AUTHOR,
author_email=EMAIL,
url="https://github.com/humrochagf/revelation",
license="MIT",
packages=find_packages(),
package_data={PACKAGE: ["templates/presentation.html"]},
zip_safe=False,
install_requires=REQUIREMENTS,
entry_points=dict(console_scripts=["revelation=revelation.cli:cli"]),
platforms="any",
keywords="presentation slides reveal.js markdown",
classifiers=[
"Environment :: Console",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Multimedia :: Graphics :: Presentation",
"Topic :: Text Processing :: Markup :: HTML",
],
test_suite="tests",
tests_require=TEST_REQUIREMENTS,
extras_require={"test": TEST_REQUIREMENTS},
)
| [
"[email protected]"
]
| |
5fb0d6de6e07ff397e5a483f3a634518532a6424 | 427cb811a465677542172b59f5e5f102e3cafb1a | /python/print/printContent.py | 6a213db972abd85fe761285d5c7b5bbb5ae57cdd | []
| no_license | IzaakWN/CodeSnippets | 1ecc8cc97f18f77a2fbe980f322242c04dacfb89 | 07ad94d9126ea72c1a8ee5b7b2af176c064c8854 | refs/heads/master | 2023-07-26T21:57:10.660979 | 2023-07-20T20:35:59 | 2023-07-20T20:35:59 | 116,404,943 | 18 | 4 | null | null | null | null | UTF-8 | Python | false | false | 4,361 | py | # https://docs.python.org/2/library/optparse.html
# http://www.macworld.com/article/1132219/software-utilities/termfoldercomp.html
# https://automatetheboringstuff.com/chapter7/
# TODO: function to replace patterns https://docs.python.org/2/library/re.html
# TODO: add month and year to fileName
# TODO .bundle
import os, sys
from argparse import ArgumentParser
import re
import time
argv = sys.argv
parser = ArgumentParser(description="Make textfile with hierarchy of subdir for a given dir")
parser.add_argument( "file",
type=str, action='store',
metavar="DIRECTORY", help="Input directory" )
parser.add_argument( "-o", "--output", dest="fileName",
default=None, action='store',
metavar="FILE_NAME", help="file name to print subdirs hierarchy" )
parser.add_argument( "-t", "--extensions", dest="extensions",
nargs='+', default=None, action='store',
metavar="EXT", help="only specified extensions" )
parser.add_argument( "-d","--depth", dest="maxDepth",
type=int, default=None, action='store',
metavar="MAX_DEPTH", help="set maximum subdir depth" )
parser.add_argument( "-e", "--excludeFiles", dest="excludeFiles",
default=False, action='store_true',
help="exclude files" )
parser.add_argument( "-a", "--all", dest="showAll",
default=False, action='store_true',
help="show hidden files and directories" )
args = parser.parse_args()
fileName = args.fileName
extensions = args.extensions
maxDepth = args.maxDepth
includeFiles = not args.excludeFiles
showAll = args.showAll
print args.file
tab = " "
def replacePattern2(string,pattern,replaceString):
parts = pattern.split("*")
a = 0
for part in parts:
if part in string[a:]:
            a = string[a:].index(part)
else:
return string
def replacePattern2(string,patterns,replaceString=""):
# pattern = re.compile (r'\[720.*?BluRay.*?YIFY\]')
# pattern.findall("lol (2010) [720p foo BluRay YIFY bar]")
for pattern in patterns:
pattern = pattern.replace("[","\[").replace("]","\]").replace("*",".*?")
comp = re.compile(pattern)
        matches = comp.findall(string)
        for match in matches:
            string = string.replace(match,replaceString,1)
    return string
def listSubDirs(dir,extensions=[],indent="",depth=0):
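    """Recursively collect the entries under dir as indented strings.
    Honours the module-level options: extension filter, maxDepth, includeFiles,
    showAll (hidden entries), and treats .app bundles as files."""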
list = os.listdir(dir)
hierarchy = [ ]
for i in list:
if i[0] != "." or showAll:
subdir = dir+"/"+i
if os.path.isdir(subdir) and not i[-4:] == ".app":
hierarchy += [ indent+i ]
if (maxDepth == None or depth < maxDepth):
hierarchy += listSubDirs( subdir,
extensions=extensions,
indent=tab+indent,
depth=depth+1 )
elif includeFiles or i[-4:] == ".app":
if extensions:
for ext in extensions:
if ext == i[-len(ext):]:
hierarchy += [ indent+i ]
break
else:
hierarchy += [ indent+i ]
return hierarchy
def main(dir):
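    # Build the hierarchy for dir, print it, and write it to a dated
    # "<dir> hierarchy d-m-Y.txt" file unless an output name was given.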
global fileName
path = "/"
if "/" in dir:
if dir[-1] == "/":
dir = dir[:-1]
path = dir[:dir.rfind("/")+1]
hierarchy = listSubDirs(dir,extensions=extensions)
for i in hierarchy:
print i
if not fileName:
t = time.struct_time(time.localtime())
fileName = "%s hierarchy %i-%i-%i.txt" % (dir.replace(path,""), t.tm_mday, t.tm_mon, t.tm_year)
    file = open(fileName,'w')
file.write(dir+"\n\n")
for i in hierarchy:
file.write(i+"\n")
print ">>> %s written" % fileName
file.close()
if __name__ == '__main__':
if len(sys.argv) > 1:
dir = str(sys.argv[1])
if os.path.isdir(dir):
main(dir)
else:
if not os.path.isdir(dir):
print ">>> ERROR: argument is not a directory: %s" % dir
else:
print ">>> ERROR: Needs an arguments"
print ">>> done"
| [
"[email protected]"
]
| |
a5c7326e28f20fc08a463bfb04e69b82c6be461e | 466c185dd064d0a1fb8f20e72b21e227e2cb3efc | /individual_tutorials/pygamestartercode-PghTrickster-master/00-IntroToPython/01_expressions.py | ce823f663fc15dfaa6c674cfdb136a7c8bc9ae00 | [
"MIT"
]
| permissive | rhit-catapult/2021-session1 | 3e937235fe48cb03a1dc69b5573879a17b2e81eb | 60c70abeb90ab7edc8d6ddb2c6beb12243a244fc | refs/heads/main | 2023-06-15T20:34:34.449474 | 2021-07-16T20:15:15 | 2021-07-16T20:15:15 | 386,752,351 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,282 | py | """
Permits exploration of EXPRESSIONS, e.g. 3 + (5 * 2) and "hello" + "goodbye",
and NAMES and ASSIGNMENT, e.g. n = n + 1
Authors: David Mutchler, Sana Ebrahimi, Mohammed Noureddine, Vibha Alangar,
Matt Boutell, Dave Fisher, their colleagues, and
"""
import random
import math
###############################################################################
# Done 1: Smile
###############################################################################
# You Fool, I AM Smiling
###############################################################################
# Done 2:
# Write a statement that prints your name
###############################################################################
print("My Name is:")
print("Alexander Atticus Trick")
###############################################################################
# Part 1: Numbers, Arithmetic, and Precedence.
###############################################################################
###############################################################################
# Done: 3.
# Uncomment the following and then run the program, paying close attention
# to what gets printed.
# _
# Then type an example of your own for each of:
# -- subtraction
# -- division
# and run the program, checking that what gets printed is what you expect.
###############################################################################
print()
print("_TODO 3:")
print("4 + 8 evaluates to: ", 4 + 8)
print("7 * 10 evaluates to: ", 7 * 10)
print("1.53 + 8 evaluates to:", 1.53 + 8)
###############################################################################
# Done: 4.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
###############################################################################
print()
print("_TODO 4:")
print("(4 + 2) * 3 evaluates to:", (4 + 2) * 3)
print("4 + (2 * 3) evaluates to:", 4 + (2 * 3))
print("4 + 2 * 3 evaluates to:", 4 + 2 * 3)
print("(4 - 2) + 3 evaluates to:", (4 - 2) + 3)
print("4 - (2 + 3) evaluates to:", 4 - (2 + 3))
print("4 - 2 + 3 evaluates to:", 4 - 2 + 3)
###############################################################################
# Done: 5.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
###############################################################################
print()
print("_TODO 5:")
print("2 ** 10 evaluates to:", 2 ** 10)
print("10 ** 2 evaluates to:", 10 ** 2)
print("2 ** 0.5 evaluates to:", 2 ** 0.5)
print("10 ** -2 evaluates to:", 10 ** -2)
print("10 ** -0.5 evaluates to:", 10 ** -0.5, "(do you see why?)")
###############################################################################
# Done: 6.
# Type some expressions of your own choosing that use combinations of:
# -- addition, subtraction
# -- multiplication, division
# -- exponentiation
# using parentheses to make clear the order of operations.
# Then run the program, checking that what gets printed is what you expect.
# _
###############################################################################
print()
print("_TODO 6:")
print((1 ** 1000) * 21 - 42 + (84 / 4))
###############################################################################
# Part 2: Exceptions: Syntax and Run-Time Errors.
###############################################################################
###############################################################################
# Done: 7.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Then comment-out the line that causes the syntax error.
# _
# Now type some other statement that causes a syntax error,
# for example a statement that is missing a required parenthesis.
# Run again to see the error-message from your syntax error,
# and finally comment-out your statement to continue to the next _TODO.
###############################################################################
print()
print("_TODO 7:")
# This is crazy! Python will make no sense of it!
###############################################################################
# Done: 8.
# Uncomment the following and then run the program,
# paying close attention to what gets printed, especially the last red line.
# Note that the error-output (in red) may (or may not) appear BEFORE the
# ordinary output from previously executed PRINT statements.
# _
# Then comment-out the line that causes the run-time error.
###############################################################################
print()
print("_TODO 8:")
print("3 + 2 evaluates to:", 3 + 2)
# print("3 / 0 evaluates to:", 3 / 0)
###############################################################################
# Done: 9.
# Uncomment the following and then run the program,
# paying close attention to what gets printed, especially the last red line.
# Again note that the error-output (in red) may PRECEDE ordinary output.
# _
# Then comment-out the first line that causes the run-time error
# and run the program again to see the result of running the line below it.
###############################################################################
print()
print("_TODO 9:")
# print("3 / 'hello' evaluates to:", 3 / 'hello')
###############################################################################
# Done: 10.
# Type some expressions of your own choosing that cause error messages.
# Then run the program, paying close attention to the last line
# of each error message (in red).
# _
###############################################################################
# print()
# print("_TODO 10:")
# print(four)
# print(four / 3)
# print(four / 3 / 0)
###############################################################################
# Part 3: Objects, Types, and Values.
###############################################################################
###############################################################################
# Done: 11.
# READ the following statements and PREDICT what they will produce as output.
# Then, uncomment them and run the program, checking your predictions
# and learning from any predictions that you got wrong
###############################################################################
print()
print("_TODO 11:")
print("The type of 482 is:", type(482))
print("The type of 48.203 is:", type(48.203))
print('The type of "blah blah blah" is:', type("blah blah blah"))
print("The type of 'blah blah blah' is:", type('blah blah blah'))
print("The type of [4, 2, 9] is:", type([4, 2, 9]))
print("The type of (4, 2, 9) is:", type((4, 2, 9)))
print("The type of min is:", type(min))
print("The type of 'min' is:", type('min'))
print("The type of min(4, 6, 2, 12, 10) is:", type(min(4, 6, 2, 12, 10)))
print("The type of min(4, 6, 2.0, 12, 10) is:", type(min(4, 6, 2.0, 12, 10)))
###############################################################################
# Done: 12.
# Type an expression that involves addition, subtraction and multiplication
# (but NOT division, yet), using whole numbers (which are of type int).
# Then run the program, checking that what gets printed is what you expect.
# _
# Next, repeat the above, but making just a single one of the numbers in
# your expression a float, by appending a decimal point to it, like this:
# instead of 2 (which is an int), write 2.0 (which is a float).
# _
# Finally, try division by uncommenting the following and then run the program,
# paying close attention to what gets printed. What do you notice about the
# type that results from division, even if both arguments are int objects?
###############################################################################
print()
print("_TODO 12:")
print("4.2 / 2.0 evaluates to:", 4.2 / 2.0)
print("4.2 / 2 evaluates to:", 4.2 / 2)
print("4 / 2 evaluates to:", 4 / 2)
print("3 / 2 evaluates to:", 3 / 2)
###############################################################################
# Done: 13.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Then try more expressions involving the // and % operators
# until you understand what those operators do.
###############################################################################
print()
print("_TODO 13:")
print("17 // 5 evaluates to:", 17 // 5)
print("17 % 5 evaluates to:", 17 % 5)
###############################################################################
# Done: 14.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Then try more expressions involving string arithmetic as needed, until you
# understand what the + and * operators do when applied to strings.
###############################################################################
print()
print("_TODO 14:")
#
print("hello" + "goodbye girl")
print("big" * 20)
print(("hello " + "goodbye ") * 4)
###############################################################################
# Done: 15.
# Type a statement that prints:
# I'm not a bug, that's right!
# and then run the program, checking that it printed the above sentence
# (including the punctuation exactly as written above).
# _
# Then repeat the above for the sentence:
# What does "yarborough" mean?
# _
# Then repeat the above for the sentence:
# I'm on "pins and needles" about '"'".
# Hint: consider using the + operator as part of your solution.
# _
###############################################################################
print()
print("_TODO 15:")
print("I'm not a bug, that's right! ")
print('What does "yarborough " mean?')
print("I'm on " + '"pins and needles" about' + "'" + '"' + "'" + '"' )
###############################################################################
# Part 4: Names, Variables, and Assignment.
###############################################################################
###############################################################################
# Done: 16.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Then comment-out the line that causes the run-time error,
# PREDICT what the subsequent lines will print,
# and run again to check your predictions.
# _
# Finally, practice assignment as suggested by the examples below, that is:
# choose your own names, given them values by using the assignment (=)
# operator, and define new names by using expressions that include names
# that you defined previously.
###############################################################################
print()
print("_TODO 16:")
first_program = "Hello, world!"
print(first_program)
#print(greeting)
#
greeting = "Hello, earthlings"
print(greeting)
print(first_program + (greeting * 2))
#
n = 3
print(first_program * n)
n = 2 * first_program
print(n + greeting)
###############################################################################
# Donw: 17.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Throughout this program, remember that error-output may (or may not)
# PRECEDE ordinary output from previous PRINT statements. Be sure to scroll
# up to see if any error message (in red) appears higher up in the Console.
# _
# Then repeatedly:
# -- comment-out the line that causes a run-time error
# -- run again to see the output from the statements that follow it.
# until you see the output from the last statement below,
# noting its perhaps-surprising output.
# _
# Finally, try out your own assignment statements that yield run-time errors.
###############################################################################
# print()
# print("_TODO 17:")
# r = 0
# s = -9
# t = s / r
# y = "oops" + s
# u = math.sqrt(-2)
# v = (-2) ** 0.5
# print(v)
###############################################################################
# Done: 18.
# Uncomment the following and then run the program,
# paying close attention to what gets printed.
# _
# Then comment-out the line that causes the run-time error,
# PREDICT what the subsequent lines will print,
# and run again to check your predictions.
###############################################################################
print()
print("_TODO 18:")
a = 45
# 45 = a
b = 10
c = b + 20
b = c
print(a, b, c)
###############################################################################
# Done: 19.
# Uncomment the following and PREDICT what will get printed.
# Then run the program, checking to see whether your prediction is correct.
###############################################################################
print()
print("_TODO 19:")
x = 5
x = x + 1
print(x)
#
x = x + 1
print(x)
#
x = x + 1
print(x)
###############################################################################
# Done: 20.
# Uncomment the following and PREDICT what will get printed.
# (Hint: what gets printed is NOT 75 10.)
# Then run the program, checking to see whether your prediction is correct.
###############################################################################
print()
print("_TODO 20:")
x = 10
y = 75
x = y
y = x
print(x, y)
###############################################################################
# Done.
# The statements below make x and y refer to random integers between 1 and 99,
# then prints the values of x and y.
# _
# Challenge: can you write statements below the following that causes the
# values of x and y to SWAP? For example, if the values of x and y are set
# randomly to 40 and 33, so that the given print statement prints: 40 33
# then your code should print: 33 40
# _
# Spend up to 1 minute on this challenge, typing your code and running the
# program to try out your solution.
# _
###############################################################################
print()
print("_TODO 22:")
x = random.randint(1, 99)
y = random.randint(1, 99)
print(x, y)
# Challenge Area
z = x
x = y
y = z
print(x, y)
| [
"[email protected]"
]
| |
376d6b0ccb6509c96d3c340f24977524379fc444 | 45de3aa97525713e3a452c18dcabe61ac9cf0877 | /src/secondaires/diligence/fonctions/diligences.py | 0c9dd2f3eb7a0ab9f84de366ec3c7a1105448876 | [
"BSD-3-Clause"
]
| permissive | stormi/tsunami | 95a6da188eadea3620c70f7028f32806ee2ec0d1 | bdc853229834b52b2ee8ed54a3161a1a3133d926 | refs/heads/master | 2020-12-26T04:27:13.578652 | 2015-11-17T21:32:38 | 2015-11-17T21:32:38 | 25,606,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,016 | py | # -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la fonction diligences."""
from primaires.scripting.fonction import Fonction
from primaires.scripting.instruction import ErreurExecution
class ClasseFonction(Fonction):
"""Retourne les diligences (salle d'entrée)."""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.toutes_diligences)
@staticmethod
def toutes_diligences():
"""Retourne toutes les diligences de l'univers.
Cette fonction retourne toutes les diligences sous la forme
d'une liste. Cette liste contient des salles. Les fonctions
et actions manipulant les diligencs attendent une salle
comme paramètre : les salles retournées sont les salles
d'entrées (celles de mnémonique "1"). La diligence possède
normalement une sortie "bas" menant vers la salle permettant
d'accéder à la diligence.
Cette fonction n'attend aucun paramètre.
Exemple d'utilisation :
diligences = diligences()
pour chaque entree dans diligences:
exterieur = destination(entree, "bas")
# exterieur contient la salle à l'extérieur de la diligence
fait
"""
zones = importeur.diligence.zones
entrees = []
for zone in zones:
salle = importeur.salle.salles.get("{}:1".format(zone.cle))
if salle:
entrees.append(salle)
return entrees
| [
"[email protected]"
]
| |
8fc69ea6d952ef1e4cfc879a40a170fe9c897d6c | d9fd9c6329461235f140393f1e934362d0f645df | /Unidad 2/Módulo 6/Sección 4/eje_09.py | e3cf1510314a26331adc0b550e3c13291c3325ad | [
"MIT"
]
| permissive | angelxehg/utzac-python | e6b5ee988d1d76c549ab0fa49717eb042fa7d91f | fb88bcc661518bb35c08a102a67c20d0659f71db | refs/heads/main | 2022-12-02T11:16:27.134741 | 2020-08-14T19:38:33 | 2020-08-14T19:38:33 | 265,944,612 | 0 | 0 | MIT | 2020-08-07T21:23:53 | 2020-05-21T20:25:24 | Python | UTF-8 | Python | false | false | 375 | py | class MiClase:
pass
obj = MiClase()
obj.a = 1
obj.b = 2
obj.i = 3
obj.ireal = 3.5
obj.entero = 4
obj.z = 5
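# incIntsI: bump by one every attribute whose name starts with 'i' and whose value is an int.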
def incIntsI(obj):
for name in obj.__dict__.keys():
if name.startswith('i'):
val = getattr(obj, name)
if isinstance(val, int):
setattr(obj, name, val + 1)
print(obj.__dict__)
incIntsI(obj)
print(obj.__dict__)
| [
"[email protected]"
]
| |
bb936e36f73b3022e5fc4ff938b2e48d6d89e8c1 | 4273f162abb12ef1939271c2aabee9547ac6afee | /studio_usd_pipe/test/ver.py | e3d449cb801732082a041c7c123caf699f61c94a | []
| no_license | xiyuhao/subins_tutorials | 2717c47aac0adde099432e5dfd231606bf45a266 | acbe4fe16483397e9b0f8e240ca23bdca652b92d | refs/heads/master | 2023-07-28T13:42:41.445399 | 2021-09-12T11:02:37 | 2021-09-12T11:02:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | input_data = {
"exe": [
"KONSOLE_EXE",
"/venture/source_code/subins_tutorials/studio_usd_pipe/bin/build-in/konsole/main.sh"
],
"name": [
"APPLICATION_NAME",
"konsole2.10.5"
],
"version": [
"KONSOLE_VERSION",
"konsole2.10.5"
],
"path": [
"KONSOLE_PATH",
"/venture/source_code/subins_tutorials/studio_usd_pipe/bin/build-in/konsole"
],
"order": 0,
"bash": "/venture/source_code/subins_tutorials/studio_usd_pipe/bin/build-in/konsole/main.sh",
"icon": [
"KONSOLE_ICON",
"/venture/source_code/subins_tutorials/studio_usd_pipe/resource/icons/konsole.png"
]
}
import os
import json
os.environ['KONSOLE_EXE'] = "/venture/source_code/subins_tutorials/studio_usd_pipe/bin/build-in/konsole/main.sh:subin"
for each in input_data:
if not isinstance(input_data[each], list):
continue
env_name = input_data[each][0]
env_value = input_data[each][1]
if isinstance(env_value, list):
env_value = ':'.join(env_value)
else:
env_value = str(env_value)
if os.getenv(env_name):
envrons = os.getenv(env_name).split(':')
envrons.append(env_value)
envrons = list(set(envrons))
        env_value = ':'.join(envrons)
        os.environ[env_name] = env_value
    else:
        os.environ[env_name] = str(env_value)
"[email protected]"
]
| |
4229eb3d57d5f03b46b944d86271693266461296 | e73a2ff9458effe038ebabfe9db6cdaf0c5bc473 | /order_food_online_project/order_food_online/urls.py | c5771206f24064f500d0c904aa8232d203cf5dcb | [
"MIT"
]
| permissive | MaksNech/django_order_food_ingredients | fcad5668b92b90776715d39e3f241577cf4364fa | 3578e36570ce99b25136942320fbcd7df956d435 | refs/heads/master | 2020-04-20T21:20:38.496108 | 2019-04-06T15:17:29 | 2019-04-06T15:17:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,333 | py | """order_food_online URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
from django.conf import settings
from django.shortcuts import redirect
from django.conf.urls.static import static
urlpatterns = [
path('api/v1/', include('order_food_online.api_urls')),
path('', lambda request: redirect('foods/', permanent=True)),
path('i18n/', include('django.conf.urls.i18n')),
path('admin/', admin.site.urls),
path('foods/', include('foods.urls')),
path('notes/', include('notes.urls')),
path('authentication/', include('authentication.urls')),
path('accounts/', include('django.contrib.auth.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
]
| |
820c2bd2006c8b43d126a6d5226df4dd461d5814 | b6ef959b538e4bffec92998a553175248bd72a77 | /06-Machine_Learning/brain.py | 1376cf1f411443947b708acf7499cd6bdf52de49 | [
"MIT"
]
| permissive | suzynakayama/udemy-python-dev | 9e384e3683a300f07c14d2a5862003038a4b169c | fbb35d00f94296da1281e6042a4efe506f79dddb | refs/heads/main | 2023-02-10T11:50:47.650049 | 2021-01-07T22:46:52 | 2021-01-07T22:46:52 | 307,135,927 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | import os
from imageai.Classification import ImageClassification
# get current directory
execution_path = os.getcwd()
prediction = ImageClassification()
prediction.setModelTypeAsMobileNetV2()
prediction.setModelPath(os.path.join(execution_path, "mobilenet_v2.h5"))
prediction.loadModel()
predictions, probabilities = prediction.classifyImage(os.path.join(execution_path, "giraffe.jpg"), result_count=5 )
for eachPrediction, eachProbability in zip(predictions, probabilities):
print(eachPrediction , " : " , eachProbability) | [
"[email protected]"
]
| |
a433ae84fb074b61840e19b067915bc4fc1b848c | 490ffe1023a601760ae7288e86723f0c6e366bba | /kolla-docker/patching/zun_compute_api/provideraccount.py | a338bd5fd861592d8f7f624b5913d613b42fd69c | []
| no_license | bopopescu/Cloud-User-Management | 89696a5ea5d2f95191327fbeab6c3e400bbfb2b8 | 390988bf4915a276c7bf8d96b62c3051c17d9e6e | refs/heads/master | 2022-11-19T10:09:36.662906 | 2018-11-07T20:28:31 | 2018-11-07T20:28:31 | 281,786,345 | 0 | 0 | null | 2020-07-22T21:26:07 | 2020-07-22T21:26:06 | null | UTF-8 | Python | false | false | 1,679 | py | def provideraccount_update(self, context, container, *args):
if direct_action:
return self.manager.provideraccount_update(context, container, *args)
else:
return self.rpcapi.provideraccount_update(context, container, *args)
def provideraccount_show(self, context, container, *args):
if direct_action:
return self.manager.provideraccount_show(context, container)
else:
return self.rpcapi.provideraccount_show(context, container)
def provideraccount_create(self, context, new_provideraccount, extra_spec,
requested_networks):
host_state = None
try:
host_state = {} # self._schedule_container(context, new_provideraccount, extra_spec)
except Exception as exc:
# new_provideraccount.status = consts.ERROR
# new_provideraccount.status_reason = str(exc)
# new_provideraccount.save(context)
return
if direct_action:
self.manager.provideraccount_create(context, "", requested_networks, new_provideraccount)
else:
self.rpcapi.provideraccount_create(context, "", new_provideraccount, "", requested_networks)
# self.rpcapi.provideraccount_create(context, host_state['host'],
# new_provideraccount, host_state['limits'],
# requested_networks)
def provideraccount_delete(self, context, container, *args):
return self.manager.provideraccount_delete(context, container, True)
# return self.rpcapi.provideraccount_delete(context, container, *args)
| [
"[email protected]"
]
| |
87dcdc1f187f0619115ef51295c60468005bd5f3 | dcce56815dca2b18039e392053376636505ce672 | /dumpscripts/itertools_filterfalse.py | 4db9836daa58ad384b41f161c27d4886ab93f22c | []
| no_license | robertopauletto/PyMOTW-it_3.0 | 28ff05d8aeccd61ade7d4107a971d9d2576fb579 | c725df4a2aa2e799a969e90c64898f08b7eaad7d | refs/heads/master | 2021-01-20T18:51:30.512327 | 2020-01-09T19:30:14 | 2020-01-09T19:30:14 | 63,536,756 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | # itertools_filterfalse.py
from itertools import *
def check_item(x):
print('Verifica:', x)
return x < 1
for i in filterfalse(check_item, [-1, 0, 1, 2, -2]):
print('Trattengo:', i)
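# filterfalse() keeps only the items for which check_item() returns False,
# so here only 1 and 2 make it through ('Verifica' = check, 'Trattengo' = keep).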
| [
"[email protected]"
]
| |
747ca14a18296d4beabd473f554d3da345152774 | 847273de4b1d814fab8b19dc651c651c2d342ede | /.history/sok2_20180606104430.py | e3d724e0922456fec8afc2db0669485e5ed3545c | []
| no_license | Los4U/sudoku_in_python | 0ba55850afcffeac4170321651620f3c89448b45 | 7d470604962a43da3fc3e5edce6f718076197d32 | refs/heads/master | 2020-03-22T08:10:13.939424 | 2018-07-04T17:21:13 | 2018-07-04T17:21:13 | 139,749,483 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | row1 = [0,0,0,0,0,0,0,0,0]
row2 = [0,0,0,5,0,6,0,0,0]
row3 = [0,0,1,0,0,0,0,3,0]
row4 = [0,9,5,0,0,0,2,0,0]
row5 = [0,0,0,0,0,1,6,0,7]
row6 = [1,0,6,0,0,9,0,0,5]
row7 = [7,0,0,8,0,3,9,0,0]
row8 = [0,3,8,9,0,0,0,2,0]
row9 = [0,5,0,0,2,0,7,0,0]
print(row1)
print(row2)
print(row3)
print("")
print(row4)
print(row5)
print(row6)
print("")
print(row7)
print(row8)
print(row9)
while True:
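    # Expected input: "<row> <col> <value>", e.g. "1 3 5" puts 5 in row 1, column 3
    # (only row 1 is handled so far in this work-in-progress version).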
x = input("Wprowadz x y z:")
try:
if int(x[0])==1:
row1[int(x[2])-1]=x[4]
print("ok")
    except ValueError:  # catch the exception for non-numeric input (a letter) and prompt again
print("Wprowadz cyfrę!")
continue
print(row1[0],row1[1],row1[2], sep=' ', end=" - ")
print(row1[3],row1[4],row1[5], sep=' ', end=" - ")
print(row1[6],row1[7],row1[8], sep=' ')
print(row1[0],row1[1],row1[2], sep=' ', end=" - ")
print(row1[3],row1[4],row1[5], sep=' ', end=" - ")
print(row1[6],row1[7],row1[8], sep=' ')
#print(str(*r11, sep='') + "-" + str(r12) + " - " + str(r13))
print(row2)
print(row3)
print(""),
print(row4)
print(row5)
print(row6)
print("")
print(row7)
print(row8)
print(row9)
#print(new)
#rds.insert(index, "is") | [
"[email protected]"
]
| |
92f3ee7e26c3ee1406bd8042cee27fc0d7f8f4c2 | d115cf7a1b374d857f6b094d4b4ccd8e9b1ac189 | /tags/pygccxml_dev_1.0.0/unittests/plain_c_tester.py | c26b2581fbaca21e9f350c66801aeb71c9acd90f | [
"BSL-1.0"
]
| permissive | gatoatigrado/pyplusplusclone | 30af9065fb6ac3dcce527c79ed5151aade6a742f | a64dc9aeeb718b2f30bd6a5ff8dcd8bfb1cd2ede | refs/heads/master | 2016-09-05T23:32:08.595261 | 2010-05-16T10:53:45 | 2010-05-16T10:53:45 | 700,369 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,430 | py | # Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import unittest
import autoconfig
import parser_test_case
from pygccxml import utils
from pygccxml import parser
from pygccxml import declarations
class tester_t( parser_test_case.parser_test_case_t ):
def __init__(self, *args ):
parser_test_case.parser_test_case_t.__init__( self, *args )
self.header = 'plain_c.c'
self.global_ns = None
def setUp(self):
if not self.global_ns:
decls = parser.parse( [self.header], self.config )
self.global_ns = declarations.get_global_namespace( decls )
self.global_ns.init_optimizer()
def test( self ):
self.global_ns.free_fun( 'hello_sum' )
self.global_ns.free_fun( 'hello_print' )
declarations.print_declarations( self.global_ns )
f = self.global_ns.free_fun( 'do_smth' )
for arg in f.arguments:
print arg.type.decl_string
def create_suite():
suite = unittest.TestSuite()
suite.addTest( unittest.makeSuite(tester_t))
return suite
def run_suite():
unittest.TextTestRunner(verbosity=2).run( create_suite() )
if __name__ == "__main__":
run_suite()
| [
"roman_yakovenko@dc5859f9-2512-0410-ae5c-dd123cda1f76"
]
| roman_yakovenko@dc5859f9-2512-0410-ae5c-dd123cda1f76 |
1c1722d15f2ee8dde90347013662ca30cd87c6a3 | 0269037acc7785a58f8786c60be8ccea8ef3f6f3 | /indico/modules/attachments/models/folders_test.py | 71309414a40429ae60741e7457815421438a6ce8 | [
"MIT"
]
| permissive | bebusl/cbnu_indico | 1ffa7042a1f706da953214b39827cbdbb1387cce | 60b37c2bf54cd7f17092b2a9ad21311762729601 | refs/heads/master | 2023-01-18T22:22:09.655751 | 2020-12-02T09:04:06 | 2020-12-02T09:04:06 | 281,068,896 | 0 | 0 | MIT | 2020-07-20T09:09:44 | 2020-07-20T09:09:43 | null | UTF-8 | Python | false | false | 1,890 | py | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from indico.modules.attachments import AttachmentFolder
def test_update_principal(dummy_user, dummy_event):
folder = AttachmentFolder(object=dummy_event, is_default=True)
assert not folder.acl_entries
# not changing anything -> shouldn't be added to acl
entry = folder.update_principal(dummy_user)
assert entry is None
assert not folder.acl_entries
# adding user with read access -> new acl entry since the user isn't in there yet
entry = initial_entry = folder.update_principal(dummy_user, read_access=True)
assert folder.acl_entries == {entry}
# not changing anything on existing principal -> shouldn't modify acl
entry = folder.update_principal(dummy_user)
assert entry is initial_entry
assert folder.acl_entries == {entry}
# granting permission which is already present -> shouldn't modify acl
entry = folder.update_principal(dummy_user, read_access=True)
assert entry is initial_entry
assert folder.acl_entries == {entry}
# removing read access -> acl entry is removed
entry = folder.update_principal(dummy_user, read_access=False)
assert entry is None
assert not folder.acl_entries
def test_remove_principal(dummy_user, dummy_event):
folder = AttachmentFolder(object=dummy_event, is_default=True)
assert not folder.acl_entries
entry = folder.update_principal(dummy_user, read_access=True)
assert folder.acl_entries == {entry}
folder.remove_principal(dummy_user)
assert not folder.acl_entries
# doesn't do anything but must not fail either
folder.remove_principal(dummy_user)
assert not folder.acl_entries
| [
"[email protected]"
]
| |
a40856233a9964baf4c68babb5fface0b95472e3 | 045cb1a5638c3575296f83471758dc09a8065725 | /harpiya/service/model.py | d5f3733a91592649bfbd91be8592000f5bcf3b43 | []
| no_license | marionumza/saas | 7236842b0db98d1a0d0c3c88df32d268509629cb | 148dd95d991a348ebbaff9396759a7dd1fe6e101 | refs/heads/main | 2023-03-27T14:08:57.121601 | 2021-03-20T07:59:08 | 2021-03-20T07:59:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,683 | py | # -*- coding: utf-8 -*-
from contextlib import closing
from functools import wraps
import logging
from psycopg2 import IntegrityError, OperationalError, errorcodes
import random
import threading
import time
import harpiya
from harpiya.exceptions import UserError, ValidationError, QWebException
from harpiya.models import check_method_name
from harpiya.tools.translate import translate, translate_sql_constraint
from harpiya.tools.translate import _
from . import security
from ..tools import traverse_containers, lazy
_logger = logging.getLogger(__name__)
PG_CONCURRENCY_ERRORS_TO_RETRY = (errorcodes.LOCK_NOT_AVAILABLE, errorcodes.SERIALIZATION_FAILURE, errorcodes.DEADLOCK_DETECTED)
MAX_TRIES_ON_CONCURRENCY_FAILURE = 5
def dispatch(method, params):
(db, uid, passwd ) = params[0], int(params[1]), params[2]
# set uid tracker - cleaned up at the WSGI
# dispatching phase in harpiya.service.wsgi_server.application
threading.current_thread().uid = uid
params = params[3:]
if method == 'obj_list':
raise NameError("obj_list has been discontinued via RPC as of 6.0, please query ir.model directly!")
if method not in ['execute', 'execute_kw']:
raise NameError("Method not available %s" % method)
security.check(db,uid,passwd)
registry = harpiya.registry(db).check_signaling()
fn = globals()[method]
with registry.manage_changes():
res = fn(db, uid, *params)
return res
def check(f):
@wraps(f)
def wrapper(___dbname, *args, **kwargs):
""" Wraps around OSV functions and normalises a few exceptions
"""
dbname = ___dbname # NOTE: this forbid to use "___dbname" as arguments in http routes
def tr(src, ttype):
# We try to do the same as the _(), but without the frame
# inspection, since we aready are wrapping an osv function
# trans_obj = self.get('ir.translation') cannot work yet :(
ctx = {}
if not kwargs:
if args and isinstance(args[-1], dict):
ctx = args[-1]
elif isinstance(kwargs, dict):
if 'context' in kwargs:
ctx = kwargs['context']
elif 'kwargs' in kwargs and kwargs['kwargs'].get('context'):
# http entry points such as call_kw()
ctx = kwargs['kwargs'].get('context')
else:
try:
from harpiya.http import request
ctx = request.env.context
except Exception:
pass
lang = ctx and ctx.get('lang')
if not (lang or hasattr(src, '__call__')):
return src
# We open a *new* cursor here, one reason is that failed SQL
# queries (as in IntegrityError) will invalidate the current one.
with closing(harpiya.sql_db.db_connect(dbname).cursor()) as cr:
if ttype == 'sql_constraint':
res = translate_sql_constraint(cr, key=key, lang=lang)
else:
res = translate(cr, name=False, source_type=ttype,
lang=lang, source=src)
return res or src
def _(src):
return tr(src, 'code')
tries = 0
while True:
try:
if harpiya.registry(dbname)._init and not harpiya.tools.config['test_enable']:
raise harpiya.exceptions.Warning('Currently, this database is not fully loaded and can not be used.')
return f(dbname, *args, **kwargs)
except (OperationalError, QWebException) as e:
if isinstance(e, QWebException):
cause = e.qweb.get('cause')
if isinstance(cause, OperationalError):
e = cause
else:
raise
# Automatically retry the typical transaction serialization errors
if e.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
raise
if tries >= MAX_TRIES_ON_CONCURRENCY_FAILURE:
_logger.info("%s, maximum number of tries reached" % errorcodes.lookup(e.pgcode))
raise
wait_time = random.uniform(0.0, 2 ** tries)
tries += 1
_logger.info("%s, retry %d/%d in %.04f sec..." % (errorcodes.lookup(e.pgcode), tries, MAX_TRIES_ON_CONCURRENCY_FAILURE, wait_time))
time.sleep(wait_time)
except IntegrityError as inst:
registry = harpiya.registry(dbname)
key = inst.diag.constraint_name
if key in registry._sql_constraints:
raise ValidationError(tr(key, 'sql_constraint') or inst.pgerror)
if inst.pgcode in (errorcodes.NOT_NULL_VIOLATION, errorcodes.FOREIGN_KEY_VIOLATION, errorcodes.RESTRICT_VIOLATION):
msg = _('The operation cannot be completed:')
_logger.debug("IntegrityError", exc_info=True)
try:
# Get corresponding model and field
model = field = None
for name, rclass in registry.items():
if inst.diag.table_name == rclass._table:
model = rclass
field = model._fields.get(inst.diag.column_name)
break
if inst.pgcode == errorcodes.NOT_NULL_VIOLATION:
# This is raised when a field is set with `required=True`. 2 cases:
# - Create/update: a mandatory field is not set.
# - Delete: another model has a not nullable using the deleted record.
msg += '\n'
msg += _(
'- Create/update: a mandatory field is not set.\n'
'- Delete: another model requires the record being deleted. If possible, archive it instead.'
)
if model:
msg += '\n\n{} {} ({}), {} {} ({})'.format(
_('Model:'), model._description, model._name,
_('Field:'), field.string if field else _('Unknown'), field.name if field else _('Unknown'),
)
elif inst.pgcode == errorcodes.FOREIGN_KEY_VIOLATION:
# This is raised when a field is set with `ondelete='restrict'`, at
# unlink only.
msg += _(' another model requires the record being deleted. If possible, archive it instead.')
constraint = inst.diag.constraint_name
if model or constraint:
msg += '\n\n{} {} ({}), {} {}'.format(
_('Model:'), model._description if model else _('Unknown'), model._name if model else _('Unknown'),
_('Constraint:'), constraint if constraint else _('Unknown'),
)
except Exception:
pass
raise ValidationError(msg)
else:
raise ValidationError(inst.args[0])
return wrapper
def execute_cr(cr, uid, obj, method, *args, **kw):
harpiya.api.Environment.reset() # clean cache etc if we retry the same transaction
recs = harpiya.api.Environment(cr, uid, {}).get(obj)
if recs is None:
raise UserError(_("Object %s doesn't exist") % obj)
result = harpiya.api.call_kw(recs, method, args, kw)
# force evaluation of lazy values before the cursor is closed, as it would
# error afterwards if the lazy isn't already evaluated (and cached)
for l in traverse_containers(result, lazy):
_0 = l._value
return result
def execute_kw(db, uid, obj, method, args, kw=None):
return execute(db, uid, obj, method, *args, **kw or {})
@check
def execute(db, uid, obj, method, *args, **kw):
threading.currentThread().dbname = db
with harpiya.registry(db).cursor() as cr:
check_method_name(method)
res = execute_cr(cr, uid, obj, method, *args, **kw)
if res is None:
_logger.info('The method %s of the object %s can not return `None` !', method, obj)
return res
| [
"[email protected]"
]
| |
977da3579e8f87f1655e64f2de8938f2c1adc395 | 1207d50126d4d59966573927c5eadd94db6aeb59 | /svggen/library/Rectangle.py | cb7e78caee0de5d274f55684375712ff71248bc0 | []
| no_license | christianwarloe/robotBuilder | aee03c189972f1d305c6e13d106b362b5d26d187 | 3f8fbc267ac7b9bbae534d1208278541a7b5eaa5 | refs/heads/master | 2021-06-13T02:42:24.834816 | 2017-04-07T01:01:52 | 2017-04-07T01:01:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 987 | py | from svggen.api.FoldedComponent import FoldedComponent
from svggen.api.composables.graph.Face import Rectangle as Rect
from svggen.api.composables.GraphComposable import Graph
from svggen.api.ports.EdgePort import EdgePort
from svggen.api.ports.FacePort import FacePort
class Rectangle(FoldedComponent):
_test_params = {
'l': 100,
'w': 400,
}
def define(self, **kwargs):
FoldedComponent.define(self, **kwargs)
self.addParameter("l", 100, positive=True)
self.addParameter("w", 400, positive=True)
def assemble(self):
dx = self.getParameter("l")
dy = self.getParameter("w")
self.addFace(Rect("r", dx, dy))
self.place()
self.addInterface("face", FacePort(self, "r"))
self.addInterface("b", EdgePort(self, "e0"))
self.addInterface("r", EdgePort(self, "e1"))
self.addInterface("t", EdgePort(self, "e2"))
self.addInterface("l", EdgePort(self, "e3"))
if __name__ == "__main__":
h = Rectangle()
#h._make_test()
| [
"[email protected]"
]
| |
96eaba8baa60786fa762b5a9ed86e115dfb96fb2 | b5ba12d4dcb240ba6069964380f6a3aede79f448 | /mixins/simulation.py | 7ccaefcd9da8089d5c296d7cfa10fab98b594edc | []
| no_license | 70-6C-65-61-73-75-72-65h/erp | 9e1a6f20a15d16794043f583022b1e04a9435b20 | 0e088c767d0d0c0e5515be703ed71252d55b70d9 | refs/heads/master | 2022-03-27T21:12:52.305257 | 2019-12-17T15:41:59 | 2019-12-17T15:41:59 | 224,333,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | # from datetime import datetime
# # from string to datetime: time.strptime
# # from datetime to string: time.strftime
# def today():
# # simulational date_today:
# def datetime_date_today():
# """ only date returned """
# month, day, year = today()
# datetime_str = f'{month}/{day}/{year}'
# datetime_object = datetime.strptime(datetime_str, '%m/%d/%y')
# return datetime_object | [
"[email protected]"
]
| |
bc5ad557d4f626a81e3b4e15f4bf084bb239d1a7 | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-vod/huaweicloudsdkvod/v1/model/show_asset_detail_request.py | 310a07633cd81850d55c262f5845bd24add26eb3 | [
"Apache-2.0"
]
| permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,328 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowAssetDetailRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'asset_id': 'str',
'categories': 'list[str]'
}
attribute_map = {
'asset_id': 'asset_id',
'categories': 'categories'
}
def __init__(self, asset_id=None, categories=None):
"""ShowAssetDetailRequest - a model defined in huaweicloud sdk"""
self._asset_id = None
self._categories = None
self.discriminator = None
self.asset_id = asset_id
if categories is not None:
self.categories = categories
@property
def asset_id(self):
"""Gets the asset_id of this ShowAssetDetailRequest.
        Media asset ID.
:return: The asset_id of this ShowAssetDetailRequest.
:rtype: str
"""
return self._asset_id
@asset_id.setter
def asset_id(self, asset_id):
"""Sets the asset_id of this ShowAssetDetailRequest.
        Media asset ID.
:param asset_id: The asset_id of this ShowAssetDetailRequest.
:type: str
"""
self._asset_id = asset_id
@property
def categories(self):
"""Gets the categories of this ShowAssetDetailRequest.
        The type of information to query. - If empty, all information is returned. - If not empty, one or more information types can be queried at the same time; possible values: - - base_info: basic media asset information. - - transcode_info: transcoding result information. - - thumbnail_info: snapshot (thumbnail) result information. - - review_info: review result information.
:return: The categories of this ShowAssetDetailRequest.
:rtype: list[str]
"""
return self._categories
@categories.setter
def categories(self, categories):
"""Sets the categories of this ShowAssetDetailRequest.
        The type of information to query. - If empty, all information is returned. - If not empty, one or more information types can be queried at the same time; possible values: - - base_info: basic media asset information. - - transcode_info: transcoding result information. - - thumbnail_info: snapshot (thumbnail) result information. - - review_info: review result information.
:param categories: The categories of this ShowAssetDetailRequest.
:type: list[str]
"""
self._categories = categories
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowAssetDetailRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
5d5b5c72b46a23b4384971602e86d7719b885892 | b8bde9a346685e1428a8284f7ffb14f15e35fb78 | /deploy/pinax.fcgi | 43f92ff0c8e98da7b3b5d94b6afd6d72456e3420 | []
| no_license | bhaugen/pinax-groups-experiments | 9302762c8e7379f067385a7280ef9af4dc4c5e8f | d520ccbfdb8228e10b6e547df6f64106caa6f0ec | refs/heads/master | 2020-04-05T22:49:04.750605 | 2009-11-13T19:36:20 | 2009-11-13T19:36:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | fcgi | # pinax.fcgi is configured to live in projects/pinax_groups/deploy.
import os
import sys
from os.path import abspath, dirname, join
from site import addsitedir
sys.path.insert(0, abspath(join(dirname(__file__), "../../")))
from django.conf import settings
os.environ["DJANGO_SETTINGS_MODULE"] = "pinax_groups.settings"
sys.path.insert(0, join(settings.PINAX_ROOT, "apps"))
sys.path.insert(0, join(settings.PROJECT_ROOT, "apps"))
from django.core.servers.fastcgi import runfastcgi
runfastcgi(method="threaded", daemonize="false")
| [
"[email protected]"
]
| |
5756338cb6fc8c1265dcba6437dce7333023f4e4 | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/System/Windows/Forms/__init___parts/RichTextBoxSelectionTypes.py | 9924fd6d966d8eeaba9fa14927670259ceddad2d | [
"MIT"
]
| permissive | shnlmn/Rhino-Grasshopper-Scripts | a9411098c5d1bbc55feb782def565d535b27b709 | 0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823 | refs/heads/master | 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | class RichTextBoxSelectionTypes(Enum,IComparable,IFormattable,IConvertible):
"""
Specifies the type of selection in a System.Windows.Forms.RichTextBox control.
enum (flags) RichTextBoxSelectionTypes,values: Empty (0),MultiChar (4),MultiObject (8),Object (2),Text (1)
"""
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
Empty=None
MultiChar=None
MultiObject=None
Object=None
Text=None
value__=None
| [
"[email protected]"
]
| |
9091f98df3dff4ab938bd0ab9d306ef2b2ca9621 | f6f15809ac70089ef4cfb1ade40e2dc58d239f81 | /test/functional/data/invalid_txs.py | 1f19ffe59a0e3a5e593440e7030364022a6315d2 | [
"MIT"
]
| permissive | lamyaim/bitgesell | fcc96f6765d3907ce923f411a1b2c6c4de9d55d6 | 64c24348f1ba8788fbffaf663b3df38d9b49a5d1 | refs/heads/master | 2023-04-30T08:16:40.735496 | 2020-12-10T05:23:08 | 2020-12-10T05:23:08 | 369,859,996 | 1 | 0 | MIT | 2021-05-22T16:50:56 | 2021-05-22T16:48:32 | null | UTF-8 | Python | false | false | 7,089 | py | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Templates for constructing various sorts of invalid transactions.
These templates (or an iterator over all of them) can be reused in different
contexts to test using a number of invalid transaction types.
Hopefully this makes it easier to get coverage of a full variety of tx
validation checks through different interfaces (AcceptBlock, AcceptToMemPool,
etc.) without repeating ourselves.
Invalid tx cases not covered here can be found by running:
$ diff \
<(grep -IREho "bad-txns[a-zA-Z-]+" src | sort -u) \
<(grep -IEho "bad-txns[a-zA-Z-]+" test/functional/data/invalid_txs.py | sort -u)
"""
import abc
from test_framework.messages import CTransaction, CTxIn, CTxOut, COutPoint
from test_framework import script as sc
from test_framework.blocktools import create_tx_with_script, MAX_BLOCK_SIGOPS
from test_framework.script import (
CScript,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_2MUL,
OP_2DIV,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT
)
basic_p2sh = sc.CScript([sc.OP_HASH160, sc.hash160(sc.CScript([sc.OP_0])), sc.OP_EQUAL])
class BadTxTemplate:
"""Allows simple construction of a certain kind of invalid tx. Base class to be subclassed."""
__metaclass__ = abc.ABCMeta
# The expected error code given by BGLd upon submission of the tx.
reject_reason = ""
# Only specified if it differs from mempool acceptance error.
block_reject_reason = ""
# Do we expect to be disconnected after submitting this tx?
expect_disconnect = False
# Is this tx considered valid when included in a block, but not for acceptance into
# the mempool (i.e. does it violate policy but not consensus)?
valid_in_block = False
def __init__(self, *, spend_tx=None, spend_block=None):
self.spend_tx = spend_block.vtx[0] if spend_block else spend_tx
self.spend_avail = sum(o.nValue for o in self.spend_tx.vout)
self.valid_txin = CTxIn(COutPoint(self.spend_tx.sha256, 0), b"", 0xffffffff)
@abc.abstractmethod
def get_tx(self, *args, **kwargs):
"""Return a CTransaction that is invalid per the subclass."""
pass
class OutputMissing(BadTxTemplate):
reject_reason = "bad-txns-vout-empty"
expect_disconnect = True
def get_tx(self):
tx = CTransaction()
tx.vin.append(self.valid_txin)
tx.calc_sha256()
return tx
class InputMissing(BadTxTemplate):
reject_reason = "bad-txns-vin-empty"
expect_disconnect = True
# We use a blank transaction here to make sure
# it is interpreted as a non-witness transaction.
# Otherwise the transaction will fail the
# "surpufluous witness" check during deserialization
# rather than the input count check.
def get_tx(self):
tx = CTransaction()
tx.calc_sha256()
return tx
# The following check prevents exploit of lack of merkle
# tree depth commitment (CVE-2017-12842)
class SizeTooSmall(BadTxTemplate):
reject_reason = "tx-size-small"
expect_disconnect = False
valid_in_block = True
def get_tx(self):
tx = CTransaction()
tx.vin.append(self.valid_txin)
tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE])))
tx.calc_sha256()
return tx
class BadInputOutpointIndex(BadTxTemplate):
# Won't be rejected - nonexistent outpoint index is treated as an orphan since the coins
# database can't distinguish between spent outpoints and outpoints which never existed.
reject_reason = None
expect_disconnect = False
def get_tx(self):
num_indices = len(self.spend_tx.vin)
bad_idx = num_indices + 100
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.spend_tx.sha256, bad_idx), b"", 0xffffffff))
tx.vout.append(CTxOut(0, basic_p2sh))
tx.calc_sha256()
return tx
class DuplicateInput(BadTxTemplate):
reject_reason = 'bad-txns-inputs-duplicate'
expect_disconnect = True
def get_tx(self):
tx = CTransaction()
tx.vin.append(self.valid_txin)
tx.vin.append(self.valid_txin)
tx.vout.append(CTxOut(1, basic_p2sh))
tx.calc_sha256()
return tx
class NonexistentInput(BadTxTemplate):
reject_reason = None # Added as an orphan tx.
expect_disconnect = False
def get_tx(self):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.spend_tx.sha256 + 1, 0), b"", 0xffffffff))
tx.vin.append(self.valid_txin)
tx.vout.append(CTxOut(1, basic_p2sh))
tx.calc_sha256()
return tx
class SpendTooMuch(BadTxTemplate):
reject_reason = 'bad-txns-in-belowout'
expect_disconnect = True
def get_tx(self):
return create_tx_with_script(
self.spend_tx, 0, script_pub_key=basic_p2sh, amount=(self.spend_avail + 1))
class SpendNegative(BadTxTemplate):
reject_reason = 'bad-txns-vout-negative'
expect_disconnect = True
def get_tx(self):
return create_tx_with_script(self.spend_tx, 0, amount=-1)
class InvalidOPIFConstruction(BadTxTemplate):
reject_reason = "mandatory-script-verify-flag-failed (Invalid OP_IF construction)"
expect_disconnect = True
valid_in_block = True
def get_tx(self):
return create_tx_with_script(
self.spend_tx, 0, script_sig=b'\x64' * 35,
amount=(self.spend_avail // 2))
class TooManySigops(BadTxTemplate):
reject_reason = "bad-txns-too-many-sigops"
block_reject_reason = "bad-blk-sigops, out-of-bounds SigOpCount"
expect_disconnect = False
def get_tx(self):
lotsa_checksigs = sc.CScript([sc.OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
return create_tx_with_script(
self.spend_tx, 0,
script_pub_key=lotsa_checksigs,
amount=1)
def getDisabledOpcodeTemplate(opcode):
""" Creates disabled opcode tx template class"""
def get_tx(self):
tx = CTransaction()
vin = self.valid_txin
vin.scriptSig = CScript([opcode])
tx.vin.append(vin)
tx.vout.append(CTxOut(1, basic_p2sh))
tx.calc_sha256()
return tx
return type('DisabledOpcode_' + str(opcode), (BadTxTemplate,), {
'reject_reason': "disabled opcode",
'expect_disconnect': True,
'get_tx': get_tx,
'valid_in_block' : True
})
# Disabled opcode tx templates (CVE-2010-5137)
DisabledOpcodeTemplates = [getDisabledOpcodeTemplate(opcode) for opcode in [
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_2MUL,
OP_2DIV,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT]]
def iter_all_templates():
"""Iterate through all bad transaction template types."""
return BadTxTemplate.__subclasses__()
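# Illustrative usage sketch (added for clarity; not part of the upstream module):
# list every template class together with its expected mempool reject reason.
# Guarded by __main__ so importing this module from tests is unaffected; running
# it assumes the test_framework package is importable, as it is from test/functional.
if __name__ == '__main__':
    for template_cls in iter_all_templates():
        print(template_cls.__name__, repr(template_cls.reject_reason))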
| [
"[email protected]"
]
| |
2670a564756e2418d01354846cf57d5defcc1c20 | 460f981dfe1a05f14d2a4cdc6cc71e9ad798b785 | /3/amd64/envs/navigator/lib/python3.6/site-packages/xarray/core/common.py | 5b090bf0d2f077e52b70fdaa639249f070fb92b3 | [
"Python-2.0",
"LicenseRef-scancode-proprietary-license",
"BSD-3-Clause",
"Intel",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"LicenseRef-scancode-mit-old-style",
"dtoa",
"LicenseRef-scancode-public-domain-disclaimer",
"Zlib",
"LicenseRef-scancode-public-domain"
]
| permissive | DFO-Ocean-Navigator/navigator-toolchain | d8c7351b477e66d674b50da54ec6ddc0f3a325ee | 930d26886fdf8591b51da9d53e2aca743bf128ba | refs/heads/master | 2022-11-05T18:57:30.938372 | 2021-04-22T02:02:45 | 2021-04-22T02:02:45 | 234,445,230 | 0 | 1 | BSD-3-Clause | 2022-10-25T06:46:23 | 2020-01-17T01:26:49 | C++ | UTF-8 | Python | false | false | 35,834 | py | from __future__ import absolute_import, division, print_function
from textwrap import dedent
import numpy as np
import pandas as pd
from . import dtypes, duck_array_ops, formatting, ops
from .arithmetic import SupportsArithmetic
from .options import _get_keep_attrs
from .pycompat import OrderedDict, basestring, dask_array_type, suppress
from .utils import Frozen, ReprObject, SortedKeysDict, either_dict_or_kwargs
# Used as a sentinel value to indicate all dimensions
ALL_DIMS = ReprObject('<all-dims>')
class ImplementsArrayReduce(object):
@classmethod
def _reduce_method(cls, func, include_skipna, numeric_only):
if include_skipna:
def wrapped_func(self, dim=None, axis=None, skipna=None,
**kwargs):
return self.reduce(func, dim, axis,
skipna=skipna, allow_lazy=True, **kwargs)
else:
def wrapped_func(self, dim=None, axis=None,
**kwargs):
return self.reduce(func, dim, axis,
allow_lazy=True, **kwargs)
return wrapped_func
_reduce_extra_args_docstring = dedent("""\
dim : str or sequence of str, optional
Dimension(s) over which to apply `{name}`.
axis : int or sequence of int, optional
Axis(es) over which to apply `{name}`. Only one of the 'dim'
and 'axis' arguments can be supplied. If neither are supplied, then
`{name}` is calculated over axes.""")
_cum_extra_args_docstring = dedent("""\
dim : str or sequence of str, optional
Dimension over which to apply `{name}`.
axis : int or sequence of int, optional
Axis over which to apply `{name}`. Only one of the 'dim'
and 'axis' arguments can be supplied.""")
class ImplementsDatasetReduce(object):
@classmethod
def _reduce_method(cls, func, include_skipna, numeric_only):
if include_skipna:
def wrapped_func(self, dim=None, skipna=None,
**kwargs):
return self.reduce(func, dim, skipna=skipna,
numeric_only=numeric_only, allow_lazy=True,
**kwargs)
else:
def wrapped_func(self, dim=None, **kwargs):
return self.reduce(func, dim,
numeric_only=numeric_only, allow_lazy=True,
**kwargs)
return wrapped_func
_reduce_extra_args_docstring = \
"""dim : str or sequence of str, optional
Dimension(s) over which to apply `{name}`. By default `{name}` is
applied over all dimensions."""
_cum_extra_args_docstring = \
"""dim : str or sequence of str, optional
Dimension over which to apply `{name}`.
axis : int or sequence of int, optional
Axis over which to apply `{name}`. Only one of the 'dim'
and 'axis' arguments can be supplied."""
class AbstractArray(ImplementsArrayReduce, formatting.ReprMixin):
"""Shared base class for DataArray and Variable."""
def __bool__(self):
return bool(self.values)
# Python 3 uses __bool__, Python 2 uses __nonzero__
__nonzero__ = __bool__
def __float__(self):
return float(self.values)
def __int__(self):
return int(self.values)
def __complex__(self):
return complex(self.values)
def __long__(self):
return long(self.values) # noqa
def __array__(self, dtype=None):
return np.asarray(self.values, dtype=dtype)
def __repr__(self):
return formatting.array_repr(self)
def _iter(self):
for n in range(len(self)):
yield self[n]
def __iter__(self):
if self.ndim == 0:
raise TypeError('iteration over a 0-d array')
return self._iter()
@property
def T(self):
return self.transpose()
def get_axis_num(self, dim):
"""Return axis number(s) corresponding to dimension(s) in this array.
Parameters
----------
dim : str or iterable of str
Dimension name(s) for which to lookup axes.
Returns
-------
int or tuple of int
Axis number or numbers corresponding to the given dimensions.
"""
if isinstance(dim, basestring):
return self._get_axis_num(dim)
else:
return tuple(self._get_axis_num(d) for d in dim)
def _get_axis_num(self, dim):
try:
return self.dims.index(dim)
except ValueError:
raise ValueError("%r not found in array dimensions %r" %
(dim, self.dims))
@property
def sizes(self):
"""Ordered mapping from dimension names to lengths.
Immutable.
See also
--------
Dataset.sizes
"""
return Frozen(OrderedDict(zip(self.dims, self.shape)))
class AttrAccessMixin(object):
"""Mixin class that allows getting keys with attribute access
"""
_initialized = False
@property
def _attr_sources(self):
"""List of places to look-up items for attribute-style access"""
return []
@property
def _item_sources(self):
"""List of places to look-up items for key-autocompletion """
return []
def __getattr__(self, name):
if name != '__setstate__':
# this avoids an infinite loop when pickle looks for the
# __setstate__ attribute before the xarray object is initialized
for source in self._attr_sources:
with suppress(KeyError):
return source[name]
raise AttributeError("%r object has no attribute %r" %
(type(self).__name__, name))
def __setattr__(self, name, value):
if self._initialized:
try:
# Allow setting instance variables if they already exist
# (e.g., _attrs). We use __getattribute__ instead of hasattr
# to avoid key lookups with attribute-style access.
self.__getattribute__(name)
except AttributeError:
raise AttributeError(
"cannot set attribute %r on a %r object. Use __setitem__ "
"style assignment (e.g., `ds['name'] = ...`) instead to "
"assign variables." % (name, type(self).__name__))
object.__setattr__(self, name, value)
def __dir__(self):
"""Provide method name lookup and completion. Only provide 'public'
methods.
"""
extra_attrs = [item
for sublist in self._attr_sources
for item in sublist
if isinstance(item, basestring)]
return sorted(set(dir(type(self)) + extra_attrs))
def _ipython_key_completions_(self):
"""Provide method for the key-autocompletions in IPython.
See http://ipython.readthedocs.io/en/stable/config/integrating.html#tab-completion
For the details.
""" # noqa
item_lists = [item
for sublist in self._item_sources
for item in sublist
if isinstance(item, basestring)]
return list(set(item_lists))
def get_squeeze_dims(xarray_obj, dim, axis=None):
"""Get a list of dimensions to squeeze out.
"""
if dim is not None and axis is not None:
raise ValueError('cannot use both parameters `axis` and `dim`')
if dim is None and axis is None:
dim = [d for d, s in xarray_obj.sizes.items() if s == 1]
else:
if isinstance(dim, basestring):
dim = [dim]
if isinstance(axis, int):
axis = (axis, )
if isinstance(axis, tuple):
for a in axis:
if not isinstance(a, int):
raise ValueError(
'parameter `axis` must be int or tuple of int.')
alldims = list(xarray_obj.sizes.keys())
dim = [alldims[a] for a in axis]
if any(xarray_obj.sizes[k] > 1 for k in dim):
raise ValueError('cannot select a dimension to squeeze out '
'which has length greater than one')
return dim
class DataWithCoords(SupportsArithmetic, AttrAccessMixin):
"""Shared base class for Dataset and DataArray."""
def squeeze(self, dim=None, drop=False, axis=None):
"""Return a new object with squeezed data.
Parameters
----------
dim : None or str or tuple of str, optional
Selects a subset of the length one dimensions. If a dimension is
selected with length greater than one, an error is raised. If
None, all length one dimensions are squeezed.
drop : bool, optional
If ``drop=True``, drop squeezed coordinates instead of making them
scalar.
axis : int, optional
Select the dimension to squeeze. Added for compatibility reasons.
Returns
-------
squeezed : same type as caller
This object, but with with all or a subset of the dimensions of
length 1 removed.
See Also
--------
numpy.squeeze
"""
dims = get_squeeze_dims(self, dim, axis)
return self.isel(drop=drop, **{d: 0 for d in dims})
def get_index(self, key):
"""Get an index for a dimension, with fall-back to a default RangeIndex
"""
if key not in self.dims:
raise KeyError(key)
try:
return self.indexes[key]
except KeyError:
# need to ensure dtype=int64 in case range is empty on Python 2
return pd.Index(range(self.sizes[key]), name=key, dtype=np.int64)
def _calc_assign_results(self, kwargs):
results = SortedKeysDict()
for k, v in kwargs.items():
if callable(v):
results[k] = v(self)
else:
results[k] = v
return results
def assign_coords(self, **kwargs):
"""Assign new coordinates to this object.
Returns a new object with all the original data in addition to the new
coordinates.
Parameters
----------
kwargs : keyword, value pairs
keywords are the variables names. If the values are callable, they
are computed on this object and assigned to new coordinate
variables. If the values are not callable, (e.g. a DataArray,
scalar, or array), they are simply assigned.
Returns
-------
assigned : same type as caller
A new object with the new coordinates in addition to the existing
data.
Examples
--------
Convert longitude coordinates from 0-359 to -180-179:
>>> da = xr.DataArray(np.random.rand(4),
... coords=[np.array([358, 359, 0, 1])],
... dims='lon')
>>> da
<xarray.DataArray (lon: 4)>
array([0.28298 , 0.667347, 0.657938, 0.177683])
Coordinates:
* lon (lon) int64 358 359 0 1
>>> da.assign_coords(lon=(((da.lon + 180) % 360) - 180))
<xarray.DataArray (lon: 4)>
array([0.28298 , 0.667347, 0.657938, 0.177683])
Coordinates:
* lon (lon) int64 -2 -1 0 1
Notes
-----
Since ``kwargs`` is a dictionary, the order of your arguments may not
be preserved, and so the order of the new variables is not well
defined. Assigning multiple variables within the same ``assign_coords``
is possible, but you cannot reference other variables created within
the same ``assign_coords`` call.
See also
--------
Dataset.assign
Dataset.swap_dims
"""
data = self.copy(deep=False)
results = self._calc_assign_results(kwargs)
data.coords.update(results)
return data
def assign_attrs(self, *args, **kwargs):
"""Assign new attrs to this object.
Returns a new object equivalent to self.attrs.update(*args, **kwargs).
Parameters
----------
args : positional arguments passed into ``attrs.update``.
kwargs : keyword arguments passed into ``attrs.update``.
Returns
-------
assigned : same type as caller
A new object with the new attrs in addition to the existing data.
See also
--------
Dataset.assign
"""
out = self.copy(deep=False)
out.attrs.update(*args, **kwargs)
return out
def pipe(self, func, *args, **kwargs):
"""
Apply func(self, *args, **kwargs)
This method replicates the pandas method of the same name.
Parameters
----------
func : function
function to apply to this xarray object (Dataset/DataArray).
``args``, and ``kwargs`` are passed into ``func``.
Alternatively a ``(callable, data_keyword)`` tuple where
``data_keyword`` is a string indicating the keyword of
``callable`` that expects the xarray object.
args : positional arguments passed into ``func``.
kwargs : a dictionary of keyword arguments passed into ``func``.
Returns
-------
object : the return type of ``func``.
Notes
-----
Use ``.pipe`` when chaining together functions that expect
xarray or pandas objects, e.g., instead of writing
>>> f(g(h(ds), arg1=a), arg2=b, arg3=c)
You can write
>>> (ds.pipe(h)
... .pipe(g, arg1=a)
... .pipe(f, arg2=b, arg3=c)
... )
If you have a function that takes the data as (say) the second
argument, pass a tuple indicating which keyword expects the
data. For example, suppose ``f`` takes its data as ``arg2``:
>>> (ds.pipe(h)
... .pipe(g, arg1=a)
... .pipe((f, 'arg2'), arg1=a, arg3=c)
... )
See Also
--------
pandas.DataFrame.pipe
"""
if isinstance(func, tuple):
func, target = func
if target in kwargs:
msg = ('%s is both the pipe target and a keyword argument'
% target)
raise ValueError(msg)
kwargs[target] = self
return func(*args, **kwargs)
else:
return func(self, *args, **kwargs)
def groupby(self, group, squeeze=True):
"""Returns a GroupBy object for performing grouped operations.
Parameters
----------
group : str, DataArray or IndexVariable
Array whose unique values should be used to group this array. If a
string, must be the name of a variable contained in this dataset.
squeeze : boolean, optional
If "group" is a dimension of any arrays in this dataset, `squeeze`
controls whether the subarrays have a dimension of length 1 along
that dimension or if the dimension is squeezed out.
Returns
-------
grouped : GroupBy
A `GroupBy` object patterned after `pandas.GroupBy` that can be
iterated over in the form of `(unique_value, grouped_array)` pairs.
Examples
--------
Calculate daily anomalies for daily data:
>>> da = xr.DataArray(np.linspace(0, 1826, num=1827),
... coords=[pd.date_range('1/1/2000', '31/12/2004',
... freq='D')],
... dims='time')
>>> da
<xarray.DataArray (time: 1827)>
array([0.000e+00, 1.000e+00, 2.000e+00, ..., 1.824e+03, 1.825e+03, 1.826e+03])
Coordinates:
* time (time) datetime64[ns] 2000-01-01 2000-01-02 2000-01-03 ...
>>> da.groupby('time.dayofyear') - da.groupby('time.dayofyear').mean('time')
<xarray.DataArray (time: 1827)>
array([-730.8, -730.8, -730.8, ..., 730.2, 730.2, 730.5])
Coordinates:
* time (time) datetime64[ns] 2000-01-01 2000-01-02 2000-01-03 ...
dayofyear (time) int64 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ...
See Also
--------
core.groupby.DataArrayGroupBy
core.groupby.DatasetGroupBy
""" # noqa
return self._groupby_cls(self, group, squeeze=squeeze)
def groupby_bins(self, group, bins, right=True, labels=None, precision=3,
include_lowest=False, squeeze=True):
"""Returns a GroupBy object for performing grouped operations.
Rather than using all unique values of `group`, the values are discretized
first by applying `pandas.cut` [1]_ to `group`.
Parameters
----------
group : str, DataArray or IndexVariable
Array whose binned values should be used to group this array. If a
string, must be the name of a variable contained in this dataset.
bins : int or array of scalars
If bins is an int, it defines the number of equal-width bins in the
range of x. However, in this case, the range of x is extended by .1%
on each side to include the min or max values of x. If bins is a
sequence it defines the bin edges allowing for non-uniform bin
width. No extension of the range of x is done in this case.
right : boolean, optional
Indicates whether the bins include the rightmost edge or not. If
right == True (the default), then the bins [1,2,3,4] indicate
(1,2], (2,3], (3,4].
labels : array or boolean, default None
Used as labels for the resulting bins. Must be of the same length as
the resulting bins. If False, string bin labels are assigned by
`pandas.cut`.
precision : int
The precision at which to store and display the bins labels.
include_lowest : bool
Whether the first interval should be left-inclusive or not.
squeeze : boolean, optional
If "group" is a dimension of any arrays in this dataset, `squeeze`
controls whether the subarrays have a dimension of length 1 along
that dimension or if the dimension is squeezed out.
Returns
-------
grouped : GroupBy
A `GroupBy` object patterned after `pandas.GroupBy` that can be
iterated over in the form of `(unique_value, grouped_array)` pairs.
The name of the group has the added suffix `_bins` in order to
distinguish it from the original variable.
References
----------
.. [1] http://pandas.pydata.org/pandas-docs/stable/generated/pandas.cut.html
""" # noqa
return self._groupby_cls(self, group, squeeze=squeeze, bins=bins,
cut_kwargs={'right': right, 'labels': labels,
'precision': precision,
'include_lowest': include_lowest})
def rolling(self, dim=None, min_periods=None, center=False, **dim_kwargs):
"""
Rolling window object.
Parameters
----------
dim: dict, optional
Mapping from the dimension name to create the rolling iterator
along (e.g. `time`) to its moving window size.
min_periods : int, default None
Minimum number of observations in window required to have a value
(otherwise result is NA). The default, None, is equivalent to
setting min_periods equal to the size of the window.
center : boolean, default False
Set the labels at the center of the window.
**dim_kwargs : optional
The keyword arguments form of ``dim``.
One of dim or dim_kwargs must be provided.
Returns
-------
Rolling object (core.rolling.DataArrayRolling for DataArray,
core.rolling.DatasetRolling for Dataset.)
Examples
--------
Create rolling seasonal average of monthly data e.g. DJF, JFM, ..., SON:
>>> da = xr.DataArray(np.linspace(0, 11, num=12),
... coords=[pd.date_range('15/12/1999',
... periods=12, freq=pd.DateOffset(months=1))],
... dims='time')
>>> da
<xarray.DataArray (time: 12)>
array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11.])
Coordinates:
* time (time) datetime64[ns] 1999-12-15 2000-01-15 2000-02-15 ...
>>> da.rolling(time=3, center=True).mean()
<xarray.DataArray (time: 12)>
array([nan, 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., nan])
Coordinates:
* time (time) datetime64[ns] 1999-12-15 2000-01-15 2000-02-15 ...
Remove the NaNs using ``dropna()``:
>>> da.rolling(time=3, center=True).mean().dropna('time')
<xarray.DataArray (time: 10)>
array([ 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])
Coordinates:
* time (time) datetime64[ns] 2000-01-15 2000-02-15 2000-03-15 ...
See Also
--------
core.rolling.DataArrayRolling
core.rolling.DatasetRolling
""" # noqa
dim = either_dict_or_kwargs(dim, dim_kwargs, 'rolling')
return self._rolling_cls(self, dim, min_periods=min_periods,
center=center)
def resample(self, indexer=None, skipna=None, closed=None, label=None,
base=0, keep_attrs=None, loffset=None, **indexer_kwargs):
"""Returns a Resample object for performing resampling operations.
Handles both downsampling and upsampling. If any intervals contain no
values from the original object, they will be given the value ``NaN``.
Parameters
----------
indexer : {dim: freq}, optional
Mapping from the dimension name to resample frequency.
skipna : bool, optional
Whether to skip missing values when aggregating in downsampling.
closed : 'left' or 'right', optional
Side of each interval to treat as closed.
label : 'left' or 'right', optional
Side of each interval to use for labeling.
base : int, optional
For frequencies that evenly subdivide 1 day, the "origin" of the
aggregated intervals. For example, for '24H' frequency, base could
range from 0 through 23.
loffset : timedelta or str, optional
Offset used to adjust the resampled time labels. Some pandas date
offset strings are supported.
keep_attrs : bool, optional
If True, the object's attributes (`attrs`) will be copied from
the original object to the new one. If False (default), the new
object will be returned without attributes.
**indexer_kwargs : {dim: freq}
The keyword arguments form of ``indexer``.
One of indexer or indexer_kwargs must be provided.
Returns
-------
resampled : same type as caller
This object resampled.
Examples
--------
Downsample monthly time-series data to seasonal data:
>>> da = xr.DataArray(np.linspace(0, 11, num=12),
... coords=[pd.date_range('15/12/1999',
... periods=12, freq=pd.DateOffset(months=1))],
... dims='time')
>>> da
<xarray.DataArray (time: 12)>
array([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11.])
Coordinates:
* time (time) datetime64[ns] 1999-12-15 2000-01-15 2000-02-15 ...
>>> da.resample(time="QS-DEC").mean()
<xarray.DataArray (time: 4)>
array([ 1., 4., 7., 10.])
Coordinates:
* time (time) datetime64[ns] 1999-12-01 2000-03-01 2000-06-01 2000-09-01
Upsample monthly time-series data to daily data:
>>> da.resample(time='1D').interpolate('linear')
<xarray.DataArray (time: 337)>
array([ 0. , 0.032258, 0.064516, ..., 10.935484, 10.967742, 11. ])
Coordinates:
* time (time) datetime64[ns] 1999-12-15 1999-12-16 1999-12-17 ...
References
----------
.. [1] http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases
""" # noqa
# TODO support non-string indexer after removing the old API.
from .dataarray import DataArray
from .resample import RESAMPLE_DIM
from ..coding.cftimeindex import CFTimeIndex
if keep_attrs is None:
keep_attrs = _get_keep_attrs(default=False)
# note: the second argument (now 'skipna') used to be 'dim'
if ((skipna is not None and not isinstance(skipna, bool))
or ('how' in indexer_kwargs and 'how' not in self.dims)
or ('dim' in indexer_kwargs and 'dim' not in self.dims)):
raise TypeError(
'resample() no longer supports the `how` or '
'`dim` arguments. Instead call methods on resample '
"objects, e.g., data.resample(time='1D').mean()")
indexer = either_dict_or_kwargs(indexer, indexer_kwargs, 'resample')
if len(indexer) != 1:
raise ValueError(
"Resampling only supported along single dimensions."
)
dim, freq = indexer.popitem()
dim_name = dim
dim_coord = self[dim]
if isinstance(self.indexes[dim_name], CFTimeIndex):
raise NotImplementedError(
'Resample is currently not supported along a dimension '
'indexed by a CFTimeIndex. For certain kinds of downsampling '
'it may be possible to work around this by converting your '
'time index to a DatetimeIndex using '
'CFTimeIndex.to_datetimeindex. Use caution when doing this '
'however, because switching to a DatetimeIndex from a '
'CFTimeIndex with a non-standard calendar entails a change '
'in the calendar type, which could lead to subtle and silent '
'errors.'
)
group = DataArray(dim_coord, coords=dim_coord.coords,
dims=dim_coord.dims, name=RESAMPLE_DIM)
# TODO: to_offset() call required for pandas==0.19.2
grouper = pd.Grouper(freq=freq, closed=closed, label=label, base=base,
loffset=pd.tseries.frequencies.to_offset(loffset))
resampler = self._resample_cls(self, group=group, dim=dim_name,
grouper=grouper,
resample_dim=RESAMPLE_DIM)
return resampler
def where(self, cond, other=dtypes.NA, drop=False):
"""Filter elements from this object according to a condition.
This operation follows the normal broadcasting and alignment rules that
xarray uses for binary arithmetic.
Parameters
----------
cond : DataArray or Dataset with boolean dtype
Locations at which to preserve this object's values.
other : scalar, DataArray or Dataset, optional
Value to use for locations in this object where ``cond`` is False.
By default, these locations are filled with NA.
drop : boolean, optional
If True, coordinate labels that only correspond to False values of
the condition are dropped from the result. Mutually exclusive with
``other``.
Returns
-------
Same type as caller.
Examples
--------
>>> import numpy as np
>>> a = xr.DataArray(np.arange(25).reshape(5, 5), dims=('x', 'y'))
>>> a.where(a.x + a.y < 4)
<xarray.DataArray (x: 5, y: 5)>
array([[ 0., 1., 2., 3., nan],
[ 5., 6., 7., nan, nan],
[ 10., 11., nan, nan, nan],
[ 15., nan, nan, nan, nan],
[ nan, nan, nan, nan, nan]])
Dimensions without coordinates: x, y
>>> a.where(a.x + a.y < 5, -1)
<xarray.DataArray (x: 5, y: 5)>
array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, -1],
[10, 11, 12, -1, -1],
[15, 16, -1, -1, -1],
[20, -1, -1, -1, -1]])
Dimensions without coordinates: x, y
>>> a.where(a.x + a.y < 4, drop=True)
<xarray.DataArray (x: 4, y: 4)>
array([[ 0., 1., 2., 3.],
[ 5., 6., 7., nan],
[ 10., 11., nan, nan],
[ 15., nan, nan, nan]])
Dimensions without coordinates: x, y
See also
--------
numpy.where : corresponding numpy function
where : equivalent function
"""
from .alignment import align
from .dataarray import DataArray
from .dataset import Dataset
if drop:
if other is not dtypes.NA:
raise ValueError('cannot set `other` if drop=True')
if not isinstance(cond, (Dataset, DataArray)):
raise TypeError("cond argument is %r but must be a %r or %r" %
(cond, Dataset, DataArray))
# align so we can use integer indexing
self, cond = align(self, cond)
# get cond with the minimal size needed for the Dataset
if isinstance(cond, Dataset):
clipcond = cond.to_array().any('variable')
else:
clipcond = cond
# clip the data corresponding to coordinate dims that are not used
nonzeros = zip(clipcond.dims, np.nonzero(clipcond.values))
indexers = {k: np.unique(v) for k, v in nonzeros}
self = self.isel(**indexers)
cond = cond.isel(**indexers)
return ops.where_method(self, cond, other)
def close(self):
"""Close any files linked to this object
"""
if self._file_obj is not None:
self._file_obj.close()
self._file_obj = None
def isin(self, test_elements):
"""Tests each value in the array for whether it is in the supplied list.
Parameters
----------
test_elements : array_like
The values against which to test each value of `element`.
This argument is flattened if an array or array_like.
See numpy notes for behavior with non-array-like parameters.
Returns
-------
isin : same as object, bool
Has the same shape as this object.
Examples
--------
>>> array = xr.DataArray([1, 2, 3], dims='x')
>>> array.isin([1, 3])
<xarray.DataArray (x: 3)>
array([ True, False, True])
Dimensions without coordinates: x
See also
--------
numpy.isin
"""
from .computation import apply_ufunc
from .dataset import Dataset
from .dataarray import DataArray
from .variable import Variable
if isinstance(test_elements, Dataset):
raise TypeError(
'isin() argument must be convertible to an array: {}'
.format(test_elements))
elif isinstance(test_elements, (Variable, DataArray)):
# need to explicitly pull out data to support dask arrays as the
# second argument
test_elements = test_elements.data
return apply_ufunc(
duck_array_ops.isin,
self,
kwargs=dict(test_elements=test_elements),
dask='allowed',
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def full_like(other, fill_value, dtype=None):
"""Return a new object with the same shape and type as a given object.
Parameters
----------
other : DataArray, Dataset, or Variable
The reference object in input
fill_value : scalar
Value to fill the new object with before returning it.
dtype : dtype, optional
dtype of the new array. If omitted, it defaults to other.dtype.
Returns
-------
out : same as object
New object with the same shape and type as other, with the data
filled with fill_value. Coords will be copied from other.
If other is based on dask, the new one will be as well, and will be
split in the same chunks.
"""
from .dataarray import DataArray
from .dataset import Dataset
from .variable import Variable
if isinstance(other, Dataset):
data_vars = OrderedDict(
(k, _full_like_variable(v, fill_value, dtype))
for k, v in other.data_vars.items())
return Dataset(data_vars, coords=other.coords, attrs=other.attrs)
elif isinstance(other, DataArray):
return DataArray(
_full_like_variable(other.variable, fill_value, dtype),
dims=other.dims, coords=other.coords, attrs=other.attrs,
name=other.name)
elif isinstance(other, Variable):
return _full_like_variable(other, fill_value, dtype)
else:
raise TypeError("Expected DataArray, Dataset, or Variable")
def _full_like_variable(other, fill_value, dtype=None):
"""Inner function of full_like, where other must be a variable
"""
from .variable import Variable
if isinstance(other.data, dask_array_type):
import dask.array
if dtype is None:
dtype = other.dtype
data = dask.array.full(other.shape, fill_value, dtype=dtype,
chunks=other.data.chunks)
else:
data = np.full_like(other, fill_value, dtype=dtype)
return Variable(dims=other.dims, data=data, attrs=other.attrs)
def zeros_like(other, dtype=None):
"""Shorthand for full_like(other, 0, dtype)
"""
return full_like(other, 0, dtype)
def ones_like(other, dtype=None):
"""Shorthand for full_like(other, 1, dtype)
"""
return full_like(other, 1, dtype)
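# Illustrative example (added for clarity; not part of the upstream API): typical
# use of full_like and its shorthands. The shape, dims and fill value below are
# arbitrary assumptions; wrapping them in a helper keeps import side-effect free.
def _example_full_like_usage():
    from .dataarray import DataArray
    da = DataArray(np.arange(6).reshape(2, 3), dims=('x', 'y'))
    filled = full_like(da, fill_value=5)   # same dims/shape, every value 5
    zeros = zeros_like(da, dtype=float)    # zeros matching da, but float dtype
    ones = ones_like(da)                   # ones with da's shape and dtype
    return filled, zeros, ones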
def is_np_datetime_like(dtype):
"""Check if a dtype is a subclass of the numpy datetime types
"""
return (np.issubdtype(dtype, np.datetime64) or
np.issubdtype(dtype, np.timedelta64))
def contains_cftime_datetimes(var):
"""Check if a variable contains cftime datetime objects"""
try:
from cftime import datetime as cftime_datetime
except ImportError:
return False
else:
if var.dtype == np.dtype('O') and var.data.size > 0:
sample = var.data.ravel()[0]
if isinstance(sample, dask_array_type):
sample = sample.compute()
if isinstance(sample, np.ndarray):
sample = sample.item()
return isinstance(sample, cftime_datetime)
else:
return False
def _contains_datetime_like_objects(var):
"""Check if a variable contains datetime like objects (either
np.datetime64, np.timedelta64, or cftime.datetime)"""
return is_np_datetime_like(var.dtype) or contains_cftime_datetimes(var)
| [
"[email protected]"
]
| |
0fee4123dd316b974c3fdd92e1ace45e6046c0e7 | 1f40a08ee85ef6f78384e6f6f53bcf3f86b8c44b | /shorten/app/views.py | fec1ecdf840fbfdd7d0588f916a668b2701fdb4d | []
| no_license | infsolution/EncurtUrl | bff4543fb17f3c2a6853c64abc24d307abcd04bf | 0f6d8aa23a2498a8bf5575797db9a5a8eb855403 | refs/heads/master | 2020-05-14T09:31:39.265337 | 2019-09-28T17:44:25 | 2019-09-28T17:44:25 | 181,741,563 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,523 | py | from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, InvalidPage
from django.shortcuts import render, redirect
from django.http import JsonResponse
from rest_framework import generics
from .models import *
from .forms import *
def index(request):
perfil_logado = get_perfil_logado(request)
return render(request,'app/index.html',{"title_page":"O melhor encurtador","perfil_logado":perfil_logado})
def get_perfil_logado(request):
try:
perfil = Perfil.objects.get(user=request.user)
except Exception as e:
return None
return perfil
def shorten(request):
if request.GET.get('url'):
short = Shortened(perfil=get_perfil_logado(request), url_user=request.GET.get('url'))
short.shorten()
if request.GET.getlist('private'):
short.get_private_code()
if request.GET.getlist('preview'):
short.preview=True
short.preview_message = request.GET.get('preview_msg')
short.save()
return render(request, 'app/showurl.html',{"url_short":short.url_shortened,"perfil_logado":get_perfil_logado(request),
"title_page":"TShort: Sua url encurtada"})
return render(request,'app/urlnotfound.html', {"value":"Nenhuma url foi informada",
"title_page":"Url Não encontrada","perfil_logado":get_perfil_logado(request)})
@login_required
def shotened_report(request):
ITEMS_PER_PAGE = 5
perfil_logado = get_perfil_logado(request)
shorteneds = Shortened.objects.filter(perfil=perfil_logado)
paginator = Paginator(shorteneds, ITEMS_PER_PAGE)
page = request.GET.get('page',1)
try:
short_page = paginator.get_page(page)
except InvalidPage:
short_page = paginator.get_page(1)
return render(request, 'app/report.html',{"shorteneds":short_page,"perfil_logado":perfil_logado})
@login_required
def detail(request, shortened_id):
shorten = Shortened.objects.get(id=shortened_id)
return render(request, 'app/report_detail.html', {'shorten':shorten, 'perfil_logado':get_perfil_logado(request)})
def go_to_url(request, shortened):
if request.method == 'GET':
try:
short = Shortened.objects.get(url_shortened=shortened)
get_click(request,short)
except Exception as e:
return render(request,'app/urlnotfound.html', {"value":shortened,"error":e, "title_page":"Url Não encontrada"})
if short.private_code != None:
return render(request, 'app/private_access.html',{"short":short})
if short.preview:
return render(request, 'app/preview.html',{'short':short, 'perfil_logado':get_perfil_logado(request)})
return redirect(short.url_user)
def create_user(request):
if request.method == 'POST':
form = UserModelForm(request.POST)
if form.is_valid():
if request.POST['last-password'] == request.POST['password']:
user = User.objects.create_user(request.POST['username'], request.POST['email'], request.POST['last-password'])  # validate that the passwords are equal
perfil = Perfil(name=user.username, user=user)
perfil.save()
return render(request, 'app/add.html', {'form':UserModelForm(), 'alert_type':'success', 'msg_confirm':'Parabéns seu cadastro foi realizado.'})
else:
return render(request, 'app/add.html', {'form':UserModelForm(),'alert_type':'danger' , 'msg_confirm':'As senhas não são iguais'})
return render(request, 'app/add.html',{'form':UserModelForm(request.POST), 'alert_type':'danger','msg_confirm':'Ocorreu um erro ao realizar o cadastro.'})
form = UserModelForm()
return render(request, 'app/add.html', {"form":form})
'''def do_login(request):
if request.method == 'POST':
user = authenticate(username = request.POST['username'], password = request.POST['password'])
if user is not None:
login(request,user)
#return redirect('/app/'+str(user.id), user)
return redirect('index')
return render(request,'app/login.html' ,{"error_msg":"Usuário ou senha Invalidos"})
return render(request, 'app/login.html')'''
def do_logout(request):
logout(request)
return redirect('/login/')
def access_private(request):
if request.method == 'POST':
short = Shortened.objects.get(url_shortened=request.POST['url_shortened'])
if request.POST.get('private_code') == short.private_code:
return redirect(short.url_user)
return render(request, 'app/private_access.html',{"short":short, "error_msg":"Código inválido"})
@login_required
def get_contatos(request):
return render(request, 'app/contatos.html', {"perfil_logado":get_perfil_logado(request)})
def request_access(request, codeurl):
if request.method == 'POST':
short = Shortened.objects.get(url_shortened=codeurl)
if send_message(short):
return render(request,'app/request_access.html',{"code":codeurl,"msg":"Sua solicitação foi enviada. Aquarde contato."})
return render(request,'app/request_access.html',{"code":codeurl})
def send_message(short):
return True
def get_click(request, shortened):
shor = Click(shortened=shortened)
print(shor.save())
def about(request):
context = {}
if get_perfil_logado(request):
context = {"perfil_logado":get_perfil_logado(request)}
return render(request, 'app/about.html',context)
def help(request):
context = {}
if get_perfil_logado(request):
context = {"perfil_logado":get_perfil_logado(request)}
return render(request, 'app/help.html',context)
def personalize(request, shortened_id):
pass
def valid(request, url):
    result = None
    try:
        url = Shortened.objects.get(url_shortened=url)
        result = True
    except Exception:
        result = False
    return JsonResponse({'result': result})
#API#
| [
"[email protected]"
]
| |
ce8203a37a0d73246f63399116e942a387aa6b19 | 38eb57300418e6f10433630437388f779ce50e09 | /rbac_permission/rbac/servers/permission.py | 4fc6af516966b9eb74fc2a0ed9e12b36cfe54973 | []
| no_license | SelfShadows/Django-Flask | f37839f763133f0d62bffad3128171c426a1c038 | 13e32d1c8aac1532b43323e1891c423fe78f2813 | refs/heads/master | 2021-01-04T12:31:18.018508 | 2020-02-14T16:29:27 | 2020-02-14T16:29:27 | 240,550,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,633 | py |
def init_session(request, user):
# register the logged-in user's ID in the session
request.session["user_id"] = user.pk
# approach 1
# # query all permissions of the currently logged-in user; distinct() removes duplicates
# permission = user.roles.all().values("permissions__url").distinct()
# permission_list = []
# for item in permission:
# permission_list.append(item["permissions__url"])
# print(permission_list)
# request.session["permission_list"] = permission_list
# approach 2
permissions = user.roles.all().values("permissions__url", "permissions__action", "permissions__group_id")
permissions_dict = {}
for item in permissions:
group_id = item["permissions__group_id"]
# key not yet in the dict
if group_id not in permissions_dict:
permissions_dict[group_id] = {
"urls": [item["permissions__url"]],
"actions": [item["permissions__action"]],
}
# key already in the dict
else:
permissions_dict[group_id]["urls"].append(item["permissions__url"])
permissions_dict[group_id]["actions"].append(item["permissions__action"])
print(permissions_dict)
request.session["permissions_dict"] = permissions_dict
ret = user.roles.all().values("permissions__url", "permissions__action", "permissions__group__name",)
print("ret:", ret)
menu_permission_list = []
for item in ret:
if item["permissions__action"] == "list":
menu_permission_list.append((item["permissions__url"], item["permissions__group__name"]))
request.session["menu_permission_list"] = menu_permission_list
| [
"[email protected]"
]
| |
7609d8654867171cc043ee30d5b4edc4ba5d48f2 | ed8db15dad4236ada32c0355e032dc996266a271 | /Advance_Python/8. Inheritance/4. ConstructorOverriding.py | 207ba7b1ce97664ee242397920676842b2750dc9 | []
| no_license | mukund1985/Python-Tutotrial | a01e0c3ea77690c23c6f30ba1a157c450e5a53ed | bfcf0c81029ce2bee4aa855d90661df25cc94ef9 | refs/heads/master | 2021-05-21T15:41:18.018660 | 2020-11-04T02:20:30 | 2020-11-04T02:20:30 | 309,857,690 | 1 | 0 | null | 2020-11-04T02:14:37 | 2020-11-04T02:11:38 | Python | UTF-8 | Python | false | false | 468 | py | # Constructor Overriding
class Father: # Parent Class
def __init__(self):
self.money = 1000
print("Father Class Constructor")
def show(self):
print("Father Class Instance Method")
class Son(Father): # Child Class
def __init__(self):
self.money = 5000
self.car = 'BMW'
print("Son Class Constructor")
def disp(self):
print("Son Class Instance Method")
s = Son()
print(s.money)
print(s.car)
s.disp()
s.show()
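# Illustrative extension (not part of the original lesson): if the child class
# should also keep the parent's state, call the parent constructor explicitly
# with super() before adding its own attributes.
class Daughter(Father):
    def __init__(self):
        super().__init__()             # runs Father's constructor first (money = 1000)
        self.car = 'Audi'              # then adds child-specific attributes
        print("Daughter Class Constructor")
d = Daughter()
print(d.money)   # 1000 - inherited because Father's constructor ran
print(d.car)     # Audi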
| [
"[email protected]"
]
| |
bacf596e1202013a98cc40f7d2940d69b8a2e216 | afa0d5a97925273f7fb0befef697d36020df5787 | /packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_get_cluster_sync.py | eb296f332c1d78fa23a91559a1674b1100657a4a | [
"Apache-2.0"
]
| permissive | scooter4j/google-cloud-python | dc7ae1ba6a33a62a40b617b806ec8ed723046b8b | 36b1cf08092d5c07c5971bb46edda7a9928166b1 | refs/heads/master | 2023-04-14T18:36:48.643436 | 2023-04-06T13:19:26 | 2023-04-06T13:19:26 | 188,338,673 | 0 | 0 | null | 2019-05-24T02:27:15 | 2019-05-24T02:27:14 | null | UTF-8 | Python | false | false | 1,805 | py | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetCluster
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-alloydb
# [START alloydb_v1_generated_AlloyDBAdmin_GetCluster_sync]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import alloydb_v1
def sample_get_cluster():
# Create a client
client = alloydb_v1.AlloyDBAdminClient()
# Initialize request argument(s)
request = alloydb_v1.GetClusterRequest(
name="name_value",
)
# Make the request
response = client.get_cluster(request=request)
# Handle the response
print(response)
# [END alloydb_v1_generated_AlloyDBAdmin_GetCluster_sync]
| [
"[email protected]"
]
| |
ba83ffc60ad253aed46ec0172ef01d949a01742e | 57ddab24ba7860f8878c689f9fa22b0779d60157 | /categorias/iniciante/uri1051.py | cb688fa9eb809d54eefeb058ecb54ada5c421f65 | []
| no_license | matheusfelipeog/uri-judge | ba1d32e50ad7239b331ad0e1181a1bffc6e61b41 | 0232be52da78fd67261c6d6a74eff3267d423afd | refs/heads/master | 2021-07-03T02:32:13.395829 | 2021-01-29T18:32:35 | 2021-01-29T18:32:35 | 215,845,427 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | # -*- coding: utf-8 -*-
salario = float(input())
if 0.0 <= salario <= 2000.00:
print('Isento')
elif 2000.01 <= salario <= 3000.00:
imposto = ((salario - 2000) * 0.08)
print('R$ {:.2f}'.format(imposto))
elif 3000.01 <= salario <= 4500.00:
imposto = (1000 * 0.08) + ((salario - 3000) * 0.18)
print('R$ {:.2f}'.format(imposto))
elif salario > 4500.00:
imposto = (1000 * 0.08) + (1500 * 0.18) + ((salario - 4500) * 0.28)
print('R$ {:.2f}'.format(imposto))
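# Worked example (illustrative; not part of the judge submission): an input of
# 3002.00 falls in the third bracket, so the tax is
# 1000 * 0.08 + (3002.00 - 3000) * 0.18 = 80.00 + 0.36 = 80.36, printed as 'R$ 80.36'.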
| [
"[email protected]"
]
| |
9aaa5c64aad7c4b8086e9c0f5c5b5cf18c161a9d | 06a7dc7cc93d019e4a9cbcf672b23a0bbacf8e8b | /2016_schizConnect/supervised_analysis/NMorphCH/VBM/30yo_scripts/03_svm_NMorphCH.py | ddf0b8d658716ee3e6a5800a6a9e9825811f7e0e | []
| no_license | neurospin/scripts | 6c06cd218a5f32de9c3c2b7d1d8bda3f3d107458 | f14a2c9cf2cd7f5fbea767b017c3faf36d170bdb | refs/heads/master | 2021-07-11T22:55:46.567791 | 2021-07-02T13:08:02 | 2021-07-02T13:08:02 | 10,549,286 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,690 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 22 09:44:24 2017
@author: ad247405
"""
import os
import json
import numpy as np
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import precision_recall_fscore_support
from scipy.stats import binom_test
from collections import OrderedDict
from sklearn import preprocessing
from sklearn.metrics import roc_auc_score
from sklearn import svm
import pandas as pd
import shutil
WD = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/results_30yo/svm/svm_NMorphCH_30yo'
def config_filename(): return os.path.join(WD,"config_dCV.json")
def results_filename(): return os.path.join(WD,"results_dCV.xlsx")
#############################################################################
def load_globals(config):
import mapreduce as GLOBAL # access to global variables
GLOBAL.DATA = GLOBAL.load_data(config["data"])
def resample(config, resample_nb):
import mapreduce as GLOBAL # access to global variables
GLOBAL.DATA = GLOBAL.load_data(config["data"])
resample = config["resample"][resample_nb]
GLOBAL.DATA_RESAMPLED = {k: [GLOBAL.DATA[k][idx, ...] for idx in resample]
for k in GLOBAL.DATA}
def mapper(key, output_collector):
import mapreduce as GLOBAL
Xtr = GLOBAL.DATA_RESAMPLED["X"][0]
Xte = GLOBAL.DATA_RESAMPLED["X"][1]
ytr = GLOBAL.DATA_RESAMPLED["y"][0]
yte = GLOBAL.DATA_RESAMPLED["y"][1]
c = float(key[0])
print("c:%f" % (c))
class_weight='balanced' # unbiased
mask = np.ones(Xtr.shape[0], dtype=bool)
scaler = preprocessing.StandardScaler().fit(Xtr)
Xtr = scaler.transform(Xtr)
Xte=scaler.transform(Xte)
mod = svm.LinearSVC(C=c,fit_intercept=False,class_weight= class_weight)
mod.fit(Xtr, ytr.ravel())
y_pred = mod.predict(Xte)
y_proba_pred = mod.decision_function(Xte)
ret = dict(y_pred=y_pred, y_true=yte,prob_pred = y_proba_pred, beta=mod.coef_, mask=mask)
if output_collector:
output_collector.collect(key, ret)
else:
return ret
def scores(key, paths, config):
import mapreduce
print (key)
values = [mapreduce.OutputCollector(p) for p in paths]
values = [item.load() for item in values]
y_true = [item["y_true"].ravel() for item in values]
y_pred = [item["y_pred"].ravel() for item in values]
y_true = np.concatenate(y_true)
y_pred = np.concatenate(y_pred)
prob_pred = [item["prob_pred"].ravel() for item in values]
prob_pred = np.concatenate(prob_pred)
p, r, f, s = precision_recall_fscore_support(y_true, y_pred, average=None)
auc = roc_auc_score(y_true, prob_pred) #area under curve score.
#betas = np.hstack([item["beta"] for item in values]).T
# threshold betas to compute fleiss_kappa and DICE
#betas_t = np.vstack([array_utils.arr_threshold_from_norm2_ratio(betas[i, :], .99)[0] for i in range(betas.shape[0])])
#Compute pvalue
success = r * s
success = success.astype('int')
prob_class1 = np.count_nonzero(y_true) / float(len(y_true))
pvalue_recall0_true_prob = binom_test(success[0], s[0], 1 - prob_class1,alternative = 'greater')
pvalue_recall1_true_prob = binom_test(success[1], s[1], prob_class1,alternative = 'greater')
pvalue_recall0_unknown_prob = binom_test(success[0], s[0], 0.5, alternative='greater')
pvalue_recall1_unknown_prob = binom_test(success[1], s[1], 0.5,alternative = 'greater')
pvalue_recall_mean = binom_test(success[0]+success[1], s[0] + s[1], p=0.5,alternative = 'greater')
scores = OrderedDict()
try:
a, l1, l2 , tv = [float(par) for par in key.split("_")]
scores['a'] = a
scores['l1'] = l1
scores['l2'] = l2
scores['tv'] = tv
left = float(1 - tv)
if left == 0: left = 1.
scores['l1_ratio'] = float(l1) / left
except:
pass
scores['recall_0'] = r[0]
scores['recall_1'] = r[1]
scores['recall_mean'] = r.mean()
scores["auc"] = auc
scores['pvalue_recall0_true_prob_one_sided'] = pvalue_recall0_true_prob
scores['pvalue_recall1_true_prob_one_sided'] = pvalue_recall1_true_prob
scores['pvalue_recall0_unknown_prob_one_sided'] = pvalue_recall0_unknown_prob
scores['pvalue_recall1_unknown_prob_one_sided'] = pvalue_recall1_unknown_prob
scores['pvalue_recall_mean'] = pvalue_recall_mean
#scores['prop_non_zeros_mean'] = float(np.count_nonzero(betas_t)) / \
# float(np.prod(betas.shape))
scores['param_key'] = key
return scores
def reducer(key, values):
import os, glob, pandas as pd
os.chdir(os.path.dirname(config_filename()))
config = json.load(open(config_filename()))
paths = glob.glob(os.path.join(config['map_output'], "*", "*", "*"))
#paths = [p for p in paths if not p.count("0.8_-1")]
def close(vec, val, tol=1e-4):
return np.abs(vec - val) < tol
def groupby_paths(paths, pos):
groups = {g:[] for g in set([p.split("/")[pos] for p in paths])}
for p in paths:
groups[p.split("/")[pos]].append(p)
return groups
def argmaxscore_bygroup(data, groupby='fold', param_key="param_key", score="recall_mean"):
arg_max_byfold = list()
for fold, data_fold in data.groupby(groupby):
assert len(data_fold) == len(set(data_fold[param_key])) # ensure all param are diff
arg_max_byfold.append([fold, data_fold.ix[data_fold[score].argmax()][param_key], data_fold[score].max()])
return pd.DataFrame(arg_max_byfold, columns=[groupby, param_key, score])
print('## Refit scores')
print('## ------------')
byparams = groupby_paths([p for p in paths if p.count("all") and not p.count("all/all")],3)
byparams_scores = {k:scores(k, v, config) for k, v in byparams.items()}
data = [list(byparams_scores[k].values()) for k in byparams_scores]
columns = list(byparams_scores[list(byparams_scores.keys())[0]].keys())
scores_refit = pd.DataFrame(data, columns=columns)
print('## doublecv scores by outer-cv and by params')
print('## -----------------------------------------')
data = list()
bycv = groupby_paths([p for p in paths if p.count("cvnested")],1)
for fold, paths_fold in bycv.items():
print(fold)
byparams = groupby_paths([p for p in paths_fold], 3)
byparams_scores = {k:scores(k, v, config) for k, v in byparams.items()}
data += [[fold] + list(byparams_scores[k].values()) for k in byparams_scores]
scores_dcv_byparams = pd.DataFrame(data, columns=["fold"] + columns)
print('## Model selection')
print('## ---------------')
svm = argmaxscore_bygroup(scores_dcv_byparams); svm["method"] = "svm"
scores_argmax_byfold = svm
print('## Apply best model on refited')
print('## ---------------------------')
scores_svm = scores("nestedcv", [os.path.join(config['map_output'], row["fold"], "all", row["param_key"]) for index, row in svm.iterrows()], config)
scores_cv = pd.DataFrame([["svm"] + list(scores_svm.values())], columns=["method"] + list(scores_svm.keys()))
with pd.ExcelWriter(results_filename()) as writer:
scores_refit.to_excel(writer, sheet_name='cv_by_param', index=False)
scores_dcv_byparams.to_excel(writer, sheet_name='cv_cv_byparam', index=False)
scores_argmax_byfold.to_excel(writer, sheet_name='cv_argmax', index=False)
scores_cv.to_excel(writer, sheet_name='dcv', index=False)
##############################################################################
if __name__ == "__main__":
WD = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/results_30yo/svm/svm_NMorphCH_30yo'
INPUT_DATA_X = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/data/data_30yo/X.npy'
INPUT_DATA_y = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/data/data_30yo/y.npy'
INPUT_MASK_PATH = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/data/data_30yo/mask.nii'
INPUT_CSV = '/neurospin/brainomics/2016_schizConnect/analysis/NMorphCH/VBM/population_30yo.csv'
pop = pd.read_csv(INPUT_CSV,delimiter=' ')
number_subjects = pop.shape[0]
NFOLDS_OUTER = 5
NFOLDS_INNER = 5
shutil.copy(INPUT_DATA_X, WD)
shutil.copy(INPUT_DATA_y, WD)
shutil.copy(INPUT_MASK_PATH, WD)
#############################################################################
## Create config file
y = np.load(INPUT_DATA_y)
cv_outer = [[tr, te] for tr,te in StratifiedKFold(y.ravel(), n_folds=NFOLDS_OUTER, random_state=42)]
if cv_outer[0] is not None: # Make sure first fold is None
cv_outer.insert(0, None)
null_resampling = list(); null_resampling.append(np.arange(0,len(y))),null_resampling.append(np.arange(0,len(y)))
cv_outer[0] = null_resampling
import collections
cv = collections.OrderedDict()
for cv_outer_i, (tr_val, te) in enumerate(cv_outer):
if cv_outer_i == 0:
cv["all/all"] = [tr_val, te]
else:
cv["cv%02d/all" % (cv_outer_i -1)] = [tr_val, te]
cv_inner = StratifiedKFold(y[tr_val].ravel(), n_folds=NFOLDS_INNER, random_state=42)
for cv_inner_i, (tr, val) in enumerate(cv_inner):
cv["cv%02d/cvnested%02d" % ((cv_outer_i-1), cv_inner_i)] = [tr_val[tr], tr_val[val]]
for k in cv:
cv[k] = [cv[k][0].tolist(), cv[k][1].tolist()]
C_range = [[100],[10],[1],[1e-1],[1e-2],[1e-3],[1e-4],[1e-5],[1e-6],[1e-7],[1e-8],[1e-9]]
user_func_filename = "/home/ad247405/git/scripts/2016_schizConnect/supervised_analysis/NMorphCH/VBM/30yo_scripts/03_svm_NMorphCH.py"
config = dict(data=dict(X="X.npy", y="y.npy"),
params=C_range, resample=cv,
structure="mask.nii",
map_output="model_selectionCV",
user_func=user_func_filename,
reduce_input="results/*/*",
reduce_group_by="params",
reduce_output="model_selectionCV.csv")
json.dump(config, open(os.path.join(WD, "config_dCV.json"), "w"))
# Build utils files: sync (push/pull) and PBS
import brainomics.cluster_gabriel as clust_utils
sync_push_filename, sync_pull_filename, WD_CLUSTER = \
clust_utils.gabriel_make_sync_data_files(WD)
cmd = "mapreduce.py --map %s/config_dCV.json" % WD_CLUSTER
clust_utils.gabriel_make_qsub_job_files(WD, cmd, walltime="250:00:00")
| [
"[email protected]"
]
| |
ae2a4491e45e20f804e4e6339f271af09b072786 | 931a3304ea280d0a160acb87e770d353368d7d7d | /vendor/swagger_client/models/get_characters_character_id_attributes_ok.py | b7705fa3e23be56e5043bffcb69cf65b385f96b8 | []
| no_license | LukeS5310/Broadsword | c44786054e1911a96b02bf46fe4bdd0f5ad02f19 | 3ba53d446b382c79253dd3f92c397cca17623155 | refs/heads/master | 2021-09-08T00:05:26.296092 | 2017-10-24T07:01:48 | 2017-10-24T07:01:48 | 105,143,152 | 0 | 1 | null | 2017-11-03T14:29:38 | 2017-09-28T12:03:19 | Python | UTF-8 | Python | false | false | 9,633 | py | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.6.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetCharactersCharacterIdAttributesOk(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, accrued_remap_cooldown_date=None, bonus_remaps=None, charisma=None, intelligence=None, last_remap_date=None, memory=None, perception=None, willpower=None):
"""
GetCharactersCharacterIdAttributesOk - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'accrued_remap_cooldown_date': 'datetime',
'bonus_remaps': 'int',
'charisma': 'int',
'intelligence': 'int',
'last_remap_date': 'datetime',
'memory': 'int',
'perception': 'int',
'willpower': 'int'
}
self.attribute_map = {
'accrued_remap_cooldown_date': 'accrued_remap_cooldown_date',
'bonus_remaps': 'bonus_remaps',
'charisma': 'charisma',
'intelligence': 'intelligence',
'last_remap_date': 'last_remap_date',
'memory': 'memory',
'perception': 'perception',
'willpower': 'willpower'
}
self._accrued_remap_cooldown_date = accrued_remap_cooldown_date
self._bonus_remaps = bonus_remaps
self._charisma = charisma
self._intelligence = intelligence
self._last_remap_date = last_remap_date
self._memory = memory
self._perception = perception
self._willpower = willpower
@property
def accrued_remap_cooldown_date(self):
"""
Gets the accrued_remap_cooldown_date of this GetCharactersCharacterIdAttributesOk.
Neural remapping cooldown after a character uses remap accrued over time
:return: The accrued_remap_cooldown_date of this GetCharactersCharacterIdAttributesOk.
:rtype: datetime
"""
return self._accrued_remap_cooldown_date
@accrued_remap_cooldown_date.setter
def accrued_remap_cooldown_date(self, accrued_remap_cooldown_date):
"""
Sets the accrued_remap_cooldown_date of this GetCharactersCharacterIdAttributesOk.
Neural remapping cooldown after a character uses remap accrued over time
:param accrued_remap_cooldown_date: The accrued_remap_cooldown_date of this GetCharactersCharacterIdAttributesOk.
:type: datetime
"""
self._accrued_remap_cooldown_date = accrued_remap_cooldown_date
@property
def bonus_remaps(self):
"""
Gets the bonus_remaps of this GetCharactersCharacterIdAttributesOk.
Number of available bonus character neural remaps
:return: The bonus_remaps of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._bonus_remaps
@bonus_remaps.setter
def bonus_remaps(self, bonus_remaps):
"""
Sets the bonus_remaps of this GetCharactersCharacterIdAttributesOk.
Number of available bonus character neural remaps
:param bonus_remaps: The bonus_remaps of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
self._bonus_remaps = bonus_remaps
@property
def charisma(self):
"""
Gets the charisma of this GetCharactersCharacterIdAttributesOk.
charisma integer
:return: The charisma of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._charisma
@charisma.setter
def charisma(self, charisma):
"""
Sets the charisma of this GetCharactersCharacterIdAttributesOk.
charisma integer
:param charisma: The charisma of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
if charisma is None:
raise ValueError("Invalid value for `charisma`, must not be `None`")
self._charisma = charisma
@property
def intelligence(self):
"""
Gets the intelligence of this GetCharactersCharacterIdAttributesOk.
intelligence integer
:return: The intelligence of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._intelligence
@intelligence.setter
def intelligence(self, intelligence):
"""
Sets the intelligence of this GetCharactersCharacterIdAttributesOk.
intelligence integer
:param intelligence: The intelligence of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
if intelligence is None:
raise ValueError("Invalid value for `intelligence`, must not be `None`")
self._intelligence = intelligence
@property
def last_remap_date(self):
"""
Gets the last_remap_date of this GetCharactersCharacterIdAttributesOk.
Datetime of last neural remap, including usage of bonus remaps
:return: The last_remap_date of this GetCharactersCharacterIdAttributesOk.
:rtype: datetime
"""
return self._last_remap_date
@last_remap_date.setter
def last_remap_date(self, last_remap_date):
"""
Sets the last_remap_date of this GetCharactersCharacterIdAttributesOk.
Datetime of last neural remap, including usage of bonus remaps
:param last_remap_date: The last_remap_date of this GetCharactersCharacterIdAttributesOk.
:type: datetime
"""
self._last_remap_date = last_remap_date
@property
def memory(self):
"""
Gets the memory of this GetCharactersCharacterIdAttributesOk.
memory integer
:return: The memory of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._memory
@memory.setter
def memory(self, memory):
"""
Sets the memory of this GetCharactersCharacterIdAttributesOk.
memory integer
:param memory: The memory of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
if memory is None:
raise ValueError("Invalid value for `memory`, must not be `None`")
self._memory = memory
@property
def perception(self):
"""
Gets the perception of this GetCharactersCharacterIdAttributesOk.
perception integer
:return: The perception of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._perception
@perception.setter
def perception(self, perception):
"""
Sets the perception of this GetCharactersCharacterIdAttributesOk.
perception integer
:param perception: The perception of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
if perception is None:
raise ValueError("Invalid value for `perception`, must not be `None`")
self._perception = perception
@property
def willpower(self):
"""
Gets the willpower of this GetCharactersCharacterIdAttributesOk.
willpower integer
:return: The willpower of this GetCharactersCharacterIdAttributesOk.
:rtype: int
"""
return self._willpower
@willpower.setter
def willpower(self, willpower):
"""
Sets the willpower of this GetCharactersCharacterIdAttributesOk.
willpower integer
:param willpower: The willpower of this GetCharactersCharacterIdAttributesOk.
:type: int
"""
if willpower is None:
raise ValueError("Invalid value for `willpower`, must not be `None`")
self._willpower = willpower
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, GetCharactersCharacterIdAttributesOk):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
]
| |
dba98931ab1055fbc8aa7f09f7f007a014124723 | 687928e5bc8d5cf68d543005bb24c862460edcfc | /nssrc/com/citrix/netscaler/nitro/resource/config/lb/lbvserver_dospolicy_binding.py | 465c32d9a481652819921910b414eaf9319e4bd3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
]
| permissive | mbs91/nitro | c6c81665d6abd04de8b9f09554e5e8e541f4a2b8 | be74e1e177f5c205c16126bc9b023f2348788409 | refs/heads/master | 2021-05-29T19:24:04.520762 | 2015-06-26T02:03:09 | 2015-06-26T02:03:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,123 | py | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbvserver_dospolicy_binding(base_resource) :
""" Binding class showing the dospolicy that can be bound to lbvserver.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._name = ""
self.___count = 0
@property
def priority(self) :
"""Priority.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""Priority.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
"""Name of the policy bound to the LB vserver.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
"""Name of the policy bound to the LB vserver.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def name(self) :
"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(lbvserver_dospolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.lbvserver_dospolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch lbvserver_dospolicy_binding resources.
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of lbvserver_dospolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count lbvserver_dospolicy_binding resources configued on NetScaler.
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of lbvserver_dospolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Bindpoint:
REQUEST = "REQUEST"
RESPONSE = "RESPONSE"
class Labeltype:
reqvserver = "reqvserver"
resvserver = "resvserver"
policylabel = "policylabel"
class lbvserver_dospolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.lbvserver_dospolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.lbvserver_dospolicy_binding = [lbvserver_dospolicy_binding() for _ in range(length)]
| [
"[email protected]"
]
| |
568adf917a33a914cba15a49c8c76eec78d9e70c | 8fa8ded3772dd7a124c1bbb91fc109ed2b63574b | /mycelium/apps/data_import/ajax_backends.py | 1db6a810c321f46ba03880b7a3f42cb1ee69194c | []
| no_license | skoczen/mycelium | 3642b0f5e5ea03d609a3e499c7ad68092101dce0 | da0f169163f4dc93e2dc2b0d934abf4f18c18af0 | refs/heads/master | 2020-04-10T09:21:46.893254 | 2014-05-20T02:27:06 | 2014-05-20T02:27:06 | 2,114,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,301 | py | from ajaxuploader.backends.s3 import S3UploadBackend
from django.core.files.storage import default_storage
from spreadsheets.spreadsheet import SpreadsheetAbstraction
import time
class DataImportUploadBackend(S3UploadBackend):
def update_filename(self, request, filename):
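        # Namespace each upload by account id and a Unix timestamp so imports
        # from different accounts (or repeated imports) cannot collide on S3.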
return "import/%s/%s.%s" % (request.account.pk, int(time.time()), filename, )
def upload_complete(self, request, filename, **kwargs):
self._pool.close()
self._pool.join()
self._mp.complete_upload()
# filename is a file at s3. Get it.
f = default_storage.open(filename, 'r')
# parse the file.
s = SpreadsheetAbstraction(request.account, f, request.import_type, filename=filename)
f.close()
# get the number of rows
num_rows = s.num_rows
# see if it has a header
header_row = []
has_header = s.has_header
if s.has_header:
header_row = s.header_row
# get the first five columns
first_rows = s.get_rows(0,8)
return_dict = {
'num_rows': num_rows,
'first_rows': first_rows,
'header_row': header_row,
'has_header': has_header,
'filename':filename,
}
return return_dict | [
"[email protected]"
]
| |
53d7a1c756ba1e532f3b3fc6092768370b3a8b40 | 8eac548c15cdabeb662c9af2ca67994f92c255ee | /词性标注&词性提取/Word_Marking_test.py | 75c73dfd2590d58fbea3ac14a141dd71b9fe05c0 | []
| no_license | yaolinxia/Chinese-word-segmentation | f7de7317509dc7ed53bb40e5a1367206bd36abc1 | 42d619ec838fe2f8c98822b15c69c640972b984e | refs/heads/master | 2021-07-06T19:52:58.916128 | 2019-04-15T14:08:54 | 2019-04-15T14:08:54 | 117,522,537 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,607 | py | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
# 1. Segment the text into words first and keep them in an array
# 2. Iterate over the word array and tag each word's part of speech
import sys
import glob
import os
import xml.dom.minidom
import jieba
import jieba.posseg as pseg
# Iterate over all xml files under a folder; path is the folder that holds the xml files
# Part-of-speech tagging
def WorkMark(path):
    #textCut=jieba.cut(text,cut_all=False)
    # part-of-speech tagging
    with open(path, encoding="utf-8") as file_object:
        contents = file_object.read()
    result = ''
    # pseg.cut yields (word, flag) pairs; collect them as "word/flag" tokens
    for word, flag in pseg.cut(contents):
        result += '%s/%s ' % (word, flag)
        print('%s %s' % (word, flag))
    print('success WorkMark')
    return result
# Write text out to the file at path
def write_WorkMark(path,text):
f=open(path,'w',encoding='utf-8')
f.write(text)
f.close()
print('success write_WorkMark')
if __name__=='__main__':
#path1 = r'G:\研究生\法律文书\民事一审测试集\民事一审测试集'
    # path for the output result
path2 = r'H:\python-workspace\test-path\test_QW_1-29.txt'
#path3 = r'H:\python-workspace\\1-5-testWenShu\\stopword.dic'
    # path4: path of the extracted fields
path4 = r'H:\python-workspace\1-12-testWenShu\test_QW_addDic.txt'
#path4=r'C:\Users\LFK\Desktop\1.txt'
#text = read_XMLFile(path1)
#write_segmentFile(path4, text)
# text=read_txt(path4)
result = WorkMark(path4)
write_WorkMark(path2,result)
"""
import jieba.posseg as pseg
words = pseg.cut("我爱北京天安门")
for word,flag in words:
print('%s %s' % (word, flag))
""" | [
"[email protected]"
]
| |
f919c9e9ce31b4f2ba4ee925104693ef602a991d | f2b860cd107681925cf58c004001c71a8ec5b2bd | /antspynet/architectures/create_convolutional_autoencoder_model.py | 2bd714ea3d3389ae2bd5ba767fca6522f6261b7c | []
| no_license | zwmJohn/ANTsPyNet | dfb7f5a841bb8506d6c5f809af38937bbc2725f5 | 3bd658a8d8fc6467612a3419f38dfc65895fc679 | refs/heads/master | 2022-04-28T04:37:24.995937 | 2020-05-01T00:27:41 | 2020-05-01T00:27:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,274 | py |
from keras.models import Model
from keras.layers import Input, Conv2D, Conv2DTranspose, Conv3D, Conv3DTranspose, Dense, Flatten, Reshape
import numpy as np
import math
def create_convolutional_autoencoder_model_2d(input_image_size,
number_of_filters_per_layer=(32, 64, 128, 10),
convolution_kernel_size=(5, 5),
deconvolution_kernel_size=(5, 5)
):
"""
Function for creating a 2-D symmetric convolutional autoencoder model.
    Builds an autoencoder based on the specified array defining the
number of units in the encoding branch. Ported from the Keras python
implementation here:
https://github.com/XifengGuo/DEC-keras
Arguments
---------
input_image_size : tuple
A tuple defining the shape of the 2-D input image
    number_of_filters_per_layer : tuple
        A tuple defining the number of filters in the encoding branch; the
        last entry is the size of the latent code.
convolution_kernel_size : tuple or scalar
Kernel size for convolution
deconvolution_kernel_size : tuple or scalar
Kernel size for deconvolution
Returns
-------
Keras models
A convolutional encoder and autoencoder Keras model.
Example
-------
>>> autoencoder, encoder = create_convolutional_autoencoder_model_2d((128, 128, 3))
>>> autoencoder.summary()
>>> encoder.summary()
"""
activation = 'relu'
strides = (2, 2)
number_of_encoding_layers = len(number_of_filters_per_layer) - 1
factor = 2 ** number_of_encoding_layers
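    # Each encoding layer halves the spatial size. When the input size is
    # evenly divisible by the total downsampling factor, 'same' padding keeps
    # the encoder and decoder shapes symmetric; otherwise the innermost layer
    # falls back to 'valid' padding.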
padding = 'valid'
if input_image_size[0] % factor == 0:
padding = 'same'
inputs = Input(shape = input_image_size)
encoder = inputs
for i in range(number_of_encoding_layers):
local_padding = 'same'
kernel_size = convolution_kernel_size
if i == (number_of_encoding_layers - 1):
local_padding = padding
kernel_size = tuple(np.array(convolution_kernel_size) - 2)
encoder = Conv2D(filters=number_of_filters_per_layer[i],
kernel_size=kernel_size,
strides=strides,
activation=activation,
padding=local_padding)(encoder)
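    # Flatten the final feature maps and project them to the latent code,
    # whose dimensionality is the last entry of number_of_filters_per_layer.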
encoder = Flatten()(encoder)
encoder = Dense(units=number_of_filters_per_layer[-1])(encoder)
autoencoder = encoder
penultimate_number_of_filters = \
number_of_filters_per_layer[number_of_encoding_layers-1]
input_image_size_factored = ((math.floor(input_image_size[0] / factor)),
(math.floor(input_image_size[1] / factor)))
number_of_units_for_encoder_output = (penultimate_number_of_filters *
input_image_size_factored[0] * input_image_size_factored[1])
autoencoder = Dense(units=number_of_units_for_encoder_output,
activation=activation)(autoencoder)
autoencoder = Reshape(target_shape=(*input_image_size_factored, penultimate_number_of_filters))(autoencoder)
for i in range(number_of_encoding_layers, 1, -1):
local_padding = 'same'
kernel_size = convolution_kernel_size
if i == number_of_encoding_layers:
local_padding = padding
kernel_size = tuple(np.array(deconvolution_kernel_size) - 2)
autoencoder = Conv2DTranspose(filters=number_of_filters_per_layer[i-2],
kernel_size=kernel_size,
strides=strides,
activation=activation,
padding=local_padding)(autoencoder)
autoencoder = Conv2DTranspose(input_image_size[-1],
kernel_size=deconvolution_kernel_size,
strides=strides,
padding='same')(autoencoder)
autoencoder_model = Model(inputs=inputs, outputs=autoencoder)
encoder_model = Model(inputs=inputs, outputs=encoder)
return([autoencoder_model, encoder_model])
def create_convolutional_autoencoder_model_3d(input_image_size,
number_of_filters_per_layer=(32, 64, 128, 10),
convolution_kernel_size=(5, 5, 5),
deconvolution_kernel_size=(5, 5, 5)
):
"""
Function for creating a 3-D symmetric convolutional autoencoder model.
    Builds an autoencoder based on the specified array defining the
number of units in the encoding branch. Ported from the Keras python
implementation here:
https://github.com/XifengGuo/DEC-keras
Arguments
---------
input_image_size : tuple
A tuple defining the shape of the 3-D input image
    number_of_filters_per_layer : tuple
        A tuple defining the number of filters in the encoding branch; the
        last entry is the size of the latent code.
convolution_kernel_size : tuple or scalar
Kernel size for convolution
deconvolution_kernel_size : tuple or scalar
Kernel size for deconvolution
Returns
-------
Keras models
A convolutional encoder and autoencoder Keras model.
Example
-------
>>> autoencoder, encoder = create_convolutional_autoencoder_model_3d((128, 128, 128, 3))
>>> autoencoder.summary()
>>> encoder.summary()
"""
activation = 'relu'
strides = (2, 2, 2)
number_of_encoding_layers = len(number_of_filters_per_layer) - 1
factor = 2 ** number_of_encoding_layers
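    # Same padding rule as the 2-D model: use 'same' only when the input size
    # divides evenly by the encoder's total downsampling factor.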
padding = 'valid'
if input_image_size[0] % factor == 0:
padding = 'same'
inputs = Input(shape = input_image_size)
encoder = inputs
for i in range(number_of_encoding_layers):
local_padding = 'same'
kernel_size = convolution_kernel_size
if i == (number_of_encoding_layers - 1):
local_padding = padding
kernel_size = tuple(np.array(convolution_kernel_size) - 2)
encoder = Conv3D(filters=number_of_filters_per_layer[i],
kernel_size=kernel_size,
strides=strides,
activation=activation,
padding=local_padding)(encoder)
encoder = Flatten()(encoder)
encoder = Dense(units=number_of_filters_per_layer[-1])(encoder)
autoencoder = encoder
penultimate_number_of_filters = \
number_of_filters_per_layer[number_of_encoding_layers-1]
input_image_size_factored = ((math.floor(input_image_size[0] / factor)),
(math.floor(input_image_size[1] / factor)),
(math.floor(input_image_size[2] / factor)))
number_of_units_for_encoder_output = (penultimate_number_of_filters *
input_image_size_factored[0] * input_image_size_factored[1] *
input_image_size_factored[2])
autoencoder = Dense(units=number_of_units_for_encoder_output,
activation=activation)(autoencoder)
autoencoder = Reshape(target_shape=(*input_image_size_factored, penultimate_number_of_filters))(autoencoder)
for i in range(number_of_encoding_layers, 1, -1):
local_padding = 'same'
kernel_size = convolution_kernel_size
if i == number_of_encoding_layers:
local_padding = padding
kernel_size = tuple(np.array(deconvolution_kernel_size) - 2)
autoencoder = Conv3DTranspose(filters=number_of_filters_per_layer[i-2],
kernel_size=kernel_size,
strides=strides,
activation=activation,
padding=local_padding)(autoencoder)
autoencoder = Conv3DTranspose(input_image_size[-1],
kernel_size=deconvolution_kernel_size,
strides=strides,
padding='same')(autoencoder)
autoencoder_model = Model(inputs=inputs, outputs=autoencoder)
encoder_model = Model(inputs=inputs, outputs=encoder)
return([autoencoder_model, encoder_model])
| [
"[email protected]"
]
| |
9c12bac03eea6ed28261ea89f8c3810743a52f26 | 2ca88d41f1bb5042338faec50b2af11931db0bdd | /src/gluonts/nursery/tsbench/src/cli/analysis/__init__.py | b939423a365224aa385b570ac9ecec6deacdf291 | [
"Apache-2.0"
]
| permissive | canerturkmen/gluon-ts | 2f2d46f9b01f5ee07a51a11e822b1c72c2475caa | 57ae07f571ff123eac04af077870c1f216f99d5c | refs/heads/master | 2022-09-10T23:30:26.162245 | 2022-04-20T12:44:01 | 2022-04-20T12:44:01 | 192,873,578 | 1 | 2 | Apache-2.0 | 2020-08-04T16:58:48 | 2019-06-20T07:43:07 | Python | UTF-8 | Python | false | false | 848 | py | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from ._main import analysis
from .ensemble import ensemble # type: ignore
from .ensemble_recommender import ensemble_recommender # type: ignore
from .recommender import recommender # type: ignore
from .surrogate import surrogate # type: ignore
__all__ = ["analysis"]
| [
"[email protected]"
]
| |
94ef7ad99668a3d0c890a8be2fd256bf28ab9194 | 1b8530ef1c108e098edfa3755e96824b31d4a2ad | /scripts/fixup_recommender_v1beta1_keywords.py | c0fe44525fc4175047ea2372ca698f42c4445c7e | [
"Apache-2.0"
]
| permissive | renovate-bot/python-recommender | 4b3d0b9e0332eab0f71bd044a6832b67fe6827fa | d0ff05f566d2a7bfe6c9f403252a833fe4bb776b | refs/heads/master | 2023-06-08T00:27:33.316110 | 2021-08-18T13:40:32 | 2021-08-18T13:40:32 | 239,139,952 | 0 | 0 | Apache-2.0 | 2020-02-08T13:52:09 | 2020-02-08T13:52:08 | null | UTF-8 | Python | false | false | 6,468 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
class recommenderCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'get_insight': ('name', ),
'get_recommendation': ('name', ),
'list_insights': ('parent', 'page_size', 'page_token', 'filter', ),
'list_recommendations': ('parent', 'page_size', 'page_token', 'filter', ),
'mark_insight_accepted': ('name', 'etag', 'state_metadata', ),
'mark_recommendation_claimed': ('name', 'etag', 'state_metadata', ),
'mark_recommendation_failed': ('name', 'etag', 'state_metadata', ),
'mark_recommendation_succeeded': ('name', 'etag', 'state_metadata', ),
}
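    # Illustrative rewrite performed by leave_Call (argument values are
    # hypothetical):
    #   client.get_insight("some-name", retry=retry)
    #   becomes
    #   client.get_insight(request={'name': "some-name"}, retry=retry)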
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: not a.keyword.value in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=recommenderCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the recommender client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
| [
"[email protected]"
]
| |
ebbfa4ce3921743a2cac5d388e06a808086b00de | 71cc62fe3fec8441794a725b7ce3037dc2723107 | /ifreewallpapers/apps/profile/views/profileviews.py | 71d4bc4644c89e4257d2ab7d6120ca761ceb5375 | []
| no_license | tooxie/django-ifreewallpapers | bda676dc5a6c45329ad6763862fe696b3e0c354b | 75d8f41a4c6aec5c1091203823c824c4223674a6 | refs/heads/master | 2020-05-21T12:50:36.907948 | 2011-01-19T04:28:33 | 2011-01-19T04:28:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | # coding=UTF-8
from profile.models import Profile
# from profile import settings as _settings
from utils.decorators import render_response
to_response = render_response('profile/')
# from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
# from django.http import HttpResponseRedirect, Http404
from django.shortcuts import get_object_or_404
"""
@to_response
def overview(request, ):
return 'profile.html'
"""
@to_response
def public(request, slug):
    profile = get_object_or_404(Profile, slug=slug)
return 'public.html', {'profile': profile}
| [
"[email protected]"
]
| |
742c4ddf5eaa9d24d8ab85cf042455635e024227 | ff692d927c95f7337339599d523f986f720449f5 | /plugins/init.py | 692fec3901386220bb48bf4cea4ae5a20c1c2897 | []
| no_license | mwesterhof/pyjeeves | de567966636954aed7d88a5d51e74df85feeaba3 | 46b35f56056603330f7636a745e13fa045c884f1 | refs/heads/master | 2022-12-16T11:11:06.276555 | 2019-08-09T09:10:21 | 2019-08-09T09:10:21 | 296,265,384 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | py | import os
from plugin import BasePlugin
class Plugin(BasePlugin):
'''
Initialize an HQ in the current directory
'''
def run_command(self, args):
print('creating jeeves headquarters in {0}'.format(os.getcwd()))
os.makedirs('.jeeves')
| [
"[email protected]"
]
| |
eeb6eb58ee42c5bc5f72743af750f3d566f3361e | aaa204ad7f134b526593c785eaa739bff9fc4d2a | /tests/providers/amazon/aws/hooks/test_glacier.py | 4ed3f6aaa2e24f18b4e5a28d34007275140c31de | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
]
| permissive | cfei18/incubator-airflow | 913b40efa3d9f1fdfc5e299ce2693492c9a92dd4 | ffb2078eb5546420864229cdc6ee361f89cab7bd | refs/heads/master | 2022-09-28T14:44:04.250367 | 2022-09-19T16:50:23 | 2022-09-19T16:50:23 | 88,665,367 | 0 | 1 | Apache-2.0 | 2021-02-05T16:29:42 | 2017-04-18T20:00:03 | Python | UTF-8 | Python | false | false | 5,075 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import unittest
from unittest import mock
from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
CREDENTIALS = "aws_conn"
VAULT_NAME = "airflow"
JOB_ID = "1234abcd"
REQUEST_RESULT = {"jobId": "1234abcd"}
RESPONSE_BODY = {"body": "data"}
JOB_STATUS = {"Action": "", "StatusCode": "Succeeded"}
class TestAmazonGlacierHook(unittest.TestCase):
def setUp(self):
with mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.__init__", return_value=None):
self.hook = GlacierHook(aws_conn_id="aws_default")
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_retrieve_inventory_should_return_job_id(self, mock_conn):
# Given
job_id = {"jobId": "1234abcd"}
# when
mock_conn.return_value.initiate_job.return_value = job_id
result = self.hook.retrieve_inventory(VAULT_NAME)
# then
mock_conn.assert_called_once_with()
assert job_id == result
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_retrieve_inventory_should_log_mgs(self, mock_conn):
# given
job_id = {"jobId": "1234abcd"}
# when
with self.assertLogs() as log:
mock_conn.return_value.initiate_job.return_value = job_id
self.hook.retrieve_inventory(VAULT_NAME)
# then
self.assertEqual(
log.output,
[
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Retrieving inventory for vault: {VAULT_NAME}",
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Initiated inventory-retrieval job for: {VAULT_NAME}",
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Retrieval Job ID: {job_id.get('jobId')}",
],
)
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_retrieve_inventory_results_should_return_response(self, mock_conn):
# when
mock_conn.return_value.get_job_output.return_value = RESPONSE_BODY
response = self.hook.retrieve_inventory_results(VAULT_NAME, JOB_ID)
# then
mock_conn.assert_called_once_with()
assert response == RESPONSE_BODY
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_retrieve_inventory_results_should_log_mgs(self, mock_conn):
# when
with self.assertLogs() as log:
mock_conn.return_value.get_job_output.return_value = REQUEST_RESULT
self.hook.retrieve_inventory_results(VAULT_NAME, JOB_ID)
# then
self.assertEqual(
log.output,
[
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Retrieving the job results for vault: {VAULT_NAME}...",
],
)
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_describe_job_should_return_status_succeeded(self, mock_conn):
# when
mock_conn.return_value.describe_job.return_value = JOB_STATUS
response = self.hook.describe_job(VAULT_NAME, JOB_ID)
# then
mock_conn.assert_called_once_with()
assert response == JOB_STATUS
@mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
def test_describe_job_should_log_mgs(self, mock_conn):
# when
with self.assertLogs() as log:
mock_conn.return_value.describe_job.return_value = JOB_STATUS
self.hook.describe_job(VAULT_NAME, JOB_ID)
# then
self.assertEqual(
log.output,
[
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
],
)
| [
"[email protected]"
]
| |
59655e6a2e6c1bf1df975866337b053b89e1ae57 | 111866dd2150170e90e3717df008aa703d7ef30c | /filemanager/domain/__init__.py | 7f428c6590b0064d5266fb7018c0cdcc07f789b4 | []
| no_license | arXiv/arxiv-filemanager | 106c572a6551445a2109c279ce086b7c96a0bcd5 | dfb71a40125324b1c1f4eb865c84cd9d2e512e6c | refs/heads/develop | 2023-04-18T09:45:35.338067 | 2020-03-09T14:59:19 | 2020-03-09T14:59:19 | 113,456,994 | 5 | 6 | null | 2022-12-08T05:50:07 | 2017-12-07T13:55:34 | PostScript | UTF-8 | Python | false | false | 388 | py | """Core concepts and constraints of the file manager service."""
from .uploads import UserFile, Workspace, IChecker, SourceLog, SourceType, \
IStorageAdapter, SourcePackage, ICheckableWorkspace, Readiness, \
Status, LockState
from .file_type import FileType
from .uploads import ICheckingStrategy
from .error import Error, Severity, Code
from .index import NoSuchFile, FileIndex
| [
"[email protected]"
]
| |
7adeb154143a4cfd6b5b6ee2b93edaf9c86afaa2 | b526aecc3aeb35c0931339ede80397f8f1561fbc | /src/dascasi/__init__.py | b262c04aa7f3b2df6609d02475bf132c1456c87d | [
"Apache-2.0"
]
| permissive | space-physics/dascasi | 30e021976529dfc4072ea96181db8d9d1921a07c | 4d72aa91e471a495566044c3fc387344dd12461f | refs/heads/main | 2023-04-17T09:24:22.325605 | 2023-03-21T02:18:44 | 2023-03-21T02:30:15 | 51,016,067 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 768 | py | # Copyright 2023 SciVision, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "3.0.0"
from .web import download
from .io import load, loadcal
from .hdf5 import save_hdf5
__all__ = ["download", "load", "loadcal", "save_hdf5"]
| [
"[email protected]"
]
| |
e726984d4019bc6974ee4b2702b243d18c0669f7 | 73758dde83d1a1823c103e1a4ba71e7c95168f71 | /nsd2006/devops/day02/local_mail.py | 3533d97f07484696810b548f9f0931ac688dde15 | []
| no_license | tonggh220/md_5_nsd_notes | 07ffdee7c23963a7a461f2a2340143b0e97bd9e1 | a58a021ad4c7fbdf7df327424dc518f4044c5116 | refs/heads/master | 2023-07-02T01:34:38.798929 | 2021-05-12T08:48:40 | 2021-05-12T08:48:40 | 393,885,415 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | from email.mime.text import MIMEText
from email.header import Header
import smtplib
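# Note: this assumes a local MTA (e.g. postfix or sendmail) is accepting SMTP
# connections on localhost; no authentication or TLS is used.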
# Build the message; 'plain' means a plain-text body
message = MIMEText("python local email test.\n", 'plain', 'utf8')  # message body
message['From'] = Header('root', 'utf8')
message['To'] = Header('zhangsan', 'utf8')
message['Subject'] = Header('py test', 'utf8')
# Send the mail
smtp = smtplib.SMTP()
smtp.connect('localhost')
smtp.sendmail('root', ['root', 'zhangsan'], message.as_bytes())
| [
"[email protected]"
]
| |
aacc2ade3e3b635267e0895250241964852a07f3 | 51b20543e5ed606636bcde9fba329e5fa948de2e | /communityprofiles/census/remote_file.py | 2bab92682c5bab5ea49d416570d119d3a9aaa99d | [
"MIT"
]
| permissive | 216software/Profiles | b821112225e8522b7b558cab87ae1c12c68c653b | 651da880a3d4295243205bdae4de88504edc91de | refs/heads/dev | 2023-03-16T04:49:01.389186 | 2023-03-09T17:04:04 | 2023-03-09T17:04:04 | 59,139,518 | 3 | 0 | null | 2016-05-18T18:02:53 | 2016-05-18T18:02:53 | null | UTF-8 | Python | false | false | 3,612 | py | # for RemoteFileObject
from os import SEEK_SET, SEEK_CUR, SEEK_END
from os.path import basename
from sys import stderr
from time import time
from datetime import timedelta
from urlparse import urlparse, urljoin
from cStringIO import StringIO
from httplib import HTTPConnection
from urllib import urlopen
from zipfile import ZipFile
from itertools import izip
class RemoteFileObject:
""" Implement enough of this to be useful:
http://docs.python.org/release/2.5.2/lib/bltin-file-objects.html
Pull data from a remote URL with HTTP range headers.
"""
def __init__(self, url, verbose=False, block_size=(16 * 1024)):
self.verbose = verbose
# scheme://host/path;parameters?query#fragment
(scheme, host, path, parameters, query, fragment) = urlparse(url)
self.host = host
self.rest = path + (query and ('?' + query) or '')
self.offset = 0
self.length = self.get_length()
self.chunks = {}
self.block_size = block_size
self.start_time = time()
def get_length(self):
"""
"""
conn = HTTPConnection(self.host)
conn.request('GET', self.rest, headers={'Range': '0-1'})
length = int(conn.getresponse().getheader('content-length'))
if self.verbose:
print >> stderr, length, 'bytes in', basename(self.rest)
return length
def get_range(self, start, end):
"""
"""
headers = {'Range': 'bytes=%(start)d-%(end)d' % locals()}
conn = HTTPConnection(self.host)
conn.request('GET', self.rest, headers=headers)
return conn.getresponse().read()
def read(self, count=None):
""" Read /count/ bytes from the resource at the current offset.
"""
if count is None:
# to the end
count = self.length - self.offset
out = StringIO()
while count:
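            # Find the fixed-size block that contains the current offset. Each
            # block is fetched once with an HTTP Range request and cached in
            # self.chunks, so re-reads of the same region cost nothing.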
chunk_offset = self.block_size * (self.offset / self.block_size)
if chunk_offset not in self.chunks:
range = chunk_offset, min(self.length, self.offset + self.block_size) - 1
self.chunks[chunk_offset] = StringIO(self.get_range(*range))
if self.verbose:
loaded = float(self.block_size) * len(self.chunks) / self.length
expect = (time() - self.start_time) / loaded
remain = max(0, int(expect * (1 - loaded)))
print >> stderr, '%.1f%%' % min(100, 100 * loaded),
print >> stderr, 'of', basename(self.rest),
print >> stderr, 'with', timedelta(seconds=remain), 'to go'
chunk = self.chunks[chunk_offset]
in_chunk_offset = self.offset % self.block_size
in_chunk_count = min(count, self.block_size - in_chunk_offset)
chunk.seek(in_chunk_offset, SEEK_SET)
out.write(chunk.read(in_chunk_count))
count -= in_chunk_count
self.offset += in_chunk_count
out.seek(0)
return out.read()
def seek(self, offset, whence=SEEK_SET):
""" Seek to the specified offset.
/whence/ behaves as with other file-like objects:
http://docs.python.org/lib/bltin-file-objects.html
"""
if whence == SEEK_SET:
self.offset = offset
elif whence == SEEK_CUR:
self.offset += offset
elif whence == SEEK_END:
self.offset = self.length + offset
def tell(self):
return self.offset
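# Example usage (hypothetical URL): wrap a remote zip archive so ZipFile can
# seek around in it without downloading the whole file first.
#   remote = RemoteFileObject('http://example.com/archive.zip')
#   archive = ZipFile(remote)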
| [
"[email protected]"
]
| |
667c8b4b904d7c226e66d67f7273f4a22805337a | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_27051.py | 21e1599a8f2a20678ec565a2b8443285bc4a4d16 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | # Matplotlib legend relative to figure in multiplot
import matplotlib
| [
"[email protected]"
]
| |
7058f2f37989ff337436d6ecf89c51ed574d82ee | 7f33c02743fbfd18726ffef08924f528354372dd | /Python_Projects/python3_selfstudy/priklady_z_knihy/k04/digit_names.py | 1167be7a4ec6d9440194cb8be9928866a345010e | []
| no_license | zabojnikp/study | a524eb9c2265a73e1db0b5f0e76b359c123a397b | 43424bfc6641cd8fa13ab119ce283fb460b4ffc1 | refs/heads/master | 2020-04-06T14:21:55.786353 | 2018-11-27T22:10:48 | 2018-11-27T22:10:48 | 157,538,244 | 0 | 0 | null | 2018-11-27T22:10:49 | 2018-11-14T11:24:20 | Python | UTF-8 | Python | false | false | 1,390 | py | #!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import sys
Language = "cs"
ENGLISH = {0: "zero", 1: "one", 2: "two", 3: "three", 4: "four",
5: "five", 6: "six", 7: "seven", 8: "eight", 9: "nine"}
CZECH = {0: "nula", 1: "jedna", 2: "dvě", 3: "tři", 4: "čtyři",
5: "pět", 6: "šest", 7: "sedm", 8: "osm", 9: "devět"}
def main():
if len(sys.argv) == 1 or sys.argv[1] in {"-h", "--help"}:
print("použití: {0} [en|cs] číslo".format(sys.argv[0]))
sys.exit()
args = sys.argv[1:]
if args[0] in {"en", "cs"}:
global Language
Language = args.pop(0)
print_digits(args.pop(0))
def print_digits(digits):
dictionary = ENGLISH if Language == "en" else CZECH
for digit in digits:
print(dictionary[int(digit)], end=" ")
print()
main()
| [
"[email protected]"
]
| |
8b20ab0b23ca75e102d3f7c1bd8017bf3ac1b485 | 22dcbf9595c28279b681caac26e43113ce75de5c | /automl/cloud-client/import_dataset_test.py | 35d23edc7e8fc745ed598a895c037a49b9cc7f90 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | jerry-enebeli/python-docs-samples | 2e61672e9819405733c94ef824ba0b0f92b3e930 | 0d78724126ce25dd6203cfd2ee3467b88e5c27b9 | refs/heads/master | 2022-12-12T18:05:16.899492 | 2020-09-01T22:35:40 | 2020-09-01T22:35:40 | 292,189,370 | 1 | 0 | Apache-2.0 | 2020-09-02T05:39:23 | 2020-09-02T05:39:22 | null | UTF-8 | Python | false | false | 1,497 | py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import import_dataset
PROJECT_ID = os.environ["AUTOML_PROJECT_ID"]
BUCKET_ID = "{}-lcm".format(PROJECT_ID)
DATASET_ID = "TEN0000000000000000000"
def test_import_dataset(capsys):
# As importing a dataset can take a long time and only four operations can
# be run on a dataset at once. Try to import into a nonexistent dataset and
# confirm that the dataset was not found, but other elements of the request
# were valid.
try:
data = "gs://{}/sentiment-analysis/dataset.csv".format(BUCKET_ID)
import_dataset.import_dataset(PROJECT_ID, DATASET_ID, data)
out, _ = capsys.readouterr()
assert (
"The Dataset doesn't exist or is inaccessible for use with AutoMl."
in out
)
except Exception as e:
assert (
"The Dataset doesn't exist or is inaccessible for use with AutoMl."
in e.message
)
| [
"[email protected]"
]
| |
886d0997531024402470f77c621befe7e97b1e63 | c64d6d1fce81212965b1df7d4f4d4e72f218243e | /dot.py | 4b4b054e57b178f594ca931bb299f515610ea5a3 | []
| no_license | saipoojavr/saipoojacodekata | 384b0aa19f29c8b66f9498ebfbdf162cda4ddc97 | 3e38f1f06c4348f4262f654526c86d64e6893029 | refs/heads/master | 2020-05-23T01:01:54.615085 | 2019-12-06T15:29:54 | 2019-12-06T15:29:54 | 186,580,757 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | astr=str(input())
print(astr+'.') | [
"[email protected]"
]
| |
e658bf448865024182e1f4fcc107da7498d979d6 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_sponging.py | 3002cc59058a485eaef0fe654fc8482c1eaff6ca | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
#calss header
class _SPONGING():
def __init__(self,):
self.name = "SPONGING"
		self.definitions = 'sponge'
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['sponge']
| [
"[email protected]"
]
| |
eb5007714d3e169fc67ff7de612d97edbdde15ae | ca805265bbc8d9b3f5ccb8dd343524843fc0f776 | /scaling/commands/bench_results_processer.py | d94add1e14ef3dfdd535d410b4a504c6f1e7200a | []
| no_license | alenzhao/QIIME-Scaling | 8dc7b4b99da404c016e59e48197b8f938df1cf14 | 29408a3a0ff2a74039f78a04fff831dabb23fa1a | refs/heads/master | 2021-01-12T10:46:22.961035 | 2016-06-18T16:56:48 | 2016-06-18T16:56:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | #!/usr/bin/env python
from __future__ import division
__author__ = "Jose Antonio Navas Molina"
__copyright__ = "Copyright 2013, The QIIME Scaling Project"
__credits__ = ["Jose Antonio Navas Molina"]
__license__ = "BSD"
__version__ = "0.0.2-dev"
__maintainer__ = "Jose Antonio Navas Molina"
__email__ = "[email protected]"
__status__ = "Development"
from pyqi.core.command import (Command, CommandIn, CommandOut,
ParameterCollection)
from scaling.process_results import (process_benchmark_results, CompData)
from scaling.cluster_util import wait_on
class BenchResultsProcesser(Command):
"""Subclassing the pyqi.core.command.Command class"""
BriefDescription = "Processes the benchmark suite results"
LongDescription = ("Takes the benchmark suite output directory and "
"processes the benchmark measurements, creating plots "
"and collapsing results in a usable form.")
CommandIns = ParameterCollection([
CommandIn(Name='bench_results', DataType=list,
Description='List with the benchmark results',
Required=True),
CommandIn(Name='job_ids', DataType=list,
Description='List of job ids to wait for if running in a '
'pbs cluster', Required=False)
])
CommandOuts = ParameterCollection([
CommandOut(Name="bench_data", DataType=CompData,
Description="Dictionary with the benchmark results"),
])
def run(self, **kwargs):
bench_results = kwargs['bench_results']
job_ids = kwargs['job_ids']
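        # If the benchmarks were submitted to a PBS cluster, block until every
        # job id has finished before collapsing the measurements.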
if job_ids:
wait_on(job_ids)
data = process_benchmark_results(bench_results)
return {'bench_data': data}
CommandConstructor = BenchResultsProcesser
| [
"[email protected]"
]
| |
0d3e7da7d35dc0e85ff5002ba1b008ceca4e07f2 | d489eb7998aa09e17ce8d8aef085a65f799e6a02 | /lib/modules/powershell/situational_awareness/network/powerview/share_finder.py | b7eb7430bb04ecd40ca90099bbefd0f114040073 | [
"MIT"
]
| permissive | fengjixuchui/invader | d36078bbef3d740f95930d9896b2d7dd7227474c | 68153dafbe25e7bb821c8545952d0cc15ae35a3e | refs/heads/master | 2020-07-21T19:45:10.479388 | 2019-09-26T11:32:38 | 2019-09-26T11:32:38 | 206,958,809 | 2 | 1 | MIT | 2019-09-26T11:32:39 | 2019-09-07T11:32:17 | PowerShell | UTF-8 | Python | false | false | 6,300 | py | from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Find-DomainShare',
'Author': ['@harmj0y'],
'Description': ('Finds shares on machines in the domain. Part of PowerView.'),
'Background' : True,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'https://github.com/PowerShellMafia/PowerSploit/blob/dev/Recon/'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'ComputerName' : {
'Description' : 'Hosts to enumerate.',
'Required' : False,
'Value' : ''
},
'ComputerLDAPFilter' : {
'Description' : 'Host filter name to query AD for, wildcards accepted.',
'Required' : False,
'Value' : ''
},
'ComputerSearchBase' : {
'Description' : 'Specifies the LDAP source to search through for computers',
'Required' : False,
'Value' : ''
},
'ComputerOperatingSystem' : {
'Description' : 'Return computers with a specific operating system, wildcards accepted.',
'Required' : False,
'Value' : ''
},
'ComputerServicePack' : {
'Description' : 'Return computers with the specified service pack, wildcards accepted.',
'Required' : False,
'Value' : ''
},
'ComputerSiteName' : {
'Description' : 'Return computers in the specific AD Site name, wildcards accepted.',
'Required' : False,
'Value' : ''
},
'CheckShareAccess' : {
'Description' : 'Switch. Only display found shares that the local user has access to.',
'Required' : False,
'Value' : ''
},
'Server' : {
'Description' : 'Specifies an active directory server (domain controller) to bind to',
'Required' : False,
'Value' : ''
},
'SearchScope' : {
'Description' : 'Specifies the scope to search under, Base/OneLevel/Subtree (default of Subtree)',
'Required' : False,
'Value' : ''
},
'ResultPageSize' : {
'Description' : 'Specifies the PageSize to set for the LDAP searcher object.',
'Required' : False,
'Value' : ''
},
'ServerTimeLimit' : {
'Description' : 'Specifies the maximum amount of time the server spends searching. Default of 120 seconds.',
'Required' : False,
'Value' : ''
},
'Tombstone' : {
'Description' : 'Switch. Specifies that the search should also return deleted/tombstoned objects.',
'Required' : False,
'Value' : ''
},
'Delay' : {
'Description' : 'Delay between enumerating hosts, defaults to 0.',
'Required' : False,
'Value' : ''
},
'Jitter' : {
'Description' : 'Specifies the jitter (0-1.0) to apply to any specified -Delay, defaults to +/- 0.3.',
'Required' : False,
'Value' : ''
},
'Threads' : {
'Description' : 'The maximum concurrent threads to execute.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
moduleName = self.info["Name"]
# read in the common powerview.ps1 module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/powerview.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
# get just the code needed for the specified function
script = helpers.strip_powershell_comments(moduleCode)
script += "\n" + moduleName + " "
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
script += " -" + str(option) + " " + str(values['Value'])
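        # Pipe the PowerView output through Out-String and append a completion
        # marker so the agent reports when the module has finished running.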
script += ' | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'
if obfuscate:
script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, obfuscationCommand=obfuscationCommand)
return script
| [
"[email protected]"
]
| |
fa1ad882fd1595df3715ec3227356ed30c4c6017 | fc212767c6c838360b62a3dcd8030a1dfcbf62fc | /muddery/utils/quest_handler.py | 7d19069f731f83b3001d9318edf55756332a4a5f | [
"BSD-3-Clause"
]
| permissive | caibingcheng/muddery | 24d6eba76358621736e6a3d66333361239c35472 | dcbf55f4e1c18a2c69576fd0edcec4699c1519b9 | refs/heads/master | 2021-05-19T09:49:19.319735 | 2020-03-29T03:55:51 | 2020-03-29T03:55:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,984 | py | """
QuestHandler handles a character's quests.
"""
from django.conf import settings
from django.apps import apps
from django.core.exceptions import ObjectDoesNotExist
from evennia.utils import logger
from muddery.utils.builder import build_object
from muddery.statements.statement_handler import STATEMENT_HANDLER
from muddery.utils.localized_strings_handler import _
from muddery.utils.exception import MudderyError
from muddery.utils.game_settings import GAME_SETTINGS
from muddery.worlddata.dao.quest_dependencies_mapper import QUEST_DEPENDENCIES
from muddery.mappings.quest_status_set import QUEST_STATUS_SET
from muddery.mappings.typeclass_set import TYPECLASS
class QuestHandler(object):
"""
Handles a character's quests.
"""
def __init__(self, owner):
"""
Initialize handler
"""
self.owner = owner
self.current_quests = owner.db.current_quests
self.finished_quests = owner.db.finished_quests
def accept(self, quest_key):
"""
Accept a quest.
Args:
quest_key: (string) quest's key
Returns:
None
"""
if quest_key in self.current_quests:
return
# Create quest object.
new_quest = build_object(quest_key)
if not new_quest:
return
new_quest.set_owner(self.owner)
self.current_quests[quest_key] = new_quest
self.owner.msg({"msg": _("Accepted quest {C%s{n.") % new_quest.get_name()})
self.show_quests()
self.owner.show_location()
def remove_all(self):
"""
Remove all quests.
        It is called when the quests' owner is about to be deleted.
"""
for quest_key in self.current_quests:
self.current_quests[quest_key].delete()
        self.current_quests = {}
def give_up(self, quest_key):
"""
        Give up a quest.
Args:
quest_key: (string) quest's key
Returns:
None
"""
if not GAME_SETTINGS.get("can_give_up_quests"):
logger.log_tracemsg("Can not give up quests.")
raise MudderyError(_("Can not give up this quest."))
if quest_key not in self.current_quests:
raise MudderyError(_("Can not find this quest."))
self.current_quests[quest_key].delete()
del(self.current_quests[quest_key])
if quest_key in self.finished_quests:
self.finished_quests.remove(quest_key)
self.show_quests()
def turn_in(self, quest_key):
"""
Turn in a quest.
Args:
quest_key: (string) quest's key
Returns:
None
"""
if quest_key not in self.current_quests:
return
if not self.current_quests[quest_key].is_accomplished:
return
# Get quest's name.
name = self.current_quests[quest_key].get_name()
# Call turn in function in the quest.
self.current_quests[quest_key].turn_in()
# Delete the quest.
self.current_quests[quest_key].delete()
del (self.current_quests[quest_key])
self.finished_quests.add(quest_key)
self.owner.msg({"msg": _("Turned in quest {C%s{n.") % name})
self.show_quests()
self.owner.show_location()
def get_accomplished_quests(self):
"""
Get all quests that their objectives are accomplished.
"""
quests = set()
for quest in self.current_quests:
if self.current_quests[quest].is_accomplished():
quests.add(quest)
return quests
def is_accomplished(self, quest_key):
"""
Whether the character accomplished this quest or not.
Args:
quest_key: (string) quest's key
Returns:
            (boolean) result
"""
if quest_key not in self.current_quests:
return False
return self.current_quests[quest_key].is_accomplished()
def is_not_accomplished(self, quest_key):
"""
        Whether the character is doing this quest but has not yet accomplished it.
Args:
quest_key: (string) quest's key
Returns:
            (boolean) result
"""
if quest_key not in self.current_quests:
return False
return not self.current_quests[quest_key].is_accomplished()
def is_finished(self, quest_key):
"""
Whether the character finished this quest or not.
Args:
quest_key: (string) quest's key
Returns:
            (boolean) result
"""
return quest_key in self.finished_quests
def is_in_progress(self, quest_key):
"""
If the character is doing this quest.
Args:
quest_key: (string) quest's key
Returns:
            (boolean) result
"""
return quest_key in self.current_quests
def can_provide(self, quest_key):
"""
        Whether this quest can be provided to the owner.
Args:
quest_key: (string) quest's key
Returns:
            (boolean) result
"""
if self.is_finished(quest_key):
return False
if self.is_in_progress(quest_key):
return False
if not self.match_dependencies(quest_key):
return False
if not self.match_condition(quest_key):
return False
return True
def match_dependencies(self, quest_key):
"""
Check quest's dependencies
Args:
quest_key: (string) quest's key
Returns:
(boolean) result
"""
for dep in QUEST_DEPENDENCIES.filter(quest_key):
status = QUEST_STATUS_SET.get(dep.type)
if not status.match(self.owner, dep.dependency):
return False
return True
def match_condition(self, quest_key):
"""
Check if the quest matches its condition.
Args:
quest_key: (string) quest's key
Returns:
(boolean) result
"""
# Get quest's record.
model_name = TYPECLASS("QUEST").model_name
if not model_name:
return False
model_quest = apps.get_model(settings.WORLD_DATA_APP, model_name)
try:
record = model_quest.objects.get(key=quest_key)
return STATEMENT_HANDLER.match_condition(record.condition, self.owner, None)
except Exception as e:
logger.log_errmsg("Can't get quest %s's condition: %s" % (quest_key, e))
return False
def show_quests(self):
"""
Send quests to player.
"""
quests = self.return_quests()
self.owner.msg({"quests": quests})
def return_quests(self):
"""
Get quests' data.
"""
quests = []
for quest in self.current_quests.values():
info = {"dbref": quest.dbref,
"name": quest.name,
"desc": quest.db.desc,
"objectives": quest.return_objectives(),
"accomplished": quest.is_accomplished()}
quests.append(info)
return quests
def at_objective(self, object_type, object_key, number=1):
"""
Called when the owner may complete some objectives.
        Calls the relevant hooks.
Args:
object_type: (type) objective's type
object_key: (string) object's key
number: (int) objective's number
Returns:
None
"""
status_changed = False
for quest in self.current_quests.values():
if quest.at_objective(object_type, object_key, number):
status_changed = True
if quest.is_accomplished():
self.owner.msg({"msg":
_("Quest {C%s{n's goals are accomplished.") % quest.name})
if status_changed:
self.show_quests()
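# Illustrative usage sketch (comments only, not part of the module). The quest key
# and objective constant below are hypothetical; real values come from the game's
# world data and muddery's defines module, and the handler is normally reached
# through the character object rather than constructed directly:
#
#     handler = QuestHandler(character)
#     handler.accept("tutorial_quest")
#     # ... later, when the character performs an action that may match an objective:
#     handler.at_objective(OBJECTIVE_TALK, "village_elder")   # OBJECTIVE_TALK: assumed constant
#     if handler.is_accomplished("tutorial_quest"):
#         handler.turn_in("tutorial_quest")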
| [
"[email protected]"
]
| |
6e8855e96569e6e5c38f9d5ad1ce4910d477b9c4 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2021_06_01_preview/models/_models_py3.py | 4007eb90a0326a819655f715f93c6d9bbbc685cf | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 206,121 | py | # coding=utf-8
# pylint: disable=too-many-lines
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union
from ... import _serialization
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from .. import models as _models
class ActivationProperties(_serialization.Model):
"""The activation properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status: The activation status of the connected registry. Known values are: "Active" and
"Inactive".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ActivationStatus
"""
_validation = {
"status": {"readonly": True},
}
_attribute_map = {
"status": {"key": "status", "type": "str"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.status = None
class ActiveDirectoryObject(_serialization.Model):
"""The Active Directory Object that will be used for authenticating the token of a container
registry.
:ivar object_id: The user/group/application object ID for Active Directory Object that will be
used for authenticating the token of a container registry.
:vartype object_id: str
    :ivar tenant_id: The tenant ID of the user/group/application Active Directory Object that
will be used for authenticating the token of a container registry.
:vartype tenant_id: str
"""
_attribute_map = {
"object_id": {"key": "objectId", "type": "str"},
"tenant_id": {"key": "tenantId", "type": "str"},
}
def __init__(self, *, object_id: Optional[str] = None, tenant_id: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword object_id: The user/group/application object ID for Active Directory Object that will
be used for authenticating the token of a container registry.
:paramtype object_id: str
        :keyword tenant_id: The tenant ID of the user/group/application Active Directory Object that
will be used for authenticating the token of a container registry.
:paramtype tenant_id: str
"""
super().__init__(**kwargs)
self.object_id = object_id
self.tenant_id = tenant_id
class Actor(_serialization.Model):
"""The agent that initiated the event. For most situations, this could be from the authorization
context of the request.
:ivar name: The subject or username associated with the request context that generated the
event.
:vartype name: str
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
}
def __init__(self, *, name: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword name: The subject or username associated with the request context that generated the
event.
:paramtype name: str
"""
super().__init__(**kwargs)
self.name = name
class CallbackConfig(_serialization.Model):
"""The configuration of service URI and custom headers for the webhook.
All required parameters must be populated in order to send to Azure.
:ivar service_uri: The service URI for the webhook to post notifications. Required.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
"""
_validation = {
"service_uri": {"required": True},
}
_attribute_map = {
"service_uri": {"key": "serviceUri", "type": "str"},
"custom_headers": {"key": "customHeaders", "type": "{str}"},
}
def __init__(self, *, service_uri: str, custom_headers: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword service_uri: The service URI for the webhook to post notifications. Required.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
"""
super().__init__(**kwargs)
self.service_uri = service_uri
self.custom_headers = custom_headers
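# Minimal construction sketch (comments only; the URI and header values are
# hypothetical). A CallbackConfig describes where and how webhook notifications
# are posted:
#
#     config = CallbackConfig(
#         service_uri="https://example.com/acr/webhook",
#         custom_headers={"Authorization": "Bearer <token>"},
#     )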
class ProxyResource(_serialization.Model):
"""The resource model definition for a ARM proxy resource. It will have everything other than
required location and tags.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.system_data = None
class ConnectedRegistry(ProxyResource): # pylint: disable=too-many-instance-attributes
"""An object that represents a connected registry for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar provisioning_state: Provisioning state of the resource. Known values are: "Creating",
"Updating", "Deleting", "Succeeded", "Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar mode: The mode of the connected registry resource that indicates the permissions of the
registry. Known values are: "Registry" and "Mirror".
:vartype mode: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectedRegistryMode
:ivar version: The current version of ACR runtime on the connected registry.
:vartype version: str
:ivar connection_state: The current connection state of the connected registry. Known values
are: "Online", "Offline", "Syncing", and "Unhealthy".
:vartype connection_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectionState
:ivar last_activity_time: The last activity time of the connected registry.
:vartype last_activity_time: ~datetime.datetime
:ivar activation: The activation properties of the connected registry.
:vartype activation:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ActivationProperties
:ivar parent: The parent of the connected registry.
:vartype parent: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ParentProperties
:ivar client_token_ids: The list of the ACR token resource IDs used to authenticate clients to
the connected registry.
:vartype client_token_ids: list[str]
:ivar login_server: The login server properties of the connected registry.
:vartype login_server:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoginServerProperties
:ivar logging: The logging properties of the connected registry.
:vartype logging: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoggingProperties
:ivar status_details: The list of current statuses of the connected registry.
:vartype status_details:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.StatusDetailProperties]
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
"version": {"readonly": True},
"connection_state": {"readonly": True},
"last_activity_time": {"readonly": True},
"activation": {"readonly": True},
"status_details": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"mode": {"key": "properties.mode", "type": "str"},
"version": {"key": "properties.version", "type": "str"},
"connection_state": {"key": "properties.connectionState", "type": "str"},
"last_activity_time": {"key": "properties.lastActivityTime", "type": "iso-8601"},
"activation": {"key": "properties.activation", "type": "ActivationProperties"},
"parent": {"key": "properties.parent", "type": "ParentProperties"},
"client_token_ids": {"key": "properties.clientTokenIds", "type": "[str]"},
"login_server": {"key": "properties.loginServer", "type": "LoginServerProperties"},
"logging": {"key": "properties.logging", "type": "LoggingProperties"},
"status_details": {"key": "properties.statusDetails", "type": "[StatusDetailProperties]"},
}
def __init__(
self,
*,
mode: Optional[Union[str, "_models.ConnectedRegistryMode"]] = None,
parent: Optional["_models.ParentProperties"] = None,
client_token_ids: Optional[List[str]] = None,
login_server: Optional["_models.LoginServerProperties"] = None,
logging: Optional["_models.LoggingProperties"] = None,
**kwargs: Any
) -> None:
"""
:keyword mode: The mode of the connected registry resource that indicates the permissions of
the registry. Known values are: "Registry" and "Mirror".
:paramtype mode: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectedRegistryMode
:keyword parent: The parent of the connected registry.
:paramtype parent: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ParentProperties
:keyword client_token_ids: The list of the ACR token resource IDs used to authenticate clients
to the connected registry.
:paramtype client_token_ids: list[str]
:keyword login_server: The login server properties of the connected registry.
:paramtype login_server:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoginServerProperties
:keyword logging: The logging properties of the connected registry.
:paramtype logging: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoggingProperties
"""
super().__init__(**kwargs)
self.provisioning_state = None
self.mode = mode
self.version = None
self.connection_state = None
self.last_activity_time = None
self.activation = None
self.parent = parent
self.client_token_ids = client_token_ids
self.login_server = login_server
self.logging = logging
self.status_details = None
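# Minimal construction sketch (comments only; the token resource ID is hypothetical
# and the read-only fields such as provisioning_state and connection_state are
# populated by the service):
#
#     connected = ConnectedRegistry(
#         mode="Mirror",
#         client_token_ids=[
#             "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#             "Microsoft.ContainerRegistry/registries/<acr>/tokens/<client-token>"
#         ],
#         logging=LoggingProperties(log_level="Debug"),
#     )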
class ConnectedRegistryListResult(_serialization.Model):
"""The result of a request to list connected registries for a container registry.
:ivar value: The list of connected registries. Since this list may be incomplete, the nextLink
field should be used to request the next list of connected registries.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectedRegistry]
:ivar next_link: The URI that can be used to request the next list of connected registries.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[ConnectedRegistry]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self,
*,
value: Optional[List["_models.ConnectedRegistry"]] = None,
next_link: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword value: The list of connected registries. Since this list may be incomplete, the
nextLink field should be used to request the next list of connected registries.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectedRegistry]
:keyword next_link: The URI that can be used to request the next list of connected registries.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class ConnectedRegistryUpdateParameters(_serialization.Model):
"""The parameters for updating a connected registry.
:ivar sync_properties: The sync properties of the connected registry with its parent.
:vartype sync_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.SyncUpdateProperties
:ivar logging: The logging properties of the connected registry.
:vartype logging: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoggingProperties
:ivar client_token_ids: The list of the ACR token resource IDs used to authenticate clients to
the connected registry.
:vartype client_token_ids: list[str]
"""
_attribute_map = {
"sync_properties": {"key": "properties.syncProperties", "type": "SyncUpdateProperties"},
"logging": {"key": "properties.logging", "type": "LoggingProperties"},
"client_token_ids": {"key": "properties.clientTokenIds", "type": "[str]"},
}
def __init__(
self,
*,
sync_properties: Optional["_models.SyncUpdateProperties"] = None,
logging: Optional["_models.LoggingProperties"] = None,
client_token_ids: Optional[List[str]] = None,
**kwargs: Any
) -> None:
"""
:keyword sync_properties: The sync properties of the connected registry with its parent.
:paramtype sync_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.SyncUpdateProperties
:keyword logging: The logging properties of the connected registry.
:paramtype logging: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LoggingProperties
:keyword client_token_ids: The list of the ACR token resource IDs used to authenticate clients
to the connected registry.
:paramtype client_token_ids: list[str]
"""
super().__init__(**kwargs)
self.sync_properties = sync_properties
self.logging = logging
self.client_token_ids = client_token_ids
class EncryptionProperty(_serialization.Model):
"""EncryptionProperty.
:ivar status: Indicates whether or not the encryption is enabled for container registry. Known
values are: "enabled" and "disabled".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionStatus
:ivar key_vault_properties: Key vault properties.
:vartype key_vault_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.KeyVaultProperties
"""
_attribute_map = {
"status": {"key": "status", "type": "str"},
"key_vault_properties": {"key": "keyVaultProperties", "type": "KeyVaultProperties"},
}
def __init__(
self,
*,
status: Optional[Union[str, "_models.EncryptionStatus"]] = None,
key_vault_properties: Optional["_models.KeyVaultProperties"] = None,
**kwargs: Any
) -> None:
"""
:keyword status: Indicates whether or not the encryption is enabled for container registry.
Known values are: "enabled" and "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionStatus
:keyword key_vault_properties: Key vault properties.
:paramtype key_vault_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.KeyVaultProperties
"""
super().__init__(**kwargs)
self.status = status
self.key_vault_properties = key_vault_properties
class ErrorResponse(_serialization.Model):
"""An error response from the Azure Container Registry service.
:ivar error: Azure container registry build API error body.
:vartype error: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ErrorResponseBody
"""
_attribute_map = {
"error": {"key": "error", "type": "ErrorResponseBody"},
}
def __init__(self, *, error: Optional["_models.ErrorResponseBody"] = None, **kwargs: Any) -> None:
"""
:keyword error: Azure container registry build API error body.
:paramtype error: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ErrorResponseBody
"""
super().__init__(**kwargs)
self.error = error
class ErrorResponseBody(_serialization.Model):
"""An error response from the Azure Container Registry service.
All required parameters must be populated in order to send to Azure.
:ivar code: error code. Required.
:vartype code: str
:ivar message: error message. Required.
:vartype message: str
:ivar target: target of the particular error.
:vartype target: str
:ivar details: an array of additional nested error response info objects, as described by this
contract.
:vartype details:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.InnerErrorDescription]
"""
_validation = {
"code": {"required": True},
"message": {"required": True},
}
_attribute_map = {
"code": {"key": "code", "type": "str"},
"message": {"key": "message", "type": "str"},
"target": {"key": "target", "type": "str"},
"details": {"key": "details", "type": "[InnerErrorDescription]"},
}
def __init__(
self,
*,
code: str,
message: str,
target: Optional[str] = None,
details: Optional[List["_models.InnerErrorDescription"]] = None,
**kwargs: Any
) -> None:
"""
:keyword code: error code. Required.
:paramtype code: str
:keyword message: error message. Required.
:paramtype message: str
:keyword target: target of the particular error.
:paramtype target: str
:keyword details: an array of additional nested error response info objects, as described by
this contract.
:paramtype details:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.InnerErrorDescription]
"""
super().__init__(**kwargs)
self.code = code
self.message = message
self.target = target
self.details = details
class EventInfo(_serialization.Model):
"""The basic information of an event.
:ivar id: The event ID.
:vartype id: str
"""
_attribute_map = {
"id": {"key": "id", "type": "str"},
}
def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylint: disable=redefined-builtin
"""
:keyword id: The event ID.
:paramtype id: str
"""
super().__init__(**kwargs)
self.id = id
class Event(EventInfo):
"""The event for a webhook.
:ivar id: The event ID.
:vartype id: str
:ivar event_request_message: The event request message sent to the service URI.
:vartype event_request_message:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventRequestMessage
:ivar event_response_message: The event response message received from the service URI.
:vartype event_response_message:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventResponseMessage
"""
_attribute_map = {
"id": {"key": "id", "type": "str"},
"event_request_message": {"key": "eventRequestMessage", "type": "EventRequestMessage"},
"event_response_message": {"key": "eventResponseMessage", "type": "EventResponseMessage"},
}
def __init__(
self,
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
event_request_message: Optional["_models.EventRequestMessage"] = None,
event_response_message: Optional["_models.EventResponseMessage"] = None,
**kwargs: Any
) -> None:
"""
:keyword id: The event ID.
:paramtype id: str
:keyword event_request_message: The event request message sent to the service URI.
:paramtype event_request_message:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventRequestMessage
:keyword event_response_message: The event response message received from the service URI.
:paramtype event_response_message:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventResponseMessage
"""
super().__init__(id=id, **kwargs)
self.event_request_message = event_request_message
self.event_response_message = event_response_message
class EventContent(_serialization.Model):
"""The content of the event request message.
:ivar id: The event ID.
:vartype id: str
:ivar timestamp: The time at which the event occurred.
:vartype timestamp: ~datetime.datetime
:ivar action: The action that encompasses the provided event.
:vartype action: str
:ivar target: The target of the event.
:vartype target: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Target
:ivar request: The request that generated the event.
:vartype request: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Request
:ivar actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:vartype actor: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Actor
:ivar source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:vartype source: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Source
"""
_attribute_map = {
"id": {"key": "id", "type": "str"},
"timestamp": {"key": "timestamp", "type": "iso-8601"},
"action": {"key": "action", "type": "str"},
"target": {"key": "target", "type": "Target"},
"request": {"key": "request", "type": "Request"},
"actor": {"key": "actor", "type": "Actor"},
"source": {"key": "source", "type": "Source"},
}
def __init__(
self,
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
timestamp: Optional[datetime.datetime] = None,
action: Optional[str] = None,
target: Optional["_models.Target"] = None,
request: Optional["_models.Request"] = None,
actor: Optional["_models.Actor"] = None,
source: Optional["_models.Source"] = None,
**kwargs: Any
) -> None:
"""
:keyword id: The event ID.
:paramtype id: str
:keyword timestamp: The time at which the event occurred.
:paramtype timestamp: ~datetime.datetime
:keyword action: The action that encompasses the provided event.
:paramtype action: str
:keyword target: The target of the event.
:paramtype target: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Target
:keyword request: The request that generated the event.
:paramtype request: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Request
:keyword actor: The agent that initiated the event. For most situations, this could be from the
authorization context of the request.
:paramtype actor: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Actor
:keyword source: The registry node that generated the event. Put differently, while the actor
initiates the event, the source generates it.
:paramtype source: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Source
"""
super().__init__(**kwargs)
self.id = id
self.timestamp = timestamp
self.action = action
self.target = target
self.request = request
self.actor = actor
self.source = source
class EventListResult(_serialization.Model):
"""The result of a request to list events for a webhook.
:ivar value: The list of events. Since this list may be incomplete, the nextLink field should
be used to request the next list of events.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Event]
:ivar next_link: The URI that can be used to request the next list of events.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[Event]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.Event"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of events. Since this list may be incomplete, the nextLink field
should be used to request the next list of events.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Event]
:keyword next_link: The URI that can be used to request the next list of events.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class EventRequestMessage(_serialization.Model):
"""The event request message sent to the service URI.
:ivar content: The content of the event request message.
:vartype content: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventContent
:ivar headers: The headers of the event request message.
:vartype headers: dict[str, str]
:ivar method: The HTTP method used to send the event request message.
:vartype method: str
:ivar request_uri: The URI used to send the event request message.
:vartype request_uri: str
:ivar version: The HTTP message version.
:vartype version: str
"""
_attribute_map = {
"content": {"key": "content", "type": "EventContent"},
"headers": {"key": "headers", "type": "{str}"},
"method": {"key": "method", "type": "str"},
"request_uri": {"key": "requestUri", "type": "str"},
"version": {"key": "version", "type": "str"},
}
def __init__(
self,
*,
content: Optional["_models.EventContent"] = None,
headers: Optional[Dict[str, str]] = None,
method: Optional[str] = None,
request_uri: Optional[str] = None,
version: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword content: The content of the event request message.
:paramtype content: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.EventContent
:keyword headers: The headers of the event request message.
:paramtype headers: dict[str, str]
:keyword method: The HTTP method used to send the event request message.
:paramtype method: str
:keyword request_uri: The URI used to send the event request message.
:paramtype request_uri: str
:keyword version: The HTTP message version.
:paramtype version: str
"""
super().__init__(**kwargs)
self.content = content
self.headers = headers
self.method = method
self.request_uri = request_uri
self.version = version
class EventResponseMessage(_serialization.Model):
"""The event response message received from the service URI.
:ivar content: The content of the event response message.
:vartype content: str
:ivar headers: The headers of the event response message.
:vartype headers: dict[str, str]
:ivar reason_phrase: The reason phrase of the event response message.
:vartype reason_phrase: str
:ivar status_code: The status code of the event response message.
:vartype status_code: str
:ivar version: The HTTP message version.
:vartype version: str
"""
_attribute_map = {
"content": {"key": "content", "type": "str"},
"headers": {"key": "headers", "type": "{str}"},
"reason_phrase": {"key": "reasonPhrase", "type": "str"},
"status_code": {"key": "statusCode", "type": "str"},
"version": {"key": "version", "type": "str"},
}
def __init__(
self,
*,
content: Optional[str] = None,
headers: Optional[Dict[str, str]] = None,
reason_phrase: Optional[str] = None,
status_code: Optional[str] = None,
version: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword content: The content of the event response message.
:paramtype content: str
:keyword headers: The headers of the event response message.
:paramtype headers: dict[str, str]
:keyword reason_phrase: The reason phrase of the event response message.
:paramtype reason_phrase: str
:keyword status_code: The status code of the event response message.
:paramtype status_code: str
:keyword version: The HTTP message version.
:paramtype version: str
"""
super().__init__(**kwargs)
self.content = content
self.headers = headers
self.reason_phrase = reason_phrase
self.status_code = status_code
self.version = version
class ExportPipeline(ProxyResource):
"""An object that represents an export pipeline for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar location: The location of the export pipeline.
:vartype location: str
:ivar identity: The identity of the export pipeline.
:vartype identity: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:ivar target: The target properties of the export pipeline.
:vartype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipelineTargetProperties
:ivar options: The list of all options configured for the pipeline.
:vartype options: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineOptions]
:ivar provisioning_state: The provisioning state of the pipeline at the time the operation was
called. Known values are: "Creating", "Updating", "Deleting", "Succeeded", "Failed", and
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"location": {"key": "location", "type": "str"},
"identity": {"key": "identity", "type": "IdentityProperties"},
"target": {"key": "properties.target", "type": "ExportPipelineTargetProperties"},
"options": {"key": "properties.options", "type": "[str]"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
def __init__(
self,
*,
location: Optional[str] = None,
identity: Optional["_models.IdentityProperties"] = None,
target: Optional["_models.ExportPipelineTargetProperties"] = None,
options: Optional[List[Union[str, "_models.PipelineOptions"]]] = None,
**kwargs: Any
) -> None:
"""
:keyword location: The location of the export pipeline.
:paramtype location: str
:keyword identity: The identity of the export pipeline.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:keyword target: The target properties of the export pipeline.
:paramtype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipelineTargetProperties
:keyword options: The list of all options configured for the pipeline.
:paramtype options: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineOptions]
"""
super().__init__(**kwargs)
self.location = location
self.identity = identity
self.target = target
self.options = options
self.provisioning_state = None
class ExportPipelineListResult(_serialization.Model):
"""The result of a request to list export pipelines for a container registry.
:ivar value: The list of export pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of export pipelines.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipeline]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[ExportPipeline]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.ExportPipeline"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of export pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of export pipelines.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipeline]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class ExportPipelineTargetProperties(_serialization.Model):
"""The properties of the export pipeline target.
All required parameters must be populated in order to send to Azure.
:ivar type: The type of target for the export pipeline.
:vartype type: str
:ivar uri: The target uri of the export pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:vartype uri: str
    :ivar key_vault_uri: The key vault secret uri to obtain the target storage SAS token.
Required.
:vartype key_vault_uri: str
"""
_validation = {
"key_vault_uri": {"required": True},
}
_attribute_map = {
"type": {"key": "type", "type": "str"},
"uri": {"key": "uri", "type": "str"},
"key_vault_uri": {"key": "keyVaultUri", "type": "str"},
}
def __init__(
self, *, key_vault_uri: str, type: Optional[str] = None, uri: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword type: The type of target for the export pipeline.
:paramtype type: str
:keyword uri: The target uri of the export pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:paramtype uri: str
        :keyword key_vault_uri: The key vault secret uri to obtain the target storage SAS token.
Required.
:paramtype key_vault_uri: str
"""
super().__init__(**kwargs)
self.type = type
self.uri = uri
self.key_vault_uri = key_vault_uri
class ExportPolicy(_serialization.Model):
"""The export policy for a container registry.
:ivar status: The value that indicates whether the policy is enabled or not. Known values are:
"enabled" and "disabled".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPolicyStatus
"""
_attribute_map = {
"status": {"key": "status", "type": "str"},
}
def __init__(self, *, status: Union[str, "_models.ExportPolicyStatus"] = "enabled", **kwargs: Any) -> None:
"""
:keyword status: The value that indicates whether the policy is enabled or not. Known values
are: "enabled" and "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPolicyStatus
"""
super().__init__(**kwargs)
self.status = status
class GenerateCredentialsParameters(_serialization.Model):
"""The parameters used to generate credentials for a specified token or user of a container
registry.
:ivar token_id: The resource ID of the token for which credentials have to be generated.
:vartype token_id: str
:ivar expiry: The expiry date of the generated credentials after which the credentials become
invalid.
:vartype expiry: ~datetime.datetime
:ivar name: Specifies name of the password which should be regenerated if any -- password1 or
password2. Known values are: "password1" and "password2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPasswordName
"""
_attribute_map = {
"token_id": {"key": "tokenId", "type": "str"},
"expiry": {"key": "expiry", "type": "iso-8601"},
"name": {"key": "name", "type": "str"},
}
def __init__(
self,
*,
token_id: Optional[str] = None,
expiry: Optional[datetime.datetime] = None,
name: Optional[Union[str, "_models.TokenPasswordName"]] = None,
**kwargs: Any
) -> None:
"""
:keyword token_id: The resource ID of the token for which credentials have to be generated.
:paramtype token_id: str
:keyword expiry: The expiry date of the generated credentials after which the credentials
become invalid.
:paramtype expiry: ~datetime.datetime
:keyword name: Specifies name of the password which should be regenerated if any -- password1
or password2. Known values are: "password1" and "password2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPasswordName
"""
super().__init__(**kwargs)
self.token_id = token_id
self.expiry = expiry
self.name = name
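# Minimal construction sketch (comments only; the token resource ID is hypothetical).
# Only password1 is regenerated here and the generated credentials expire at the
# given time:
#
#     params = GenerateCredentialsParameters(
#         token_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#                  "Microsoft.ContainerRegistry/registries/<acr>/tokens/<token>",
#         expiry=datetime.datetime(2022, 1, 1, tzinfo=datetime.timezone.utc),
#         name="password1",
#     )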
class GenerateCredentialsResult(_serialization.Model):
"""The response from the GenerateCredentials operation.
:ivar username: The username for a container registry.
:vartype username: str
:ivar passwords: The list of passwords for a container registry.
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPassword]
"""
_attribute_map = {
"username": {"key": "username", "type": "str"},
"passwords": {"key": "passwords", "type": "[TokenPassword]"},
}
def __init__(
self,
*,
username: Optional[str] = None,
passwords: Optional[List["_models.TokenPassword"]] = None,
**kwargs: Any
) -> None:
"""
:keyword username: The username for a container registry.
:paramtype username: str
:keyword passwords: The list of passwords for a container registry.
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPassword]
"""
super().__init__(**kwargs)
self.username = username
self.passwords = passwords
class IdentityProperties(_serialization.Model):
"""Managed identity for the resource.
:ivar principal_id: The principal ID of resource identity.
:vartype principal_id: str
:ivar tenant_id: The tenant ID of resource.
:vartype tenant_id: str
:ivar type: The identity type. Known values are: "SystemAssigned", "UserAssigned",
"SystemAssigned, UserAssigned", and "None".
:vartype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ResourceIdentityType
:ivar user_assigned_identities: The list of user identities associated with the resource. The
user identity
dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/
providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:vartype user_assigned_identities: dict[str,
~azure.mgmt.containerregistry.v2021_06_01_preview.models.UserIdentityProperties]
"""
_attribute_map = {
"principal_id": {"key": "principalId", "type": "str"},
"tenant_id": {"key": "tenantId", "type": "str"},
"type": {"key": "type", "type": "str"},
"user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserIdentityProperties}"},
}
def __init__(
self,
*,
principal_id: Optional[str] = None,
tenant_id: Optional[str] = None,
type: Optional[Union[str, "_models.ResourceIdentityType"]] = None,
user_assigned_identities: Optional[Dict[str, "_models.UserIdentityProperties"]] = None,
**kwargs: Any
) -> None:
"""
:keyword principal_id: The principal ID of resource identity.
:paramtype principal_id: str
:keyword tenant_id: The tenant ID of resource.
:paramtype tenant_id: str
:keyword type: The identity type. Known values are: "SystemAssigned", "UserAssigned",
"SystemAssigned, UserAssigned", and "None".
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ResourceIdentityType
:keyword user_assigned_identities: The list of user identities associated with the resource.
The user identity
dictionary key references will be ARM resource ids in the form:
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/
providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
:paramtype user_assigned_identities: dict[str,
~azure.mgmt.containerregistry.v2021_06_01_preview.models.UserIdentityProperties]
"""
super().__init__(**kwargs)
self.principal_id = principal_id
self.tenant_id = tenant_id
self.type = type
self.user_assigned_identities = user_assigned_identities
class ImportImageParameters(_serialization.Model):
"""ImportImageParameters.
All required parameters must be populated in order to send to Azure.
:ivar source: The source of the image. Required.
:vartype source: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportSource
:ivar target_tags: List of strings of the form repo[:tag]. When tag is omitted the source will
be used (or 'latest' if source tag is also omitted).
:vartype target_tags: list[str]
:ivar untagged_target_repositories: List of strings of repository names to do a manifest only
copy. No tag will be created.
:vartype untagged_target_repositories: list[str]
:ivar mode: When Force, any existing target tags will be overwritten. When NoForce, any
existing target tags will fail the operation before any copying begins. Known values are:
"NoForce" and "Force".
:vartype mode: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportMode
"""
_validation = {
"source": {"required": True},
}
_attribute_map = {
"source": {"key": "source", "type": "ImportSource"},
"target_tags": {"key": "targetTags", "type": "[str]"},
"untagged_target_repositories": {"key": "untaggedTargetRepositories", "type": "[str]"},
"mode": {"key": "mode", "type": "str"},
}
def __init__(
self,
*,
source: "_models.ImportSource",
target_tags: Optional[List[str]] = None,
untagged_target_repositories: Optional[List[str]] = None,
mode: Union[str, "_models.ImportMode"] = "NoForce",
**kwargs: Any
) -> None:
"""
:keyword source: The source of the image. Required.
:paramtype source: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportSource
:keyword target_tags: List of strings of the form repo[:tag]. When tag is omitted the source
will be used (or 'latest' if source tag is also omitted).
:paramtype target_tags: list[str]
:keyword untagged_target_repositories: List of strings of repository names to do a manifest
only copy. No tag will be created.
:paramtype untagged_target_repositories: list[str]
:keyword mode: When Force, any existing target tags will be overwritten. When NoForce, any
existing target tags will fail the operation before any copying begins. Known values are:
"NoForce" and "Force".
:paramtype mode: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportMode
"""
super().__init__(**kwargs)
self.source = source
self.target_tags = target_tags
self.untagged_target_repositories = untagged_target_repositories
self.mode = mode
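# Minimal construction sketch (comments only; registry names and tags are
# hypothetical) showing how ImportSource and ImportImageParameters fit together.
# The resulting object is typically passed to the registries' import-image
# operation:
#
#     params = ImportImageParameters(
#         source=ImportSource(
#             registry_uri="mcr.microsoft.com",
#             source_image="hello-world:latest",
#         ),
#         target_tags=["hello-world:imported"],
#         mode="NoForce",   # fail if any target tag already exists
#     )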
class ImportPipeline(ProxyResource):
"""An object that represents an import pipeline for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar location: The location of the import pipeline.
:vartype location: str
:ivar identity: The identity of the import pipeline.
:vartype identity: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:ivar source: The source properties of the import pipeline.
:vartype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipelineSourceProperties
:ivar trigger: The properties that describe the trigger of the import pipeline.
:vartype trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineTriggerProperties
:ivar options: The list of all options configured for the pipeline.
:vartype options: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineOptions]
:ivar provisioning_state: The provisioning state of the pipeline at the time the operation was
called. Known values are: "Creating", "Updating", "Deleting", "Succeeded", "Failed", and
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"location": {"key": "location", "type": "str"},
"identity": {"key": "identity", "type": "IdentityProperties"},
"source": {"key": "properties.source", "type": "ImportPipelineSourceProperties"},
"trigger": {"key": "properties.trigger", "type": "PipelineTriggerProperties"},
"options": {"key": "properties.options", "type": "[str]"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
def __init__(
self,
*,
location: Optional[str] = None,
identity: Optional["_models.IdentityProperties"] = None,
source: Optional["_models.ImportPipelineSourceProperties"] = None,
trigger: Optional["_models.PipelineTriggerProperties"] = None,
options: Optional[List[Union[str, "_models.PipelineOptions"]]] = None,
**kwargs: Any
) -> None:
"""
:keyword location: The location of the import pipeline.
:paramtype location: str
:keyword identity: The identity of the import pipeline.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:keyword source: The source properties of the import pipeline.
:paramtype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipelineSourceProperties
:keyword trigger: The properties that describe the trigger of the import pipeline.
:paramtype trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineTriggerProperties
:keyword options: The list of all options configured for the pipeline.
:paramtype options: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineOptions]
"""
super().__init__(**kwargs)
self.location = location
self.identity = identity
self.source = source
self.trigger = trigger
self.options = options
self.provisioning_state = None
class ImportPipelineListResult(_serialization.Model):
"""The result of a request to list import pipelines for a container registry.
:ivar value: The list of import pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of import pipelines.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipeline]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[ImportPipeline]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.ImportPipeline"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of import pipelines. Since this list may be incomplete, the nextLink
field should be used to request the next list of import pipelines.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipeline]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class ImportPipelineSourceProperties(_serialization.Model):
"""The properties of the import pipeline source.
All required parameters must be populated in order to send to Azure.
:ivar type: The type of source for the import pipeline. "AzureStorageBlobContainer"
:vartype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceType
:ivar uri: The source uri of the import pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:vartype uri: str
    :ivar key_vault_uri: The key vault secret uri to obtain the source storage SAS token.
Required.
:vartype key_vault_uri: str
"""
_validation = {
"key_vault_uri": {"required": True},
}
_attribute_map = {
"type": {"key": "type", "type": "str"},
"uri": {"key": "uri", "type": "str"},
"key_vault_uri": {"key": "keyVaultUri", "type": "str"},
}
def __init__(
self,
*,
key_vault_uri: str,
type: Union[str, "_models.PipelineSourceType"] = "AzureStorageBlobContainer",
uri: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword type: The type of source for the import pipeline. "AzureStorageBlobContainer"
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceType
:keyword uri: The source uri of the import pipeline.
When 'AzureStorageBlob': "https://accountName.blob.core.windows.net/containerName/blobName"
When 'AzureStorageBlobContainer': "https://accountName.blob.core.windows.net/containerName".
:paramtype uri: str
        :keyword key_vault_uri: The key vault secret uri to obtain the source storage SAS token.
Required.
:paramtype key_vault_uri: str
"""
super().__init__(**kwargs)
self.type = type
self.uri = uri
self.key_vault_uri = key_vault_uri
class ImportSource(_serialization.Model):
"""ImportSource.
All required parameters must be populated in order to send to Azure.
:ivar resource_id: The resource identifier of the source Azure Container Registry.
:vartype resource_id: str
:ivar registry_uri: The address of the source registry (e.g. 'mcr.microsoft.com').
:vartype registry_uri: str
:ivar credentials: Credentials used when importing from a registry uri.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportSourceCredentials
:ivar source_image: Repository name of the source image.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123'). Required.
:vartype source_image: str
"""
_validation = {
"source_image": {"required": True},
}
_attribute_map = {
"resource_id": {"key": "resourceId", "type": "str"},
"registry_uri": {"key": "registryUri", "type": "str"},
"credentials": {"key": "credentials", "type": "ImportSourceCredentials"},
"source_image": {"key": "sourceImage", "type": "str"},
}
def __init__(
self,
*,
source_image: str,
resource_id: Optional[str] = None,
registry_uri: Optional[str] = None,
credentials: Optional["_models.ImportSourceCredentials"] = None,
**kwargs: Any
) -> None:
"""
:keyword resource_id: The resource identifier of the source Azure Container Registry.
:paramtype resource_id: str
:keyword registry_uri: The address of the source registry (e.g. 'mcr.microsoft.com').
:paramtype registry_uri: str
:keyword credentials: Credentials used when importing from a registry uri.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportSourceCredentials
:keyword source_image: Repository name of the source image.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123'). Required.
:paramtype source_image: str
"""
super().__init__(**kwargs)
self.resource_id = resource_id
self.registry_uri = registry_uri
self.credentials = credentials
self.source_image = source_image
class ImportSourceCredentials(_serialization.Model):
"""ImportSourceCredentials.
All required parameters must be populated in order to send to Azure.
:ivar username: The username to authenticate with the source registry.
:vartype username: str
:ivar password: The password used to authenticate with the source registry. Required.
:vartype password: str
"""
_validation = {
"password": {"required": True},
}
_attribute_map = {
"username": {"key": "username", "type": "str"},
"password": {"key": "password", "type": "str"},
}
def __init__(self, *, password: str, username: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword username: The username to authenticate with the source registry.
:paramtype username: str
:keyword password: The password used to authenticate with the source registry. Required.
:paramtype password: str
"""
super().__init__(**kwargs)
self.username = username
self.password = password
class InnerErrorDescription(_serialization.Model):
"""inner error.
All required parameters must be populated in order to send to Azure.
:ivar code: error code. Required.
:vartype code: str
:ivar message: error message. Required.
:vartype message: str
:ivar target: target of the particular error.
:vartype target: str
"""
_validation = {
"code": {"required": True},
"message": {"required": True},
}
_attribute_map = {
"code": {"key": "code", "type": "str"},
"message": {"key": "message", "type": "str"},
"target": {"key": "target", "type": "str"},
}
def __init__(self, *, code: str, message: str, target: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword code: error code. Required.
:paramtype code: str
:keyword message: error message. Required.
:paramtype message: str
:keyword target: target of the particular error.
:paramtype target: str
"""
super().__init__(**kwargs)
self.code = code
self.message = message
self.target = target
class IPRule(_serialization.Model):
"""IP rule with specific IP or IP range in CIDR format.
All required parameters must be populated in order to send to Azure.
:ivar action: The action of IP ACL rule. "Allow"
:vartype action: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Action
:ivar ip_address_or_range: Specifies the IP or IP range in CIDR format. Only IPV4 address is
allowed. Required.
:vartype ip_address_or_range: str
"""
_validation = {
"ip_address_or_range": {"required": True},
}
_attribute_map = {
"action": {"key": "action", "type": "str"},
"ip_address_or_range": {"key": "value", "type": "str"},
}
def __init__(
self, *, ip_address_or_range: str, action: Optional[Union[str, "_models.Action"]] = None, **kwargs: Any
) -> None:
"""
:keyword action: The action of IP ACL rule. "Allow"
:paramtype action: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Action
:keyword ip_address_or_range: Specifies the IP or IP range in CIDR format. Only IPV4 address is
allowed. Required.
:paramtype ip_address_or_range: str
"""
super().__init__(**kwargs)
self.action = action
self.ip_address_or_range = ip_address_or_range
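# Minimal construction sketch (comments only; the address range is an example
# value). Only IPv4 addresses or CIDR ranges are accepted:
#
#     rule = IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")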
class KeyVaultProperties(_serialization.Model):
"""KeyVaultProperties.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar key_identifier: Key vault uri to access the encryption key.
:vartype key_identifier: str
:ivar versioned_key_identifier: The fully qualified key identifier that includes the version of
the key that is actually used for encryption.
:vartype versioned_key_identifier: str
:ivar identity: The client id of the identity which will be used to access key vault.
:vartype identity: str
:ivar key_rotation_enabled: Auto key rotation status for a CMK enabled registry.
:vartype key_rotation_enabled: bool
:ivar last_key_rotation_timestamp: Timestamp of the last successful key rotation.
:vartype last_key_rotation_timestamp: ~datetime.datetime
"""
_validation = {
"versioned_key_identifier": {"readonly": True},
"key_rotation_enabled": {"readonly": True},
"last_key_rotation_timestamp": {"readonly": True},
}
_attribute_map = {
"key_identifier": {"key": "keyIdentifier", "type": "str"},
"versioned_key_identifier": {"key": "versionedKeyIdentifier", "type": "str"},
"identity": {"key": "identity", "type": "str"},
"key_rotation_enabled": {"key": "keyRotationEnabled", "type": "bool"},
"last_key_rotation_timestamp": {"key": "lastKeyRotationTimestamp", "type": "iso-8601"},
}
def __init__(self, *, key_identifier: Optional[str] = None, identity: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword key_identifier: Key vault uri to access the encryption key.
:paramtype key_identifier: str
:keyword identity: The client id of the identity which will be used to access key vault.
:paramtype identity: str
"""
super().__init__(**kwargs)
self.key_identifier = key_identifier
self.versioned_key_identifier = None
self.identity = identity
self.key_rotation_enabled = None
self.last_key_rotation_timestamp = None
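
# Illustrative usage sketch (not part of the generated model code): customer-managed key
# settings pointing at a key vault key. The vault URI and client id below are hypothetical.
def _example_key_vault_properties() -> "KeyVaultProperties":
    return KeyVaultProperties(
        key_identifier="https://myvault.vault.azure.net/keys/acr-cmk",
        identity="00000000-0000-0000-0000-000000000000",
    )
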
class LoggingProperties(_serialization.Model):
"""The logging properties of the connected registry.
:ivar log_level: The verbosity of logs persisted on the connected registry. Known values are:
"Debug", "Information", "Warning", "Error", and "None".
:vartype log_level: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LogLevel
:ivar audit_log_status: Indicates whether audit logs are enabled on the connected registry.
Known values are: "Enabled" and "Disabled".
:vartype audit_log_status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.AuditLogStatus
"""
_attribute_map = {
"log_level": {"key": "logLevel", "type": "str"},
"audit_log_status": {"key": "auditLogStatus", "type": "str"},
}
def __init__(
self,
*,
log_level: Union[str, "_models.LogLevel"] = "Information",
audit_log_status: Union[str, "_models.AuditLogStatus"] = "Disabled",
**kwargs: Any
) -> None:
"""
:keyword log_level: The verbosity of logs persisted on the connected registry. Known values
are: "Debug", "Information", "Warning", "Error", and "None".
:paramtype log_level: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.LogLevel
:keyword audit_log_status: Indicates whether audit logs are enabled on the connected registry.
Known values are: "Enabled" and "Disabled".
:paramtype audit_log_status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.AuditLogStatus
"""
super().__init__(**kwargs)
self.log_level = log_level
self.audit_log_status = audit_log_status
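
# Illustrative usage sketch (not part of the generated model code): connected-registry logging
# with verbose log output and audit logging enabled (the defaults are "Information"/"Disabled").
def _example_logging_properties() -> "LoggingProperties":
    return LoggingProperties(log_level="Debug", audit_log_status="Enabled")
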
class LoginServerProperties(_serialization.Model):
"""The login server properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar host: The host of the connected registry. Can be FQDN or IP.
:vartype host: str
:ivar tls: The TLS properties of the connected registry login server.
:vartype tls: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TlsProperties
"""
_validation = {
"host": {"readonly": True},
"tls": {"readonly": True},
}
_attribute_map = {
"host": {"key": "host", "type": "str"},
"tls": {"key": "tls", "type": "TlsProperties"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.host = None
self.tls = None
class NetworkRuleSet(_serialization.Model):
"""The network rule set for a container registry.
All required parameters must be populated in order to send to Azure.
:ivar default_action: The default action of allow or deny when no other rules match. Known
values are: "Allow" and "Deny".
:vartype default_action: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.DefaultAction
:ivar virtual_network_rules: The virtual network rules.
:vartype virtual_network_rules:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.VirtualNetworkRule]
:ivar ip_rules: The IP ACL rules.
:vartype ip_rules: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.IPRule]
"""
_validation = {
"default_action": {"required": True},
}
_attribute_map = {
"default_action": {"key": "defaultAction", "type": "str"},
"virtual_network_rules": {"key": "virtualNetworkRules", "type": "[VirtualNetworkRule]"},
"ip_rules": {"key": "ipRules", "type": "[IPRule]"},
}
def __init__(
self,
*,
default_action: Union[str, "_models.DefaultAction"] = "Allow",
virtual_network_rules: Optional[List["_models.VirtualNetworkRule"]] = None,
ip_rules: Optional[List["_models.IPRule"]] = None,
**kwargs: Any
) -> None:
"""
:keyword default_action: The default action of allow or deny when no other rules match. Known
values are: "Allow" and "Deny".
:paramtype default_action: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.DefaultAction
:keyword virtual_network_rules: The virtual network rules.
:paramtype virtual_network_rules:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.VirtualNetworkRule]
:keyword ip_rules: The IP ACL rules.
:paramtype ip_rules: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.IPRule]
"""
super().__init__(**kwargs)
self.default_action = default_action
self.virtual_network_rules = virtual_network_rules
self.ip_rules = ip_rules
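
# Illustrative usage sketch (not part of the generated model code): a network rule set that
# denies traffic by default and allows one sample CIDR range through an IPRule.
def _example_network_rule_set() -> "NetworkRuleSet":
    return NetworkRuleSet(
        default_action="Deny",
        ip_rules=[IPRule(ip_address_or_range="203.0.113.0/24", action="Allow")],
    )
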
class OperationDefinition(_serialization.Model):
"""The definition of a container registry operation.
:ivar origin: The origin information of the container registry operation.
:vartype origin: str
:ivar name: Operation name: {provider}/{resource}/{operation}.
:vartype name: str
:ivar display: The display information for the container registry operation.
:vartype display:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationDisplayDefinition
:ivar is_data_action: This property indicates if the operation is an action or a data action
ref:
https://docs.microsoft.com/en-us/azure/role-based-access-control/role-definitions#management-and-data-operations.
:vartype is_data_action: bool
:ivar service_specification: The definition of Azure Monitoring service.
:vartype service_specification:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationServiceSpecificationDefinition
"""
_attribute_map = {
"origin": {"key": "origin", "type": "str"},
"name": {"key": "name", "type": "str"},
"display": {"key": "display", "type": "OperationDisplayDefinition"},
"is_data_action": {"key": "isDataAction", "type": "bool"},
"service_specification": {
"key": "properties.serviceSpecification",
"type": "OperationServiceSpecificationDefinition",
},
}
def __init__(
self,
*,
origin: Optional[str] = None,
name: Optional[str] = None,
display: Optional["_models.OperationDisplayDefinition"] = None,
is_data_action: Optional[bool] = None,
service_specification: Optional["_models.OperationServiceSpecificationDefinition"] = None,
**kwargs: Any
) -> None:
"""
:keyword origin: The origin information of the container registry operation.
:paramtype origin: str
:keyword name: Operation name: {provider}/{resource}/{operation}.
:paramtype name: str
:keyword display: The display information for the container registry operation.
:paramtype display:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationDisplayDefinition
:keyword is_data_action: This property indicates if the operation is an action or a data action
ref:
https://docs.microsoft.com/en-us/azure/role-based-access-control/role-definitions#management-and-data-operations.
:paramtype is_data_action: bool
:keyword service_specification: The definition of Azure Monitoring service.
:paramtype service_specification:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationServiceSpecificationDefinition
"""
super().__init__(**kwargs)
self.origin = origin
self.name = name
self.display = display
self.is_data_action = is_data_action
self.service_specification = service_specification
class OperationDisplayDefinition(_serialization.Model):
"""The display information for a container registry operation.
:ivar provider: The resource provider name: Microsoft.ContainerRegistry.
:vartype provider: str
:ivar resource: The resource on which the operation is performed.
:vartype resource: str
:ivar operation: The operation that users can perform.
:vartype operation: str
:ivar description: The description for the operation.
:vartype description: str
"""
_attribute_map = {
"provider": {"key": "provider", "type": "str"},
"resource": {"key": "resource", "type": "str"},
"operation": {"key": "operation", "type": "str"},
"description": {"key": "description", "type": "str"},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword provider: The resource provider name: Microsoft.ContainerRegistry.
:paramtype provider: str
:keyword resource: The resource on which the operation is performed.
:paramtype resource: str
:keyword operation: The operation that users can perform.
:paramtype operation: str
:keyword description: The description for the operation.
:paramtype description: str
"""
super().__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class OperationListResult(_serialization.Model):
"""The result of a request to list container registry operations.
:ivar value: The list of container registry operations. Since this list may be incomplete, the
nextLink field should be used to request the next list of operations.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationDefinition]
:ivar next_link: The URI that can be used to request the next list of container registry
operations.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[OperationDefinition]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self,
*,
value: Optional[List["_models.OperationDefinition"]] = None,
next_link: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword value: The list of container registry operations. Since this list may be incomplete,
the nextLink field should be used to request the next list of operations.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationDefinition]
:keyword next_link: The URI that can be used to request the next list of container registry
operations.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class OperationLogSpecificationDefinition(_serialization.Model):
"""The definition of Azure Monitoring log.
:ivar name: Log name.
:vartype name: str
:ivar display_name: Log display name.
:vartype display_name: str
:ivar blob_duration: Log blob duration.
:vartype blob_duration: str
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
"display_name": {"key": "displayName", "type": "str"},
"blob_duration": {"key": "blobDuration", "type": "str"},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
blob_duration: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword name: Log name.
:paramtype name: str
:keyword display_name: Log display name.
:paramtype display_name: str
:keyword blob_duration: Log blob duration.
:paramtype blob_duration: str
"""
super().__init__(**kwargs)
self.name = name
self.display_name = display_name
self.blob_duration = blob_duration
class OperationMetricSpecificationDefinition(_serialization.Model):
"""The definition of Azure Monitoring metric.
:ivar name: Metric name.
:vartype name: str
:ivar display_name: Metric display name.
:vartype display_name: str
:ivar display_description: Metric description.
:vartype display_description: str
:ivar unit: Metric unit.
:vartype unit: str
:ivar aggregation_type: Metric aggregation type.
:vartype aggregation_type: str
:ivar internal_metric_name: Internal metric name.
:vartype internal_metric_name: str
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
"display_name": {"key": "displayName", "type": "str"},
"display_description": {"key": "displayDescription", "type": "str"},
"unit": {"key": "unit", "type": "str"},
"aggregation_type": {"key": "aggregationType", "type": "str"},
"internal_metric_name": {"key": "internalMetricName", "type": "str"},
}
def __init__(
self,
*,
name: Optional[str] = None,
display_name: Optional[str] = None,
display_description: Optional[str] = None,
unit: Optional[str] = None,
aggregation_type: Optional[str] = None,
internal_metric_name: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword name: Metric name.
:paramtype name: str
:keyword display_name: Metric display name.
:paramtype display_name: str
:keyword display_description: Metric description.
:paramtype display_description: str
:keyword unit: Metric unit.
:paramtype unit: str
:keyword aggregation_type: Metric aggregation type.
:paramtype aggregation_type: str
:keyword internal_metric_name: Internal metric name.
:paramtype internal_metric_name: str
"""
super().__init__(**kwargs)
self.name = name
self.display_name = display_name
self.display_description = display_description
self.unit = unit
self.aggregation_type = aggregation_type
self.internal_metric_name = internal_metric_name
class OperationServiceSpecificationDefinition(_serialization.Model):
"""The definition of Azure Monitoring list.
:ivar metric_specifications: A list of Azure Monitoring metrics definition.
:vartype metric_specifications:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationMetricSpecificationDefinition]
:ivar log_specifications: A list of Azure Monitoring log definitions.
:vartype log_specifications:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationLogSpecificationDefinition]
"""
_attribute_map = {
"metric_specifications": {"key": "metricSpecifications", "type": "[OperationMetricSpecificationDefinition]"},
"log_specifications": {"key": "logSpecifications", "type": "[OperationLogSpecificationDefinition]"},
}
def __init__(
self,
*,
metric_specifications: Optional[List["_models.OperationMetricSpecificationDefinition"]] = None,
log_specifications: Optional[List["_models.OperationLogSpecificationDefinition"]] = None,
**kwargs: Any
) -> None:
"""
:keyword metric_specifications: A list of Azure Monitoring metrics definition.
:paramtype metric_specifications:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationMetricSpecificationDefinition]
:keyword log_specifications: A list of Azure Monitoring log definitions.
:paramtype log_specifications:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.OperationLogSpecificationDefinition]
"""
super().__init__(**kwargs)
self.metric_specifications = metric_specifications
self.log_specifications = log_specifications
class ParentProperties(_serialization.Model):
"""The properties of the connected registry parent.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID of the parent to which the connected registry will be associated.
:vartype id: str
:ivar sync_properties: The sync properties of the connected registry with its parent. Required.
:vartype sync_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.SyncProperties
"""
_validation = {
"sync_properties": {"required": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"sync_properties": {"key": "syncProperties", "type": "SyncProperties"},
}
def __init__(
self,
*,
sync_properties: "_models.SyncProperties",
id: Optional[str] = None, # pylint: disable=redefined-builtin
**kwargs: Any
) -> None:
"""
:keyword id: The resource ID of the parent to which the connected registry will be associated.
:paramtype id: str
:keyword sync_properties: The sync properties of the connected registry with its parent.
Required.
:paramtype sync_properties:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.SyncProperties
"""
super().__init__(**kwargs)
self.id = id
self.sync_properties = sync_properties
class PipelineRun(ProxyResource):
"""An object that represents a pipeline run for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar provisioning_state: The provisioning state of a pipeline run. Known values are:
"Creating", "Updating", "Deleting", "Succeeded", "Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar request: The request parameters for a pipeline run.
:vartype request: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunRequest
:ivar response: The response of a pipeline run.
:vartype response: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunResponse
:ivar force_update_tag: How the pipeline run should be forced to recreate even if the pipeline
run configuration has not changed.
:vartype force_update_tag: str
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
"response": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"request": {"key": "properties.request", "type": "PipelineRunRequest"},
"response": {"key": "properties.response", "type": "PipelineRunResponse"},
"force_update_tag": {"key": "properties.forceUpdateTag", "type": "str"},
}
def __init__(
self,
*,
request: Optional["_models.PipelineRunRequest"] = None,
force_update_tag: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword request: The request parameters for a pipeline run.
:paramtype request: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunRequest
:keyword force_update_tag: How the pipeline run should be forced to recreate even if the
pipeline run configuration has not changed.
:paramtype force_update_tag: str
"""
super().__init__(**kwargs)
self.provisioning_state = None
self.request = request
self.response = None
self.force_update_tag = force_update_tag
class PipelineRunListResult(_serialization.Model):
"""The result of a request to list pipeline runs for a container registry.
:ivar value: The list of pipeline runs. Since this list may be incomplete, the nextLink field
should be used to request the next list of pipeline runs.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRun]
:ivar next_link: The URI that can be used to request the next list of pipeline runs.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[PipelineRun]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.PipelineRun"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of pipeline runs. Since this list may be incomplete, the nextLink
field should be used to request the next list of pipeline runs.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRun]
:keyword next_link: The URI that can be used to request the next list of pipeline runs.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class PipelineRunRequest(_serialization.Model):
"""The request properties provided for a pipeline run.
:ivar pipeline_resource_id: The resource ID of the pipeline to run.
:vartype pipeline_resource_id: str
:ivar artifacts: List of source artifacts to be transferred by the pipeline.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:vartype artifacts: list[str]
:ivar source: The source properties of the pipeline run.
:vartype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunSourceProperties
:ivar target: The target properties of the pipeline run.
:vartype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunTargetProperties
:ivar catalog_digest: The digest of the tar used to transfer the artifacts.
:vartype catalog_digest: str
"""
_attribute_map = {
"pipeline_resource_id": {"key": "pipelineResourceId", "type": "str"},
"artifacts": {"key": "artifacts", "type": "[str]"},
"source": {"key": "source", "type": "PipelineRunSourceProperties"},
"target": {"key": "target", "type": "PipelineRunTargetProperties"},
"catalog_digest": {"key": "catalogDigest", "type": "str"},
}
def __init__(
self,
*,
pipeline_resource_id: Optional[str] = None,
artifacts: Optional[List[str]] = None,
source: Optional["_models.PipelineRunSourceProperties"] = None,
target: Optional["_models.PipelineRunTargetProperties"] = None,
catalog_digest: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword pipeline_resource_id: The resource ID of the pipeline to run.
:paramtype pipeline_resource_id: str
:keyword artifacts: List of source artifacts to be transferred by the pipeline.
Specify an image by repository ('hello-world'). This will use the 'latest' tag.
Specify an image by tag ('hello-world:latest').
Specify an image by sha256-based manifest digest ('hello-world@sha256:abc123').
:paramtype artifacts: list[str]
:keyword source: The source properties of the pipeline run.
:paramtype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunSourceProperties
:keyword target: The target properties of the pipeline run.
:paramtype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunTargetProperties
:keyword catalog_digest: The digest of the tar used to transfer the artifacts.
:paramtype catalog_digest: str
"""
super().__init__(**kwargs)
self.pipeline_resource_id = pipeline_resource_id
self.artifacts = artifacts
self.source = source
self.target = target
self.catalog_digest = catalog_digest
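
# Illustrative usage sketch (not part of the generated model code): a pipeline run request that
# transfers two artifacts through an existing pipeline. The resource ID segments in angle
# brackets are placeholders.
def _example_pipeline_run_request() -> "PipelineRunRequest":
    return PipelineRunRequest(
        pipeline_resource_id=(
            "/subscriptions/<subscription-id>/resourceGroups/<resource-group>/providers/"
            "Microsoft.ContainerRegistry/registries/<registry>/exportPipelines/<pipeline>"
        ),
        # Artifacts may be addressed by repository, by tag, or by sha256 manifest digest.
        artifacts=["hello-world:latest", "hello-world@sha256:abc123"],
    )
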
class PipelineRunResponse(_serialization.Model):
"""The response properties returned for a pipeline run.
:ivar status: The current status of the pipeline run.
:vartype status: str
:ivar imported_artifacts: The artifacts imported in the pipeline run.
:vartype imported_artifacts: list[str]
:ivar progress: The current progress of the copy operation.
:vartype progress: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProgressProperties
:ivar start_time: The time the pipeline run started.
:vartype start_time: ~datetime.datetime
:ivar finish_time: The time the pipeline run finished.
:vartype finish_time: ~datetime.datetime
:ivar source: The source of the pipeline run.
:vartype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipelineSourceProperties
:ivar target: The target of the pipeline run.
:vartype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipelineTargetProperties
:ivar catalog_digest: The digest of the tar used to transfer the artifacts.
:vartype catalog_digest: str
:ivar trigger: The trigger that caused the pipeline run.
:vartype trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineTriggerDescriptor
:ivar pipeline_run_error_message: The detailed error message for the pipeline run in the case
of failure.
:vartype pipeline_run_error_message: str
"""
_attribute_map = {
"status": {"key": "status", "type": "str"},
"imported_artifacts": {"key": "importedArtifacts", "type": "[str]"},
"progress": {"key": "progress", "type": "ProgressProperties"},
"start_time": {"key": "startTime", "type": "iso-8601"},
"finish_time": {"key": "finishTime", "type": "iso-8601"},
"source": {"key": "source", "type": "ImportPipelineSourceProperties"},
"target": {"key": "target", "type": "ExportPipelineTargetProperties"},
"catalog_digest": {"key": "catalogDigest", "type": "str"},
"trigger": {"key": "trigger", "type": "PipelineTriggerDescriptor"},
"pipeline_run_error_message": {"key": "pipelineRunErrorMessage", "type": "str"},
}
def __init__(
self,
*,
status: Optional[str] = None,
imported_artifacts: Optional[List[str]] = None,
progress: Optional["_models.ProgressProperties"] = None,
start_time: Optional[datetime.datetime] = None,
finish_time: Optional[datetime.datetime] = None,
source: Optional["_models.ImportPipelineSourceProperties"] = None,
target: Optional["_models.ExportPipelineTargetProperties"] = None,
catalog_digest: Optional[str] = None,
trigger: Optional["_models.PipelineTriggerDescriptor"] = None,
pipeline_run_error_message: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword status: The current status of the pipeline run.
:paramtype status: str
:keyword imported_artifacts: The artifacts imported in the pipeline run.
:paramtype imported_artifacts: list[str]
:keyword progress: The current progress of the copy operation.
:paramtype progress:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProgressProperties
:keyword start_time: The time the pipeline run started.
:paramtype start_time: ~datetime.datetime
:keyword finish_time: The time the pipeline run finished.
:paramtype finish_time: ~datetime.datetime
:keyword source: The source of the pipeline run.
:paramtype source:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ImportPipelineSourceProperties
:keyword target: The target of the pipeline run.
:paramtype target:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPipelineTargetProperties
:keyword catalog_digest: The digest of the tar used to transfer the artifacts.
:paramtype catalog_digest: str
:keyword trigger: The trigger that caused the pipeline run.
:paramtype trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineTriggerDescriptor
:keyword pipeline_run_error_message: The detailed error message for the pipeline run in the
case of failure.
:paramtype pipeline_run_error_message: str
"""
super().__init__(**kwargs)
self.status = status
self.imported_artifacts = imported_artifacts
self.progress = progress
self.start_time = start_time
self.finish_time = finish_time
self.source = source
self.target = target
self.catalog_digest = catalog_digest
self.trigger = trigger
self.pipeline_run_error_message = pipeline_run_error_message
class PipelineRunSourceProperties(_serialization.Model):
"""PipelineRunSourceProperties.
:ivar type: The type of the source. "AzureStorageBlob"
:vartype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunSourceType
:ivar name: The name of the source.
:vartype name: str
"""
_attribute_map = {
"type": {"key": "type", "type": "str"},
"name": {"key": "name", "type": "str"},
}
def __init__(
self,
*,
type: Union[str, "_models.PipelineRunSourceType"] = "AzureStorageBlob",
name: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword type: The type of the source. "AzureStorageBlob"
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunSourceType
:keyword name: The name of the source.
:paramtype name: str
"""
super().__init__(**kwargs)
self.type = type
self.name = name
class PipelineRunTargetProperties(_serialization.Model):
"""PipelineRunTargetProperties.
:ivar type: The type of the target. "AzureStorageBlob"
:vartype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunTargetType
:ivar name: The name of the target.
:vartype name: str
"""
_attribute_map = {
"type": {"key": "type", "type": "str"},
"name": {"key": "name", "type": "str"},
}
def __init__(
self,
*,
type: Union[str, "_models.PipelineRunTargetType"] = "AzureStorageBlob",
name: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword type: The type of the target. "AzureStorageBlob"
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineRunTargetType
:keyword name: The name of the target.
:paramtype name: str
"""
super().__init__(**kwargs)
self.type = type
self.name = name
class PipelineSourceTriggerDescriptor(_serialization.Model):
"""PipelineSourceTriggerDescriptor.
:ivar timestamp: The timestamp when the source update happened.
:vartype timestamp: ~datetime.datetime
"""
_attribute_map = {
"timestamp": {"key": "timestamp", "type": "iso-8601"},
}
def __init__(self, *, timestamp: Optional[datetime.datetime] = None, **kwargs: Any) -> None:
"""
:keyword timestamp: The timestamp when the source update happened.
:paramtype timestamp: ~datetime.datetime
"""
super().__init__(**kwargs)
self.timestamp = timestamp
class PipelineSourceTriggerProperties(_serialization.Model):
"""PipelineSourceTriggerProperties.
All required parameters must be populated in order to send to Azure.
:ivar status: The current status of the source trigger. Known values are: "Enabled" and
"Disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TriggerStatus
"""
_validation = {
"status": {"required": True},
}
_attribute_map = {
"status": {"key": "status", "type": "str"},
}
def __init__(self, *, status: Union[str, "_models.TriggerStatus"] = "Enabled", **kwargs: Any) -> None:
"""
:keyword status: The current status of the source trigger. Known values are: "Enabled" and
"Disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TriggerStatus
"""
super().__init__(**kwargs)
self.status = status
class PipelineTriggerDescriptor(_serialization.Model):
"""PipelineTriggerDescriptor.
:ivar source_trigger: The source trigger that caused the pipeline run.
:vartype source_trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceTriggerDescriptor
"""
_attribute_map = {
"source_trigger": {"key": "sourceTrigger", "type": "PipelineSourceTriggerDescriptor"},
}
def __init__(
self, *, source_trigger: Optional["_models.PipelineSourceTriggerDescriptor"] = None, **kwargs: Any
) -> None:
"""
:keyword source_trigger: The source trigger that caused the pipeline run.
:paramtype source_trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceTriggerDescriptor
"""
super().__init__(**kwargs)
self.source_trigger = source_trigger
class PipelineTriggerProperties(_serialization.Model):
"""PipelineTriggerProperties.
:ivar source_trigger: The source trigger properties of the pipeline.
:vartype source_trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceTriggerProperties
"""
_attribute_map = {
"source_trigger": {"key": "sourceTrigger", "type": "PipelineSourceTriggerProperties"},
}
def __init__(
self, *, source_trigger: Optional["_models.PipelineSourceTriggerProperties"] = None, **kwargs: Any
) -> None:
"""
:keyword source_trigger: The source trigger properties of the pipeline.
:paramtype source_trigger:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PipelineSourceTriggerProperties
"""
super().__init__(**kwargs)
self.source_trigger = source_trigger
class Policies(_serialization.Model):
"""The policies for a container registry.
:ivar quarantine_policy: The quarantine policy for a container registry.
:vartype quarantine_policy:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.QuarantinePolicy
:ivar trust_policy: The content trust policy for a container registry.
:vartype trust_policy: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TrustPolicy
:ivar retention_policy: The retention policy for a container registry.
:vartype retention_policy:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.RetentionPolicy
:ivar export_policy: The export policy for a container registry.
:vartype export_policy: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPolicy
"""
_attribute_map = {
"quarantine_policy": {"key": "quarantinePolicy", "type": "QuarantinePolicy"},
"trust_policy": {"key": "trustPolicy", "type": "TrustPolicy"},
"retention_policy": {"key": "retentionPolicy", "type": "RetentionPolicy"},
"export_policy": {"key": "exportPolicy", "type": "ExportPolicy"},
}
def __init__(
self,
*,
quarantine_policy: Optional["_models.QuarantinePolicy"] = None,
trust_policy: Optional["_models.TrustPolicy"] = None,
retention_policy: Optional["_models.RetentionPolicy"] = None,
export_policy: Optional["_models.ExportPolicy"] = None,
**kwargs: Any
) -> None:
"""
:keyword quarantine_policy: The quarantine policy for a container registry.
:paramtype quarantine_policy:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.QuarantinePolicy
:keyword trust_policy: The content trust policy for a container registry.
:paramtype trust_policy: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TrustPolicy
:keyword retention_policy: The retention policy for a container registry.
:paramtype retention_policy:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.RetentionPolicy
:keyword export_policy: The export policy for a container registry.
:paramtype export_policy: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.ExportPolicy
"""
super().__init__(**kwargs)
self.quarantine_policy = quarantine_policy
self.trust_policy = trust_policy
self.retention_policy = retention_policy
self.export_policy = export_policy
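
# Illustrative usage sketch (not part of the generated model code): registry policies that
# enable only the quarantine policy; the remaining policies keep their service-side defaults.
# QuarantinePolicy is defined further down in this module, so it is resolved at call time.
def _example_policies() -> "Policies":
    return Policies(quarantine_policy=QuarantinePolicy(status="enabled"))
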
class PrivateEndpoint(_serialization.Model):
"""The Private Endpoint resource.
    :ivar id: This is the private endpoint resource created with the Microsoft.Network resource
     provider.
:vartype id: str
"""
_attribute_map = {
"id": {"key": "id", "type": "str"},
}
def __init__(self, *, id: Optional[str] = None, **kwargs: Any) -> None: # pylint: disable=redefined-builtin
"""
        :keyword id: This is the private endpoint resource created with the Microsoft.Network
         resource provider.
:paramtype id: str
"""
super().__init__(**kwargs)
self.id = id
class PrivateEndpointConnection(ProxyResource):
"""An object that represents a private endpoint connection for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar private_endpoint: The resource of private endpoint.
:vartype private_endpoint:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateEndpoint
:ivar private_link_service_connection_state: A collection of information about the state of the
connection between service consumer and provider.
:vartype private_link_service_connection_state:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateLinkServiceConnectionState
:ivar provisioning_state: The provisioning state of private endpoint connection resource. Known
values are: "Creating", "Updating", "Deleting", "Succeeded", "Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpoint"},
"private_link_service_connection_state": {
"key": "properties.privateLinkServiceConnectionState",
"type": "PrivateLinkServiceConnectionState",
},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
def __init__(
self,
*,
private_endpoint: Optional["_models.PrivateEndpoint"] = None,
private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None,
**kwargs: Any
) -> None:
"""
:keyword private_endpoint: The resource of private endpoint.
:paramtype private_endpoint:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateEndpoint
:keyword private_link_service_connection_state: A collection of information about the state of
the connection between service consumer and provider.
:paramtype private_link_service_connection_state:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateLinkServiceConnectionState
"""
super().__init__(**kwargs)
self.private_endpoint = private_endpoint
self.private_link_service_connection_state = private_link_service_connection_state
self.provisioning_state = None
class PrivateEndpointConnectionListResult(_serialization.Model):
"""The result of a request to list private endpoint connections for a container registry.
:ivar value: The list of private endpoint connections. Since this list may be incomplete, the
nextLink field should be used to request the next list of private endpoint connections.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateEndpointConnection]
:ivar next_link: The URI that can be used to request the next list of private endpoint
connections.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[PrivateEndpointConnection]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self,
*,
value: Optional[List["_models.PrivateEndpointConnection"]] = None,
next_link: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword value: The list of private endpoint connections. Since this list may be incomplete,
the nextLink field should be used to request the next list of private endpoint connections.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateEndpointConnection]
:keyword next_link: The URI that can be used to request the next list of private endpoint
connections.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class PrivateLinkResource(_serialization.Model):
"""A resource that supports private link capabilities.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The resource type is private link resource.
:vartype type: str
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar group_id: The private link resource group id.
:vartype group_id: str
:ivar required_members: The private link resource required member names.
:vartype required_members: list[str]
    :ivar required_zone_names: The private link resource private link DNS zone names.
:vartype required_zone_names: list[str]
"""
_validation = {
"type": {"readonly": True},
}
_attribute_map = {
"type": {"key": "type", "type": "str"},
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"group_id": {"key": "properties.groupId", "type": "str"},
"required_members": {"key": "properties.requiredMembers", "type": "[str]"},
"required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"},
}
def __init__(
self,
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
name: Optional[str] = None,
group_id: Optional[str] = None,
required_members: Optional[List[str]] = None,
required_zone_names: Optional[List[str]] = None,
**kwargs: Any
) -> None:
"""
:keyword id: The resource ID.
:paramtype id: str
:keyword name: The name of the resource.
:paramtype name: str
:keyword group_id: The private link resource group id.
:paramtype group_id: str
:keyword required_members: The private link resource required member names.
:paramtype required_members: list[str]
        :keyword required_zone_names: The private link resource private link DNS zone names.
:paramtype required_zone_names: list[str]
"""
super().__init__(**kwargs)
self.type = None
self.id = id
self.name = name
self.group_id = group_id
self.required_members = required_members
self.required_zone_names = required_zone_names
class PrivateLinkResourceListResult(_serialization.Model):
"""The result of a request to list private link resources for a container registry.
:ivar value: The list of private link resources. Since this list may be incomplete, the
nextLink field should be used to request the next list of private link resources.
:vartype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateLinkResource]
:ivar next_link: The URI that can be used to request the next list of private link resources.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[PrivateLinkResource]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self,
*,
value: Optional[List["_models.PrivateLinkResource"]] = None,
next_link: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword value: The list of private link resources. Since this list may be incomplete, the
nextLink field should be used to request the next list of private link resources.
:paramtype value:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateLinkResource]
:keyword next_link: The URI that can be used to request the next list of private link
resources.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class PrivateLinkServiceConnectionState(_serialization.Model):
"""The state of a private link service connection.
:ivar status: The private link service connection status. Known values are: "Approved",
"Pending", "Rejected", and "Disconnected".
:vartype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectionStatus
    :ivar description: The description for the connection status. For example, if the connection
     is rejected, it can indicate the reason for rejection.
:vartype description: str
:ivar actions_required: A message indicating if changes on the service provider require any
updates on the consumer. Known values are: "None" and "Recreate".
:vartype actions_required: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ActionsRequired
"""
_attribute_map = {
"status": {"key": "status", "type": "str"},
"description": {"key": "description", "type": "str"},
"actions_required": {"key": "actionsRequired", "type": "str"},
}
def __init__(
self,
*,
status: Optional[Union[str, "_models.ConnectionStatus"]] = None,
description: Optional[str] = None,
actions_required: Optional[Union[str, "_models.ActionsRequired"]] = None,
**kwargs: Any
) -> None:
"""
:keyword status: The private link service connection status. Known values are: "Approved",
"Pending", "Rejected", and "Disconnected".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ConnectionStatus
        :keyword description: The description for the connection status. For example, if the
         connection is rejected, it can indicate the reason for rejection.
:paramtype description: str
:keyword actions_required: A message indicating if changes on the service provider require any
updates on the consumer. Known values are: "None" and "Recreate".
:paramtype actions_required: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ActionsRequired
"""
super().__init__(**kwargs)
self.status = status
self.description = description
self.actions_required = actions_required
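
# Illustrative usage sketch (not part of the generated model code): approving a private
# endpoint connection by building the connection-state payload. The description is a placeholder.
def _example_approved_private_endpoint_connection() -> "PrivateEndpointConnection":
    state = PrivateLinkServiceConnectionState(status="Approved", description="Approved by admin.")
    return PrivateEndpointConnection(private_link_service_connection_state=state)
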
class ProgressProperties(_serialization.Model):
"""ProgressProperties.
:ivar percentage: The percentage complete of the copy operation.
:vartype percentage: str
"""
_attribute_map = {
"percentage": {"key": "percentage", "type": "str"},
}
def __init__(self, *, percentage: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword percentage: The percentage complete of the copy operation.
:paramtype percentage: str
"""
super().__init__(**kwargs)
self.percentage = percentage
class QuarantinePolicy(_serialization.Model):
"""The quarantine policy for a container registry.
:ivar status: The value that indicates whether the policy is enabled or not. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
_attribute_map = {
"status": {"key": "status", "type": "str"},
}
def __init__(self, *, status: Optional[Union[str, "_models.PolicyStatus"]] = None, **kwargs: Any) -> None:
"""
:keyword status: The value that indicates whether the policy is enabled or not. Known values
are: "enabled" and "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
super().__init__(**kwargs)
self.status = status
class RegenerateCredentialParameters(_serialization.Model):
"""The parameters used to regenerate the login credential.
All required parameters must be populated in order to send to Azure.
    :ivar name: Specifies the name of the password which should be regenerated -- password or
password2. Required. Known values are: "password" and "password2".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PasswordName
"""
_validation = {
"name": {"required": True},
}
_attribute_map = {
"name": {"key": "name", "type": "str"},
}
def __init__(self, *, name: Union[str, "_models.PasswordName"], **kwargs: Any) -> None:
"""
        :keyword name: Specifies the name of the password which should be regenerated -- password
         or password2. Required. Known values are: "password" and "password2".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PasswordName
"""
super().__init__(**kwargs)
self.name = name
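
# Illustrative usage sketch (not part of the generated model code): the payload used to ask the
# service to regenerate the second admin password.
def _example_regenerate_credential_parameters() -> "RegenerateCredentialParameters":
    return RegenerateCredentialParameters(name="password2")
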
class Resource(_serialization.Model):
"""An Azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource. This cannot be changed after the resource is
created. Required.
:vartype location: str
:ivar tags: The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"location": {"required": True},
"system_data": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"system_data": {"key": "systemData", "type": "SystemData"},
}
def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None:
"""
:keyword location: The location of the resource. This cannot be changed after the resource is
created. Required.
:paramtype location: str
:keyword tags: The tags of the resource.
:paramtype tags: dict[str, str]
"""
super().__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.location = location
self.tags = tags
self.system_data = None
class Registry(Resource): # pylint: disable=too-many-instance-attributes
"""An object that represents a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource. This cannot be changed after the resource is
created. Required.
:vartype location: str
:ivar tags: The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar sku: The SKU of the container registry. Required.
:vartype sku: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Sku
:ivar identity: The identity of the container registry.
:vartype identity: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:ivar login_server: The URL that can be used to log into the container registry.
:vartype login_server: str
:ivar creation_date: The creation date of the container registry in ISO8601 format.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: The provisioning state of the container registry at the time the
operation was called. Known values are: "Creating", "Updating", "Deleting", "Succeeded",
"Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar status: The status of the container registry at the time the operation was called.
:vartype status: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Status
:ivar admin_user_enabled: The value that indicates whether the admin user is enabled.
:vartype admin_user_enabled: bool
:ivar network_rule_set: The network rule set for a container registry.
:vartype network_rule_set:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleSet
:ivar policies: The policies for a container registry.
:vartype policies: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Policies
:ivar encryption: The encryption settings of container registry.
:vartype encryption:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionProperty
:ivar data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:vartype data_endpoint_enabled: bool
:ivar data_endpoint_host_names: List of host names that will serve data when
dataEndpointEnabled is true.
:vartype data_endpoint_host_names: list[str]
:ivar private_endpoint_connections: List of private endpoint connections for a container
registry.
:vartype private_endpoint_connections:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.PrivateEndpointConnection]
:ivar public_network_access: Whether or not public network access is allowed for the container
registry. Known values are: "Enabled" and "Disabled".
:vartype public_network_access: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PublicNetworkAccess
:ivar network_rule_bypass_options: Whether to allow trusted Azure services to access a network
restricted registry. Known values are: "AzureServices" and "None".
:vartype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleBypassOptions
:ivar zone_redundancy: Whether or not zone redundancy is enabled for this container registry.
Known values are: "Enabled" and "Disabled".
:vartype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ZoneRedundancy
:ivar anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:vartype anonymous_pull_enabled: bool
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"location": {"required": True},
"system_data": {"readonly": True},
"sku": {"required": True},
"login_server": {"readonly": True},
"creation_date": {"readonly": True},
"provisioning_state": {"readonly": True},
"status": {"readonly": True},
"data_endpoint_host_names": {"readonly": True},
"private_endpoint_connections": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"system_data": {"key": "systemData", "type": "SystemData"},
"sku": {"key": "sku", "type": "Sku"},
"identity": {"key": "identity", "type": "IdentityProperties"},
"login_server": {"key": "properties.loginServer", "type": "str"},
"creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"status": {"key": "properties.status", "type": "Status"},
"admin_user_enabled": {"key": "properties.adminUserEnabled", "type": "bool"},
"network_rule_set": {"key": "properties.networkRuleSet", "type": "NetworkRuleSet"},
"policies": {"key": "properties.policies", "type": "Policies"},
"encryption": {"key": "properties.encryption", "type": "EncryptionProperty"},
"data_endpoint_enabled": {"key": "properties.dataEndpointEnabled", "type": "bool"},
"data_endpoint_host_names": {"key": "properties.dataEndpointHostNames", "type": "[str]"},
"private_endpoint_connections": {
"key": "properties.privateEndpointConnections",
"type": "[PrivateEndpointConnection]",
},
"public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"},
"network_rule_bypass_options": {"key": "properties.networkRuleBypassOptions", "type": "str"},
"zone_redundancy": {"key": "properties.zoneRedundancy", "type": "str"},
"anonymous_pull_enabled": {"key": "properties.anonymousPullEnabled", "type": "bool"},
}
def __init__(
self,
*,
location: str,
sku: "_models.Sku",
tags: Optional[Dict[str, str]] = None,
identity: Optional["_models.IdentityProperties"] = None,
admin_user_enabled: bool = False,
network_rule_set: Optional["_models.NetworkRuleSet"] = None,
policies: Optional["_models.Policies"] = None,
encryption: Optional["_models.EncryptionProperty"] = None,
data_endpoint_enabled: Optional[bool] = None,
public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
network_rule_bypass_options: Optional[Union[str, "_models.NetworkRuleBypassOptions"]] = None,
zone_redundancy: Optional[Union[str, "_models.ZoneRedundancy"]] = None,
anonymous_pull_enabled: bool = False,
**kwargs: Any
) -> None:
"""
:keyword location: The location of the resource. This cannot be changed after the resource is
created. Required.
:paramtype location: str
:keyword tags: The tags of the resource.
:paramtype tags: dict[str, str]
:keyword sku: The SKU of the container registry. Required.
:paramtype sku: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Sku
:keyword identity: The identity of the container registry.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:keyword admin_user_enabled: The value that indicates whether the admin user is enabled.
:paramtype admin_user_enabled: bool
:keyword network_rule_set: The network rule set for a container registry.
:paramtype network_rule_set:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleSet
:keyword policies: The policies for a container registry.
:paramtype policies: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Policies
:keyword encryption: The encryption settings of container registry.
:paramtype encryption:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionProperty
:keyword data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:paramtype data_endpoint_enabled: bool
:keyword public_network_access: Whether or not public network access is allowed for the
container registry. Known values are: "Enabled" and "Disabled".
:paramtype public_network_access: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PublicNetworkAccess
:keyword network_rule_bypass_options: Whether to allow trusted Azure services to access a
network restricted registry. Known values are: "AzureServices" and "None".
:paramtype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleBypassOptions
:keyword zone_redundancy: Whether or not zone redundancy is enabled for this container
registry. Known values are: "Enabled" and "Disabled".
:paramtype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ZoneRedundancy
:keyword anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:paramtype anonymous_pull_enabled: bool
"""
super().__init__(location=location, tags=tags, **kwargs)
self.sku = sku
self.identity = identity
self.login_server = None
self.creation_date = None
self.provisioning_state = None
self.status = None
self.admin_user_enabled = admin_user_enabled
self.network_rule_set = network_rule_set
self.policies = policies
self.encryption = encryption
self.data_endpoint_enabled = data_endpoint_enabled
self.data_endpoint_host_names = None
self.private_endpoint_connections = None
self.public_network_access = public_network_access
self.network_rule_bypass_options = network_rule_bypass_options
self.zone_redundancy = zone_redundancy
self.anonymous_pull_enabled = anonymous_pull_enabled
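
# Illustrative usage sketch (not part of the generated model code): a minimal registry
# definition. It assumes the Sku model defined elsewhere in this module accepts a ``name``
# keyword (e.g. "Premium"); the location, tag, and SKU values are placeholders.
def _example_registry() -> "Registry":
    return Registry(
        location="eastus",
        sku=Sku(name="Premium"),  # assumed Sku signature
        tags={"environment": "dev"},
        admin_user_enabled=False,
        zone_redundancy="Enabled",
    )
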
class RegistryListCredentialsResult(_serialization.Model):
"""The response from the ListCredentials operation.
:ivar username: The username for a container registry.
:vartype username: str
:ivar passwords: The list of passwords for a container registry.
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryPassword]
"""
_attribute_map = {
"username": {"key": "username", "type": "str"},
"passwords": {"key": "passwords", "type": "[RegistryPassword]"},
}
def __init__(
self,
*,
username: Optional[str] = None,
passwords: Optional[List["_models.RegistryPassword"]] = None,
**kwargs: Any
) -> None:
"""
:keyword username: The username for a container registry.
:paramtype username: str
:keyword passwords: The list of passwords for a container registry.
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryPassword]
"""
super().__init__(**kwargs)
self.username = username
self.passwords = passwords
class RegistryListResult(_serialization.Model):
"""The result of a request to list container registries.
:ivar value: The list of container registries. Since this list may be incomplete, the nextLink
field should be used to request the next list of container registries.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Registry]
:ivar next_link: The URI that can be used to request the next list of container registries.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[Registry]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.Registry"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of container registries. Since this list may be incomplete, the
nextLink field should be used to request the next list of container registries.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Registry]
:keyword next_link: The URI that can be used to request the next list of container registries.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class RegistryNameCheckRequest(_serialization.Model):
"""A request to check whether a container registry name is available.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar name: The name of the container registry. Required.
:vartype name: str
:ivar type: The resource type of the container registry. This field must be set to
'Microsoft.ContainerRegistry/registries'. Required. Default value is
"Microsoft.ContainerRegistry/registries".
:vartype type: str
"""
_validation = {
"name": {"required": True, "max_length": 50, "min_length": 5, "pattern": r"^[a-zA-Z0-9]*$"},
"type": {"required": True, "constant": True},
}
_attribute_map = {
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
}
type = "Microsoft.ContainerRegistry/registries"
def __init__(self, *, name: str, **kwargs: Any) -> None:
"""
:keyword name: The name of the container registry. Required.
:paramtype name: str
"""
super().__init__(**kwargs)
self.name = name
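# A minimal sketch of a name-availability check body. The name below is a
# placeholder; it only needs to satisfy the 5-50 character alphanumeric
# validation declared above, and ``type`` is a class constant.
def _example_name_check_request() -> RegistryNameCheckRequest:
    """Build an illustrative RegistryNameCheckRequest."""
    return RegistryNameCheckRequest(name="myregistry01")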
class RegistryNameStatus(_serialization.Model):
"""The result of a request to check the availability of a container registry name.
:ivar name_available: The value that indicates whether the name is available.
:vartype name_available: bool
:ivar reason: If any, the reason that the name is not available.
:vartype reason: str
:ivar message: If any, the error message that provides more detail for the reason that the name
is not available.
:vartype message: str
"""
_attribute_map = {
"name_available": {"key": "nameAvailable", "type": "bool"},
"reason": {"key": "reason", "type": "str"},
"message": {"key": "message", "type": "str"},
}
def __init__(
self,
*,
name_available: Optional[bool] = None,
reason: Optional[str] = None,
message: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword name_available: The value that indicates whether the name is available.
:paramtype name_available: bool
:keyword reason: If any, the reason that the name is not available.
:paramtype reason: str
:keyword message: If any, the error message that provides more detail for the reason that the
name is not available.
:paramtype message: str
"""
super().__init__(**kwargs)
self.name_available = name_available
self.reason = reason
self.message = message
class RegistryPassword(_serialization.Model):
"""The login password for the container registry.
:ivar name: The password name. Known values are: "password" and "password2".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PasswordName
:ivar value: The password value.
:vartype value: str
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
"value": {"key": "value", "type": "str"},
}
def __init__(
self, *, name: Optional[Union[str, "_models.PasswordName"]] = None, value: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword name: The password name. Known values are: "password" and "password2".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PasswordName
:keyword value: The password value.
:paramtype value: str
"""
super().__init__(**kwargs)
self.name = name
self.value = value
class RegistryUpdateParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes
"""The parameters for updating a container registry.
:ivar identity: The identity of the container registry.
:vartype identity: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:ivar tags: The tags for the container registry.
:vartype tags: dict[str, str]
:ivar sku: The SKU of the container registry.
:vartype sku: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Sku
:ivar admin_user_enabled: The value that indicates whether the admin user is enabled.
:vartype admin_user_enabled: bool
:ivar network_rule_set: The network rule set for a container registry.
:vartype network_rule_set:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleSet
:ivar policies: The policies for a container registry.
:vartype policies: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Policies
:ivar encryption: The encryption settings of container registry.
:vartype encryption:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionProperty
:ivar data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:vartype data_endpoint_enabled: bool
:ivar public_network_access: Whether or not public network access is allowed for the container
registry. Known values are: "Enabled" and "Disabled".
:vartype public_network_access: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PublicNetworkAccess
:ivar network_rule_bypass_options: Whether to allow trusted Azure services to access a network
restricted registry. Known values are: "AzureServices" and "None".
:vartype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleBypassOptions
:ivar anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:vartype anonymous_pull_enabled: bool
"""
_attribute_map = {
"identity": {"key": "identity", "type": "IdentityProperties"},
"tags": {"key": "tags", "type": "{str}"},
"sku": {"key": "sku", "type": "Sku"},
"admin_user_enabled": {"key": "properties.adminUserEnabled", "type": "bool"},
"network_rule_set": {"key": "properties.networkRuleSet", "type": "NetworkRuleSet"},
"policies": {"key": "properties.policies", "type": "Policies"},
"encryption": {"key": "properties.encryption", "type": "EncryptionProperty"},
"data_endpoint_enabled": {"key": "properties.dataEndpointEnabled", "type": "bool"},
"public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"},
"network_rule_bypass_options": {"key": "properties.networkRuleBypassOptions", "type": "str"},
"anonymous_pull_enabled": {"key": "properties.anonymousPullEnabled", "type": "bool"},
}
def __init__(
self,
*,
identity: Optional["_models.IdentityProperties"] = None,
tags: Optional[Dict[str, str]] = None,
sku: Optional["_models.Sku"] = None,
admin_user_enabled: Optional[bool] = None,
network_rule_set: Optional["_models.NetworkRuleSet"] = None,
policies: Optional["_models.Policies"] = None,
encryption: Optional["_models.EncryptionProperty"] = None,
data_endpoint_enabled: Optional[bool] = None,
public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None,
network_rule_bypass_options: Optional[Union[str, "_models.NetworkRuleBypassOptions"]] = None,
anonymous_pull_enabled: Optional[bool] = None,
**kwargs: Any
) -> None:
"""
:keyword identity: The identity of the container registry.
:paramtype identity:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.IdentityProperties
:keyword tags: The tags for the container registry.
:paramtype tags: dict[str, str]
:keyword sku: The SKU of the container registry.
:paramtype sku: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Sku
:keyword admin_user_enabled: The value that indicates whether the admin user is enabled.
:paramtype admin_user_enabled: bool
:keyword network_rule_set: The network rule set for a container registry.
:paramtype network_rule_set:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleSet
:keyword policies: The policies for a container registry.
:paramtype policies: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Policies
:keyword encryption: The encryption settings of container registry.
:paramtype encryption:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.EncryptionProperty
:keyword data_endpoint_enabled: Enable a single data endpoint per region for serving data.
:paramtype data_endpoint_enabled: bool
:keyword public_network_access: Whether or not public network access is allowed for the
container registry. Known values are: "Enabled" and "Disabled".
:paramtype public_network_access: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.PublicNetworkAccess
:keyword network_rule_bypass_options: Whether to allow trusted Azure services to access a
network restricted registry. Known values are: "AzureServices" and "None".
:paramtype network_rule_bypass_options: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.NetworkRuleBypassOptions
:keyword anonymous_pull_enabled: Enables registry-wide pull from unauthenticated clients.
:paramtype anonymous_pull_enabled: bool
"""
super().__init__(**kwargs)
self.identity = identity
self.tags = tags
self.sku = sku
self.admin_user_enabled = admin_user_enabled
self.network_rule_set = network_rule_set
self.policies = policies
self.encryption = encryption
self.data_endpoint_enabled = data_endpoint_enabled
self.public_network_access = public_network_access
self.network_rule_bypass_options = network_rule_bypass_options
self.anonymous_pull_enabled = anonymous_pull_enabled
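# A minimal sketch of a registry update (PATCH) body: every keyword is optional,
# so only the properties being changed need to be supplied. The tag values are
# placeholders.
def _example_registry_update() -> RegistryUpdateParameters:
    """Build an illustrative RegistryUpdateParameters enabling the admin user."""
    return RegistryUpdateParameters(
        admin_user_enabled=True,
        public_network_access="Disabled",
        tags={"env": "dev"},  # placeholder tags
    )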
class RegistryUsage(_serialization.Model):
"""The quota usage for a container registry.
:ivar name: The name of the usage.
:vartype name: str
:ivar limit: The limit of the usage.
:vartype limit: int
:ivar current_value: The current value of the usage.
:vartype current_value: int
:ivar unit: The unit of measurement. Known values are: "Count" and "Bytes".
:vartype unit: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryUsageUnit
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
"limit": {"key": "limit", "type": "int"},
"current_value": {"key": "currentValue", "type": "int"},
"unit": {"key": "unit", "type": "str"},
}
def __init__(
self,
*,
name: Optional[str] = None,
limit: Optional[int] = None,
current_value: Optional[int] = None,
unit: Optional[Union[str, "_models.RegistryUsageUnit"]] = None,
**kwargs: Any
) -> None:
"""
:keyword name: The name of the usage.
:paramtype name: str
:keyword limit: The limit of the usage.
:paramtype limit: int
:keyword current_value: The current value of the usage.
:paramtype current_value: int
:keyword unit: The unit of measurement. Known values are: "Count" and "Bytes".
:paramtype unit: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryUsageUnit
"""
super().__init__(**kwargs)
self.name = name
self.limit = limit
self.current_value = current_value
self.unit = unit
class RegistryUsageListResult(_serialization.Model):
"""The result of a request to get container registry quota usages.
:ivar value: The list of container registry quota usages.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryUsage]
"""
_attribute_map = {
"value": {"key": "value", "type": "[RegistryUsage]"},
}
def __init__(self, *, value: Optional[List["_models.RegistryUsage"]] = None, **kwargs: Any) -> None:
"""
:keyword value: The list of container registry quota usages.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.RegistryUsage]
"""
super().__init__(**kwargs)
self.value = value
class Replication(Resource):
"""An object that represents a replication for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource. This cannot be changed after the resource is
created. Required.
:vartype location: str
:ivar tags: The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar provisioning_state: The provisioning state of the replication at the time the operation
was called. Known values are: "Creating", "Updating", "Deleting", "Succeeded", "Failed", and
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar status: The status of the replication at the time the operation was called.
:vartype status: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Status
:ivar region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled,
however its data will continue to be synced with other replications.
:vartype region_endpoint_enabled: bool
:ivar zone_redundancy: Whether or not zone redundancy is enabled for this container registry
replication. Known values are: "Enabled" and "Disabled".
:vartype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ZoneRedundancy
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"location": {"required": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
"status": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"system_data": {"key": "systemData", "type": "SystemData"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"status": {"key": "properties.status", "type": "Status"},
"region_endpoint_enabled": {"key": "properties.regionEndpointEnabled", "type": "bool"},
"zone_redundancy": {"key": "properties.zoneRedundancy", "type": "str"},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
region_endpoint_enabled: bool = True,
zone_redundancy: Optional[Union[str, "_models.ZoneRedundancy"]] = None,
**kwargs: Any
) -> None:
"""
:keyword location: The location of the resource. This cannot be changed after the resource is
created. Required.
:paramtype location: str
:keyword tags: The tags of the resource.
:paramtype tags: dict[str, str]
:keyword region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled,
however its data will continue to be synced with other replications.
:paramtype region_endpoint_enabled: bool
:keyword zone_redundancy: Whether or not zone redundancy is enabled for this container registry
replication. Known values are: "Enabled" and "Disabled".
:paramtype zone_redundancy: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ZoneRedundancy
"""
super().__init__(location=location, tags=tags, **kwargs)
self.provisioning_state = None
self.status = None
self.region_endpoint_enabled = region_endpoint_enabled
self.zone_redundancy = zone_redundancy
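# A minimal sketch of a replication create body. ``location`` is the only
# required keyword; the region below is a placeholder.
def _example_replication() -> Replication:
    """Build an illustrative Replication with zone redundancy enabled."""
    return Replication(location="westus2", zone_redundancy="Enabled")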
class ReplicationListResult(_serialization.Model):
"""The result of a request to list replications for a container registry.
:ivar value: The list of replications. Since this list may be incomplete, the nextLink field
should be used to request the next list of replications.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Replication]
:ivar next_link: The URI that can be used to request the next list of replications.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[Replication]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.Replication"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of replications. Since this list may be incomplete, the nextLink field
should be used to request the next list of replications.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Replication]
:keyword next_link: The URI that can be used to request the next list of replications.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class ReplicationUpdateParameters(_serialization.Model):
"""The parameters for updating a replication.
:ivar tags: The tags for the replication.
:vartype tags: dict[str, str]
:ivar region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled,
however its data will continue to be synced with other replications.
:vartype region_endpoint_enabled: bool
"""
_attribute_map = {
"tags": {"key": "tags", "type": "{str}"},
"region_endpoint_enabled": {"key": "properties.regionEndpointEnabled", "type": "bool"},
}
def __init__(
self, *, tags: Optional[Dict[str, str]] = None, region_endpoint_enabled: Optional[bool] = None, **kwargs: Any
) -> None:
"""
:keyword tags: The tags for the replication.
:paramtype tags: dict[str, str]
:keyword region_endpoint_enabled: Specifies whether the replication's regional endpoint is
enabled. Requests will not be routed to a replication whose regional endpoint is disabled,
however its data will continue to be synced with other replications.
:paramtype region_endpoint_enabled: bool
"""
super().__init__(**kwargs)
self.tags = tags
self.region_endpoint_enabled = region_endpoint_enabled
class Request(_serialization.Model):
"""The request that generated the event.
:ivar id: The ID of the request that initiated the event.
:vartype id: str
:ivar addr: The IP or hostname and possibly port of the client connection that initiated the
event. This is the RemoteAddr from the standard http request.
:vartype addr: str
:ivar host: The externally accessible hostname of the registry instance, as specified by the
http host header on incoming requests.
:vartype host: str
:ivar method: The request method that generated the event.
:vartype method: str
:ivar useragent: The user agent header of the request.
:vartype useragent: str
"""
_attribute_map = {
"id": {"key": "id", "type": "str"},
"addr": {"key": "addr", "type": "str"},
"host": {"key": "host", "type": "str"},
"method": {"key": "method", "type": "str"},
"useragent": {"key": "useragent", "type": "str"},
}
def __init__(
self,
*,
id: Optional[str] = None, # pylint: disable=redefined-builtin
addr: Optional[str] = None,
host: Optional[str] = None,
method: Optional[str] = None,
useragent: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword id: The ID of the request that initiated the event.
:paramtype id: str
:keyword addr: The IP or hostname and possibly port of the client connection that initiated the
event. This is the RemoteAddr from the standard http request.
:paramtype addr: str
:keyword host: The externally accessible hostname of the registry instance, as specified by the
http host header on incoming requests.
:paramtype host: str
:keyword method: The request method that generated the event.
:paramtype method: str
:keyword useragent: The user agent header of the request.
:paramtype useragent: str
"""
super().__init__(**kwargs)
self.id = id
self.addr = addr
self.host = host
self.method = method
self.useragent = useragent
class RetentionPolicy(_serialization.Model):
"""The retention policy for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar days: The number of days to retain an untagged manifest after which it gets purged.
:vartype days: int
:ivar last_updated_time: The timestamp when the policy was last updated.
:vartype last_updated_time: ~datetime.datetime
:ivar status: The value that indicates whether the policy is enabled or not. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
_validation = {
"last_updated_time": {"readonly": True},
}
_attribute_map = {
"days": {"key": "days", "type": "int"},
"last_updated_time": {"key": "lastUpdatedTime", "type": "iso-8601"},
"status": {"key": "status", "type": "str"},
}
def __init__(
self, *, days: int = 7, status: Optional[Union[str, "_models.PolicyStatus"]] = None, **kwargs: Any
) -> None:
"""
:keyword days: The number of days to retain an untagged manifest after which it gets purged.
:paramtype days: int
:keyword status: The value that indicates whether the policy is enabled or not. Known values
are: "enabled" and "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
super().__init__(**kwargs)
self.days = days
self.last_updated_time = None
self.status = status
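# A minimal sketch of a retention policy: keep untagged manifests for 30 days
# instead of the default of 7 declared in the constructor above.
def _example_retention_policy() -> RetentionPolicy:
    """Build an illustrative enabled RetentionPolicy."""
    return RetentionPolicy(days=30, status="enabled")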
class ScopeMap(ProxyResource):
"""An object that represents a scope map for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar description: The user friendly description of the scope map.
:vartype description: str
:ivar type_properties_type: The type of the scope map. E.g. BuiltIn scope map.
:vartype type_properties_type: str
:ivar creation_date: The creation date of scope map.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the resource. Known values are: "Creating",
"Updating", "Deleting", "Succeeded", "Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar actions: The list of scoped permissions for registry artifacts.
E.g. repositories/repository-name/content/read,
repositories/repository-name/metadata/write.
:vartype actions: list[str]
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"type_properties_type": {"readonly": True},
"creation_date": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"description": {"key": "properties.description", "type": "str"},
"type_properties_type": {"key": "properties.type", "type": "str"},
"creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"actions": {"key": "properties.actions", "type": "[str]"},
}
def __init__(
self, *, description: Optional[str] = None, actions: Optional[List[str]] = None, **kwargs: Any
) -> None:
"""
:keyword description: The user friendly description of the scope map.
:paramtype description: str
:keyword actions: The list of scoped permissions for registry artifacts.
E.g. repositories/repository-name/content/read,
repositories/repository-name/metadata/write.
:paramtype actions: list[str]
"""
super().__init__(**kwargs)
self.description = description
self.type_properties_type = None
self.creation_date = None
self.provisioning_state = None
self.actions = actions
class ScopeMapListResult(_serialization.Model):
"""The result of a request to list scope maps for a container registry.
:ivar value: The list of scope maps. Since this list may be incomplete, the nextLink field
should be used to request the next list of scope maps.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ScopeMap]
:ivar next_link: The URI that can be used to request the next list of scope maps.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[ScopeMap]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.ScopeMap"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of scope maps. Since this list may be incomplete, the nextLink field
should be used to request the next list of scope maps.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.ScopeMap]
:keyword next_link: The URI that can be used to request the next list of scope maps.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class ScopeMapUpdateParameters(_serialization.Model):
"""The properties for updating the scope map.
:ivar description: The user friendly description of the scope map.
:vartype description: str
:ivar actions: The list of scope permissions for registry artifacts.
E.g. repositories/repository-name/pull,
repositories/repository-name/delete.
:vartype actions: list[str]
"""
_attribute_map = {
"description": {"key": "properties.description", "type": "str"},
"actions": {"key": "properties.actions", "type": "[str]"},
}
def __init__(
self, *, description: Optional[str] = None, actions: Optional[List[str]] = None, **kwargs: Any
) -> None:
"""
:keyword description: The user friendly description of the scope map.
:paramtype description: str
:keyword actions: The list of scope permissions for registry artifacts.
E.g. repositories/repository-name/pull,
repositories/repository-name/delete.
:paramtype actions: list[str]
"""
super().__init__(**kwargs)
self.description = description
self.actions = actions
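# A minimal sketch of a scope map update body, reusing the action format shown
# in the ScopeMap docstring above. The repository name is a placeholder.
def _example_scope_map_update() -> ScopeMapUpdateParameters:
    """Build an illustrative ScopeMapUpdateParameters granting read access."""
    return ScopeMapUpdateParameters(
        description="Read-only access to one repository",
        actions=["repositories/repository-name/content/read"],  # placeholder repository
    )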
class Sku(_serialization.Model):
"""The SKU of a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar name: The SKU name of the container registry. Required for registry creation. Required.
Known values are: "Classic", "Basic", "Standard", and "Premium".
:vartype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SkuName
:ivar tier: The SKU tier based on the SKU name. Known values are: "Classic", "Basic",
"Standard", and "Premium".
:vartype tier: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SkuTier
"""
_validation = {
"name": {"required": True},
"tier": {"readonly": True},
}
_attribute_map = {
"name": {"key": "name", "type": "str"},
"tier": {"key": "tier", "type": "str"},
}
def __init__(self, *, name: Union[str, "_models.SkuName"], **kwargs: Any) -> None:
"""
:keyword name: The SKU name of the container registry. Required for registry creation.
Required. Known values are: "Classic", "Basic", "Standard", and "Premium".
:paramtype name: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SkuName
"""
super().__init__(**kwargs)
self.name = name
self.tier = None
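# A minimal sketch of a SKU: only ``name`` is settable; ``tier`` is derived from
# the name by the service and is therefore read-only.
def _example_sku() -> Sku:
    """Build an illustrative Premium Sku."""
    return Sku(name="Premium")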
class Source(_serialization.Model):
"""The registry node that generated the event. Put differently, while the actor initiates the
event, the source generates it.
:ivar addr: The IP or hostname and the port of the registry node that generated the event.
Generally, this will be resolved by os.Hostname() along with the running port.
:vartype addr: str
:ivar instance_id: The running instance of an application. Changes after each restart.
:vartype instance_id: str
"""
_attribute_map = {
"addr": {"key": "addr", "type": "str"},
"instance_id": {"key": "instanceID", "type": "str"},
}
def __init__(self, *, addr: Optional[str] = None, instance_id: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword addr: The IP or hostname and the port of the registry node that generated the event.
Generally, this will be resolved by os.Hostname() along with the running port.
:paramtype addr: str
:keyword instance_id: The running instance of an application. Changes after each restart.
:paramtype instance_id: str
"""
super().__init__(**kwargs)
self.addr = addr
self.instance_id = instance_id
class Status(_serialization.Model):
"""The status of an Azure resource at the time the operation was called.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar display_status: The short label for the status.
:vartype display_status: str
:ivar message: The detailed message for the status, including alerts and error messages.
:vartype message: str
:ivar timestamp: The timestamp when the status was changed to the current value.
:vartype timestamp: ~datetime.datetime
"""
_validation = {
"display_status": {"readonly": True},
"message": {"readonly": True},
"timestamp": {"readonly": True},
}
_attribute_map = {
"display_status": {"key": "displayStatus", "type": "str"},
"message": {"key": "message", "type": "str"},
"timestamp": {"key": "timestamp", "type": "iso-8601"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.display_status = None
self.message = None
self.timestamp = None
class StatusDetailProperties(_serialization.Model):
"""The status detail properties of the connected registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The component of the connected registry corresponding to the status.
:vartype type: str
:ivar code: The code of the status.
:vartype code: str
:ivar description: The description of the status.
:vartype description: str
:ivar timestamp: The timestamp of the status.
:vartype timestamp: ~datetime.datetime
:ivar correlation_id: The correlation ID of the status.
:vartype correlation_id: str
"""
_validation = {
"type": {"readonly": True},
"code": {"readonly": True},
"description": {"readonly": True},
"timestamp": {"readonly": True},
"correlation_id": {"readonly": True},
}
_attribute_map = {
"type": {"key": "type", "type": "str"},
"code": {"key": "code", "type": "str"},
"description": {"key": "description", "type": "str"},
"timestamp": {"key": "timestamp", "type": "iso-8601"},
"correlation_id": {"key": "correlationId", "type": "str"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.type = None
self.code = None
self.description = None
self.timestamp = None
self.correlation_id = None
class SyncProperties(_serialization.Model):
"""The sync properties of the connected registry with its parent.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar token_id: The resource ID of the ACR token used to authenticate the connected registry to
its parent during sync. Required.
:vartype token_id: str
:ivar schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:vartype schedule: str
:ivar sync_window: The time window during which sync is enabled for each schedule occurrence.
Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype sync_window: ~datetime.timedelta
:ivar message_ttl: The period of time for which a message is available to sync before it is
expired. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
Required.
:vartype message_ttl: ~datetime.timedelta
:ivar last_sync_time: The last time a sync occurred between the connected registry and its
parent.
:vartype last_sync_time: ~datetime.datetime
:ivar gateway_endpoint: The gateway endpoint used by the connected registry to communicate with
its parent.
:vartype gateway_endpoint: str
"""
_validation = {
"token_id": {"required": True},
"message_ttl": {"required": True},
"last_sync_time": {"readonly": True},
"gateway_endpoint": {"readonly": True},
}
_attribute_map = {
"token_id": {"key": "tokenId", "type": "str"},
"schedule": {"key": "schedule", "type": "str"},
"sync_window": {"key": "syncWindow", "type": "duration"},
"message_ttl": {"key": "messageTtl", "type": "duration"},
"last_sync_time": {"key": "lastSyncTime", "type": "iso-8601"},
"gateway_endpoint": {"key": "gatewayEndpoint", "type": "str"},
}
def __init__(
self,
*,
token_id: str,
message_ttl: datetime.timedelta,
schedule: Optional[str] = None,
sync_window: Optional[datetime.timedelta] = None,
**kwargs: Any
) -> None:
"""
:keyword token_id: The resource ID of the ACR token used to authenticate the connected registry
to its parent during sync. Required.
:paramtype token_id: str
:keyword schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:paramtype schedule: str
:keyword sync_window: The time window during which sync is enabled for each schedule
occurrence. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype sync_window: ~datetime.timedelta
:keyword message_ttl: The period of time for which a message is available to sync before it is
expired. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
Required.
:paramtype message_ttl: ~datetime.timedelta
"""
super().__init__(**kwargs)
self.token_id = token_id
self.schedule = schedule
self.sync_window = sync_window
self.message_ttl = message_ttl
self.last_sync_time = None
self.gateway_endpoint = None
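# A minimal sketch of connected-registry sync settings. The token resource ID is
# a placeholder, and the durations are supplied as datetime.timedelta values,
# which serialize to the ISO 8601 format described in the docstring above.
def _example_sync_properties() -> SyncProperties:
    """Build illustrative SyncProperties with a daily sync schedule."""
    return SyncProperties(
        token_id="<ACR token resource ID>",  # placeholder, not a real resource ID
        message_ttl=datetime.timedelta(days=2),
        schedule="0 9 * * *",  # cron expression: every day at 09:00
        sync_window=datetime.timedelta(hours=4),
    )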
class SyncUpdateProperties(_serialization.Model):
"""The parameters for updating the sync properties of the connected registry with its parent.
:ivar schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:vartype schedule: str
:ivar sync_window: The time window during which sync is enabled for each schedule occurrence.
Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype sync_window: ~datetime.timedelta
:ivar message_ttl: The period of time for which a message is available to sync before it is
expired. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:vartype message_ttl: ~datetime.timedelta
"""
_attribute_map = {
"schedule": {"key": "schedule", "type": "str"},
"sync_window": {"key": "syncWindow", "type": "duration"},
"message_ttl": {"key": "messageTtl", "type": "duration"},
}
def __init__(
self,
*,
schedule: Optional[str] = None,
sync_window: Optional[datetime.timedelta] = None,
message_ttl: Optional[datetime.timedelta] = None,
**kwargs: Any
) -> None:
"""
:keyword schedule: The cron expression indicating the schedule that the connected registry will
sync with its parent.
:paramtype schedule: str
:keyword sync_window: The time window during which sync is enabled for each schedule
occurrence. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype sync_window: ~datetime.timedelta
:keyword message_ttl: The period of time for which a message is available to sync before it is
expired. Specify the duration using the format P[n]Y[n]M[n]DT[n]H[n]M[n]S as per ISO8601.
:paramtype message_ttl: ~datetime.timedelta
"""
super().__init__(**kwargs)
self.schedule = schedule
self.sync_window = sync_window
self.message_ttl = message_ttl
class SystemData(_serialization.Model):
"""Metadata pertaining to creation and last modification of the resource.
:ivar created_by: The identity that created the resource.
:vartype created_by: str
:ivar created_by_type: The type of identity that created the resource. Known values are:
"User", "Application", "ManagedIdentity", and "Key".
:vartype created_by_type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.CreatedByType
:ivar created_at: The timestamp of resource creation (UTC).
:vartype created_at: ~datetime.datetime
:ivar last_modified_by: The identity that last modified the resource.
:vartype last_modified_by: str
:ivar last_modified_by_type: The type of identity that last modified the resource. Known values
are: "User", "Application", "ManagedIdentity", and "Key".
:vartype last_modified_by_type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.LastModifiedByType
:ivar last_modified_at: The timestamp of resource modification (UTC).
:vartype last_modified_at: ~datetime.datetime
"""
_attribute_map = {
"created_by": {"key": "createdBy", "type": "str"},
"created_by_type": {"key": "createdByType", "type": "str"},
"created_at": {"key": "createdAt", "type": "iso-8601"},
"last_modified_by": {"key": "lastModifiedBy", "type": "str"},
"last_modified_by_type": {"key": "lastModifiedByType", "type": "str"},
"last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"},
}
def __init__(
self,
*,
created_by: Optional[str] = None,
created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None,
created_at: Optional[datetime.datetime] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "_models.LastModifiedByType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
**kwargs: Any
) -> None:
"""
:keyword created_by: The identity that created the resource.
:paramtype created_by: str
:keyword created_by_type: The type of identity that created the resource. Known values are:
"User", "Application", "ManagedIdentity", and "Key".
:paramtype created_by_type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.CreatedByType
:keyword created_at: The timestamp of resource creation (UTC).
:paramtype created_at: ~datetime.datetime
:keyword last_modified_by: The identity that last modified the resource.
:paramtype last_modified_by: str
:keyword last_modified_by_type: The type of identity that last modified the resource. Known
values are: "User", "Application", "ManagedIdentity", and "Key".
:paramtype last_modified_by_type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.LastModifiedByType
:keyword last_modified_at: The timestamp of resource modification (UTC).
:paramtype last_modified_at: ~datetime.datetime
"""
super().__init__(**kwargs)
self.created_by = created_by
self.created_by_type = created_by_type
self.created_at = created_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
self.last_modified_at = last_modified_at
class Target(_serialization.Model):
"""The target of the event.
:ivar media_type: The MIME type of the referenced object.
:vartype media_type: str
:ivar size: The number of bytes of the content. Same as Length field.
:vartype size: int
:ivar digest: The digest of the content, as defined by the Registry V2 HTTP API Specification.
:vartype digest: str
:ivar length: The number of bytes of the content. Same as Size field.
:vartype length: int
:ivar repository: The repository name.
:vartype repository: str
:ivar url: The direct URL to the content.
:vartype url: str
:ivar tag: The tag name.
:vartype tag: str
:ivar name: The name of the artifact.
:vartype name: str
:ivar version: The version of the artifact.
:vartype version: str
"""
_attribute_map = {
"media_type": {"key": "mediaType", "type": "str"},
"size": {"key": "size", "type": "int"},
"digest": {"key": "digest", "type": "str"},
"length": {"key": "length", "type": "int"},
"repository": {"key": "repository", "type": "str"},
"url": {"key": "url", "type": "str"},
"tag": {"key": "tag", "type": "str"},
"name": {"key": "name", "type": "str"},
"version": {"key": "version", "type": "str"},
}
def __init__(
self,
*,
media_type: Optional[str] = None,
size: Optional[int] = None,
digest: Optional[str] = None,
length: Optional[int] = None,
repository: Optional[str] = None,
url: Optional[str] = None,
tag: Optional[str] = None,
name: Optional[str] = None,
version: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword media_type: The MIME type of the referenced object.
:paramtype media_type: str
:keyword size: The number of bytes of the content. Same as Length field.
:paramtype size: int
:keyword digest: The digest of the content, as defined by the Registry V2 HTTP API
Specification.
:paramtype digest: str
:keyword length: The number of bytes of the content. Same as Size field.
:paramtype length: int
:keyword repository: The repository name.
:paramtype repository: str
:keyword url: The direct URL to the content.
:paramtype url: str
:keyword tag: The tag name.
:paramtype tag: str
:keyword name: The name of the artifact.
:paramtype name: str
:keyword version: The version of the artifact.
:paramtype version: str
"""
super().__init__(**kwargs)
self.media_type = media_type
self.size = size
self.digest = digest
self.length = length
self.repository = repository
self.url = url
self.tag = tag
self.name = name
self.version = version
class TlsCertificateProperties(_serialization.Model):
"""The TLS certificate properties of the connected registry login server.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of certificate location. "LocalDirectory"
:vartype type: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.CertificateType
:ivar location: Indicates the location of the certificates.
:vartype location: str
"""
_validation = {
"type": {"readonly": True},
"location": {"readonly": True},
}
_attribute_map = {
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.type = None
self.location = None
class TlsProperties(_serialization.Model):
"""The TLS properties of the connected registry login server.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar status: Indicates whether HTTPS is enabled for the login server. Known values are:
"Enabled" and "Disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TlsStatus
:ivar certificate: The certificate used to configure HTTPS for the login server.
:vartype certificate:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TlsCertificateProperties
"""
_validation = {
"status": {"readonly": True},
"certificate": {"readonly": True},
}
_attribute_map = {
"status": {"key": "status", "type": "str"},
"certificate": {"key": "certificate", "type": "TlsCertificateProperties"},
}
def __init__(self, **kwargs: Any) -> None:
""" """
super().__init__(**kwargs)
self.status = None
self.certificate = None
class Token(ProxyResource):
"""An object that represents a token for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar creation_date: The creation date of the token.
:vartype creation_date: ~datetime.datetime
:ivar provisioning_state: Provisioning state of the resource. Known values are: "Creating",
"Updating", "Deleting", "Succeeded", "Failed", and "Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
:ivar scope_map_id: The resource ID of the scope map with which the token will be
associated.
:vartype scope_map_id: str
:ivar credentials: The credentials that can be used for authenticating the token.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCredentialsProperties
:ivar status: The status of the token, for example enabled or disabled. Known values are: "enabled"
and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenStatus
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"system_data": {"readonly": True},
"creation_date": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"system_data": {"key": "systemData", "type": "SystemData"},
"creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
"scope_map_id": {"key": "properties.scopeMapId", "type": "str"},
"credentials": {"key": "properties.credentials", "type": "TokenCredentialsProperties"},
"status": {"key": "properties.status", "type": "str"},
}
def __init__(
self,
*,
scope_map_id: Optional[str] = None,
credentials: Optional["_models.TokenCredentialsProperties"] = None,
status: Optional[Union[str, "_models.TokenStatus"]] = None,
**kwargs: Any
) -> None:
"""
:keyword scope_map_id: The resource ID of the scope map with which the token will be
associated.
:paramtype scope_map_id: str
:keyword credentials: The credentials that can be used for authenticating the token.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCredentialsProperties
:keyword status: The status of the token, for example enabled or disabled. Known values are:
"enabled" and "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenStatus
"""
super().__init__(**kwargs)
self.creation_date = None
self.provisioning_state = None
self.scope_map_id = scope_map_id
self.credentials = credentials
self.status = status
class TokenCertificate(_serialization.Model):
"""The properties of a certificate used for authenticating a token.
:ivar name: Known values are: "certificate1" and "certificate2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCertificateName
:ivar expiry: The expiry datetime of the certificate.
:vartype expiry: ~datetime.datetime
:ivar thumbprint: The thumbprint of the certificate.
:vartype thumbprint: str
:ivar encoded_pem_certificate: Base 64 encoded string of the public certificate in PEM format
that will be used for authenticating the token.
:vartype encoded_pem_certificate: str
"""
_attribute_map = {
"name": {"key": "name", "type": "str"},
"expiry": {"key": "expiry", "type": "iso-8601"},
"thumbprint": {"key": "thumbprint", "type": "str"},
"encoded_pem_certificate": {"key": "encodedPemCertificate", "type": "str"},
}
def __init__(
self,
*,
name: Optional[Union[str, "_models.TokenCertificateName"]] = None,
expiry: Optional[datetime.datetime] = None,
thumbprint: Optional[str] = None,
encoded_pem_certificate: Optional[str] = None,
**kwargs: Any
) -> None:
"""
:keyword name: Known values are: "certificate1" and "certificate2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCertificateName
:keyword expiry: The expiry datetime of the certificate.
:paramtype expiry: ~datetime.datetime
:keyword thumbprint: The thumbprint of the certificate.
:paramtype thumbprint: str
:keyword encoded_pem_certificate: Base 64 encoded string of the public certificate in PEM
format that will be used for authenticating the token.
:paramtype encoded_pem_certificate: str
"""
super().__init__(**kwargs)
self.name = name
self.expiry = expiry
self.thumbprint = thumbprint
self.encoded_pem_certificate = encoded_pem_certificate
class TokenCredentialsProperties(_serialization.Model):
"""The properties of the credentials that can be used for authenticating the token.
:ivar certificates:
:vartype certificates:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCertificate]
:ivar passwords:
:vartype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPassword]
"""
_attribute_map = {
"certificates": {"key": "certificates", "type": "[TokenCertificate]"},
"passwords": {"key": "passwords", "type": "[TokenPassword]"},
}
def __init__(
self,
*,
certificates: Optional[List["_models.TokenCertificate"]] = None,
passwords: Optional[List["_models.TokenPassword"]] = None,
**kwargs: Any
) -> None:
"""
:keyword certificates:
:paramtype certificates:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCertificate]
:keyword passwords:
:paramtype passwords:
list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPassword]
"""
super().__init__(**kwargs)
self.certificates = certificates
self.passwords = passwords
class TokenListResult(_serialization.Model):
"""The result of a request to list tokens for a container registry.
:ivar value: The list of tokens. Since this list may be incomplete, the nextLink field should
be used to request the next list of tokens.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Token]
:ivar next_link: The URI that can be used to request the next list of tokens.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[Token]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.Token"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of tokens. Since this list may be incomplete, the nextLink field
should be used to request the next list of tokens.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Token]
:keyword next_link: The URI that can be used to request the next list of tokens.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class TokenPassword(_serialization.Model):
"""The password that will be used for authenticating the token of a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar creation_time: The creation datetime of the password.
:vartype creation_time: ~datetime.datetime
:ivar expiry: The expiry datetime of the password.
:vartype expiry: ~datetime.datetime
:ivar name: The password name "password1" or "password2". Known values are: "password1" and
"password2".
:vartype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPasswordName
:ivar value: The password value.
:vartype value: str
"""
_validation = {
"value": {"readonly": True},
}
_attribute_map = {
"creation_time": {"key": "creationTime", "type": "iso-8601"},
"expiry": {"key": "expiry", "type": "iso-8601"},
"name": {"key": "name", "type": "str"},
"value": {"key": "value", "type": "str"},
}
def __init__(
self,
*,
creation_time: Optional[datetime.datetime] = None,
expiry: Optional[datetime.datetime] = None,
name: Optional[Union[str, "_models.TokenPasswordName"]] = None,
**kwargs: Any
) -> None:
"""
:keyword creation_time: The creation datetime of the password.
:paramtype creation_time: ~datetime.datetime
:keyword expiry: The expiry datetime of the password.
:paramtype expiry: ~datetime.datetime
:keyword name: The password name "password1" or "password2". Known values are: "password1" and
"password2".
:paramtype name: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenPasswordName
"""
super().__init__(**kwargs)
self.creation_time = creation_time
self.expiry = expiry
self.name = name
self.value = None
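# A minimal sketch of a token password request: the caller picks the slot name
# and an optional expiry, while ``value`` is read-only and returned by the
# service. The expiry below is a placeholder.
def _example_token_password() -> TokenPassword:
    """Build an illustrative TokenPassword request for the ``password2`` slot."""
    return TokenPassword(
        name="password2",
        expiry=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),  # placeholder expiry
    )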
class TokenUpdateParameters(_serialization.Model):
"""The parameters for updating a token.
:ivar scope_map_id: The resource ID of the scope map with which the token will be
associated.
:vartype scope_map_id: str
:ivar status: The status of the token, for example enabled or disabled. Known values are: "enabled"
and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenStatus
:ivar credentials: The credentials that can be used for authenticating the token.
:vartype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCredentialsProperties
"""
_attribute_map = {
"scope_map_id": {"key": "properties.scopeMapId", "type": "str"},
"status": {"key": "properties.status", "type": "str"},
"credentials": {"key": "properties.credentials", "type": "TokenCredentialsProperties"},
}
def __init__(
self,
*,
scope_map_id: Optional[str] = None,
status: Optional[Union[str, "_models.TokenStatus"]] = None,
credentials: Optional["_models.TokenCredentialsProperties"] = None,
**kwargs: Any
) -> None:
"""
:keyword scope_map_id: The resource ID of the scope map with which the token will be
associated.
:paramtype scope_map_id: str
:keyword status: The status of the token, for example enabled or disabled. Known values are:
"enabled" and "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenStatus
:keyword credentials: The credentials that can be used for authenticating the token.
:paramtype credentials:
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TokenCredentialsProperties
"""
super().__init__(**kwargs)
self.scope_map_id = scope_map_id
self.status = status
self.credentials = credentials
class TrustPolicy(_serialization.Model):
"""The content trust policy for a container registry.
:ivar type: The type of trust policy. "Notary"
:vartype type: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.TrustPolicyType
:ivar status: The value that indicates whether the policy is enabled or not. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
_attribute_map = {
"type": {"key": "type", "type": "str"},
"status": {"key": "status", "type": "str"},
}
def __init__(
self,
*,
type: Union[str, "_models.TrustPolicyType"] = "Notary",
status: Optional[Union[str, "_models.PolicyStatus"]] = None,
**kwargs: Any
) -> None:
"""
:keyword type: The type of trust policy. "Notary"
:paramtype type: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.TrustPolicyType
:keyword status: The value that indicates whether the policy is enabled or not. Known values
are: "enabled" and "disabled".
:paramtype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.PolicyStatus
"""
super().__init__(**kwargs)
self.type = type
self.status = status
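# A minimal sketch of a content trust policy: ``type`` already defaults to
# "Notary" in the constructor above, so only the status needs to be supplied.
def _example_trust_policy() -> TrustPolicy:
    """Build an illustrative enabled TrustPolicy."""
    return TrustPolicy(status="enabled")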
class UserIdentityProperties(_serialization.Model):
"""UserIdentityProperties.
:ivar principal_id: The principal id of user assigned identity.
:vartype principal_id: str
:ivar client_id: The client id of user assigned identity.
:vartype client_id: str
"""
_attribute_map = {
"principal_id": {"key": "principalId", "type": "str"},
"client_id": {"key": "clientId", "type": "str"},
}
def __init__(self, *, principal_id: Optional[str] = None, client_id: Optional[str] = None, **kwargs: Any) -> None:
"""
:keyword principal_id: The principal id of user assigned identity.
:paramtype principal_id: str
:keyword client_id: The client id of user assigned identity.
:paramtype client_id: str
"""
super().__init__(**kwargs)
self.principal_id = principal_id
self.client_id = client_id
class VirtualNetworkRule(_serialization.Model):
"""Virtual network rule.
All required parameters must be populated in order to send to Azure.
:ivar action: The action of virtual network rule. "Allow"
:vartype action: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Action
:ivar virtual_network_resource_id: Resource ID of a subnet, for example:
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}.
Required.
:vartype virtual_network_resource_id: str
"""
_validation = {
"virtual_network_resource_id": {"required": True},
}
_attribute_map = {
"action": {"key": "action", "type": "str"},
"virtual_network_resource_id": {"key": "id", "type": "str"},
}
def __init__(
self, *, virtual_network_resource_id: str, action: Optional[Union[str, "_models.Action"]] = None, **kwargs: Any
) -> None:
"""
:keyword action: The action of virtual network rule. "Allow"
:paramtype action: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.Action
:keyword virtual_network_resource_id: Resource ID of a subnet, for example:
/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{vnetName}/subnets/{subnetName}.
Required.
:paramtype virtual_network_resource_id: str
"""
super().__init__(**kwargs)
self.action = action
self.virtual_network_resource_id = virtual_network_resource_id
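# Hedged usage sketch (not part of the generated model code): VirtualNetworkRule
# takes one mandatory keyword; the subnet resource ID below is a made-up value
# in the documented format.
#
#     rule = VirtualNetworkRule(
#         virtual_network_resource_id=(
#             "/subscriptions/00000000-0000-0000-0000-000000000000"
#             "/resourceGroups/my-rg/providers/Microsoft.Network"
#             "/virtualNetworks/my-vnet/subnets/default"
#         ),
#         action="Allow",
#     )
#
# virtual_network_resource_id has no default, so it must always be supplied;
# _validation above additionally marks it as required for serialization.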
class Webhook(Resource):
"""An object that represents a webhook for a container registry.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The resource ID.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar location: The location of the resource. This cannot be changed after the resource is
created. Required.
:vartype location: str
:ivar tags: The tags of the resource.
:vartype tags: dict[str, str]
:ivar system_data: Metadata pertaining to creation and last modification of the resource.
:vartype system_data: ~azure.mgmt.containerregistry.v2021_06_01_preview.models.SystemData
:ivar status: The status of the webhook at the time the operation was called. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
:ivar provisioning_state: The provisioning state of the webhook at the time the operation was
called. Known values are: "Creating", "Updating", "Deleting", "Succeeded", "Failed", and
"Canceled".
:vartype provisioning_state: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.ProvisioningState
"""
_validation = {
"id": {"readonly": True},
"name": {"readonly": True},
"type": {"readonly": True},
"location": {"required": True},
"system_data": {"readonly": True},
"provisioning_state": {"readonly": True},
}
_attribute_map = {
"id": {"key": "id", "type": "str"},
"name": {"key": "name", "type": "str"},
"type": {"key": "type", "type": "str"},
"location": {"key": "location", "type": "str"},
"tags": {"key": "tags", "type": "{str}"},
"system_data": {"key": "systemData", "type": "SystemData"},
"status": {"key": "properties.status", "type": "str"},
"scope": {"key": "properties.scope", "type": "str"},
"actions": {"key": "properties.actions", "type": "[str]"},
"provisioning_state": {"key": "properties.provisioningState", "type": "str"},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "_models.WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "_models.WebhookAction"]]] = None,
**kwargs: Any
) -> None:
"""
:keyword location: The location of the resource. This cannot be changed after the resource is
created. Required.
:paramtype location: str
:keyword tags: The tags of the resource.
:paramtype tags: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Known values
are: "enabled" and "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
"""
super().__init__(location=location, tags=tags, **kwargs)
self.status = status
self.scope = scope
self.actions = actions
self.provisioning_state = None
class WebhookCreateParameters(_serialization.Model):
"""The parameters for creating a webhook.
All required parameters must be populated in order to send to Azure.
:ivar tags: The tags for the webhook.
:vartype tags: dict[str, str]
:ivar location: The location of the webhook. This cannot be changed after the resource is
created. Required.
:vartype location: str
:ivar service_uri: The service URI for the webhook to post notifications.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
:ivar status: The status of the webhook at the time the operation was called. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
"""
_validation = {
"location": {"required": True},
}
_attribute_map = {
"tags": {"key": "tags", "type": "{str}"},
"location": {"key": "location", "type": "str"},
"service_uri": {"key": "properties.serviceUri", "type": "str"},
"custom_headers": {"key": "properties.customHeaders", "type": "{str}"},
"status": {"key": "properties.status", "type": "str"},
"scope": {"key": "properties.scope", "type": "str"},
"actions": {"key": "properties.actions", "type": "[str]"},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
service_uri: Optional[str] = None,
custom_headers: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "_models.WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "_models.WebhookAction"]]] = None,
**kwargs: Any
) -> None:
"""
:keyword tags: The tags for the webhook.
:paramtype tags: dict[str, str]
:keyword location: The location of the webhook. This cannot be changed after the resource is
created. Required.
:paramtype location: str
:keyword service_uri: The service URI for the webhook to post notifications.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Known values
are: "enabled" and "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
"""
super().__init__(**kwargs)
self.tags = tags
self.location = location
self.service_uri = service_uri
self.custom_headers = custom_headers
self.status = status
self.scope = scope
self.actions = actions
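# Hedged usage sketch (not part of the generated model code): a minimal create
# payload for a registry webhook.  The location, endpoint URI and scope are
# illustrative values only.
#
#     params = WebhookCreateParameters(
#         location="westeurope",
#         service_uri="https://example.com/registry-events",
#         actions=["push", "delete"],
#         scope="my-repo:*",
#         status="enabled",
#     )
#
# Only location is mandatory; the remaining keywords map onto the
# "properties.*" keys listed in _attribute_map above when serialized.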
class WebhookListResult(_serialization.Model):
"""The result of a request to list webhooks for a container registry.
:ivar value: The list of webhooks. Since this list may be incomplete, the nextLink field should
be used to request the next list of webhooks.
:vartype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Webhook]
:ivar next_link: The URI that can be used to request the next list of webhooks.
:vartype next_link: str
"""
_attribute_map = {
"value": {"key": "value", "type": "[Webhook]"},
"next_link": {"key": "nextLink", "type": "str"},
}
def __init__(
self, *, value: Optional[List["_models.Webhook"]] = None, next_link: Optional[str] = None, **kwargs: Any
) -> None:
"""
:keyword value: The list of webhooks. Since this list may be incomplete, the nextLink field
should be used to request the next list of webhooks.
:paramtype value: list[~azure.mgmt.containerregistry.v2021_06_01_preview.models.Webhook]
:keyword next_link: The URI that can be used to request the next list of webhooks.
:paramtype next_link: str
"""
super().__init__(**kwargs)
self.value = value
self.next_link = next_link
class WebhookUpdateParameters(_serialization.Model):
"""The parameters for updating a webhook.
:ivar tags: The tags for the webhook.
:vartype tags: dict[str, str]
:ivar service_uri: The service URI for the webhook to post notifications.
:vartype service_uri: str
:ivar custom_headers: Custom headers that will be added to the webhook notifications.
:vartype custom_headers: dict[str, str]
:ivar status: The status of the webhook at the time the operation was called. Known values are:
"enabled" and "disabled".
:vartype status: str or ~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:ivar scope: The scope of repositories where the event can be triggered. For example, 'foo:*'
means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar' only.
'foo' is equivalent to 'foo:latest'. Empty means all events.
:vartype scope: str
:ivar actions: The list of actions that trigger the webhook to post notifications.
:vartype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
"""
_attribute_map = {
"tags": {"key": "tags", "type": "{str}"},
"service_uri": {"key": "properties.serviceUri", "type": "str"},
"custom_headers": {"key": "properties.customHeaders", "type": "{str}"},
"status": {"key": "properties.status", "type": "str"},
"scope": {"key": "properties.scope", "type": "str"},
"actions": {"key": "properties.actions", "type": "[str]"},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
service_uri: Optional[str] = None,
custom_headers: Optional[Dict[str, str]] = None,
status: Optional[Union[str, "_models.WebhookStatus"]] = None,
scope: Optional[str] = None,
actions: Optional[List[Union[str, "_models.WebhookAction"]]] = None,
**kwargs: Any
) -> None:
"""
:keyword tags: The tags for the webhook.
:paramtype tags: dict[str, str]
:keyword service_uri: The service URI for the webhook to post notifications.
:paramtype service_uri: str
:keyword custom_headers: Custom headers that will be added to the webhook notifications.
:paramtype custom_headers: dict[str, str]
:keyword status: The status of the webhook at the time the operation was called. Known values
are: "enabled" and "disabled".
:paramtype status: str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookStatus
:keyword scope: The scope of repositories where the event can be triggered. For example,
'foo:*' means events for all tags under repository 'foo'. 'foo:bar' means events for 'foo:bar'
only. 'foo' is equivalent to 'foo:latest'. Empty means all events.
:paramtype scope: str
:keyword actions: The list of actions that trigger the webhook to post notifications.
:paramtype actions: list[str or
~azure.mgmt.containerregistry.v2021_06_01_preview.models.WebhookAction]
"""
super().__init__(**kwargs)
self.tags = tags
self.service_uri = service_uri
self.custom_headers = custom_headers
self.status = status
self.scope = scope
self.actions = actions
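# ---------------------------------------------------------------------------
# Hedged end-of-module sketch (not part of the generated code).  It only runs
# when this file is executed directly and assumes _serialization.Model exposes
# the usual msrest-style as_dict() helper; all values are illustrative.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    update = WebhookUpdateParameters(
        service_uri="https://example.com/registry-events",  # made-up endpoint
        status="enabled",
        scope="hello-world:*",
        actions=["push"],
    )
    print(update.as_dict())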
| [
"[email protected]"
]
| |
96c76ae94d06dfc58e6363603425d800499d1a75 | 9df2fb0bc59ab44f026b0a2f5ef50c72b2fb2ceb | /sdk/ml/azure-ai-ml/azure/ai/ml/identity/_internal/pipeline.py | 66a31affbd4140ddaf931c75a2cb7069bbb5a312 | [
"LicenseRef-scancode-python-cwi",
"LGPL-2.1-or-later",
"PSF-2.0",
"LGPL-2.0-or-later",
"GPL-3.0-or-later",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"Python-2.0",
"MPL-2.0",
"LicenseRef-scancode-other-copyleft",
"HPND",
"ODbL-1.0",
"GPL-3.0-only",
"ZPL-2.1",
"MIT",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-generic-cla"
]
| permissive | openapi-env-test/azure-sdk-for-python | b334a2b65eeabcf9b7673879a621abb9be43b0f6 | f61090e96094cfd4f43650be1a53425736bd8985 | refs/heads/main | 2023-08-30T14:22:14.300080 | 2023-06-08T02:53:04 | 2023-06-08T02:53:04 | 222,384,897 | 1 | 0 | MIT | 2023-09-08T08:38:48 | 2019-11-18T07:09:24 | Python | UTF-8 | Python | false | false | 2,585 | py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from azure.ai.ml._user_agent import USER_AGENT
from azure.core.configuration import Configuration
from azure.core.pipeline import Pipeline
from azure.core.pipeline.policies import (
ContentDecodePolicy,
CustomHookPolicy,
DistributedTracingPolicy,
HeadersPolicy,
HttpLoggingPolicy,
NetworkTraceLoggingPolicy,
ProxyPolicy,
RetryPolicy,
UserAgentPolicy,
)
from azure.core.pipeline.transport import RequestsTransport
def _get_config(**kwargs):
"""Configuration common to a/sync pipelines."""
config = Configuration(**kwargs)
config.custom_hook_policy = CustomHookPolicy(**kwargs)
config.headers_policy = HeadersPolicy(**kwargs)
config.http_logging_policy = HttpLoggingPolicy(**kwargs)
config.logging_policy = NetworkTraceLoggingPolicy(**kwargs)
config.proxy_policy = ProxyPolicy(**kwargs)
config.user_agent_policy = UserAgentPolicy(base_user_agent=USER_AGENT, **kwargs)
return config
def _get_policies(config, _per_retry_policies=None, **kwargs):
policies = [
config.headers_policy,
config.user_agent_policy,
config.proxy_policy,
ContentDecodePolicy(**kwargs),
config.retry_policy,
]
if _per_retry_policies:
policies.extend(_per_retry_policies)
policies.extend(
[
config.custom_hook_policy,
config.logging_policy,
DistributedTracingPolicy(**kwargs),
config.http_logging_policy,
]
)
return policies
def build_pipeline(transport=None, policies=None, **kwargs):
if not policies:
config = _get_config(**kwargs)
config.retry_policy = RetryPolicy(**kwargs)
policies = _get_policies(config, **kwargs)
if not transport:
transport = RequestsTransport(**kwargs)
return Pipeline(transport, policies=policies)
def build_async_pipeline(transport=None, policies=None, **kwargs):
from azure.core.pipeline import AsyncPipeline
if not policies:
from azure.core.pipeline.policies import AsyncRetryPolicy
config = _get_config(**kwargs)
config.retry_policy = AsyncRetryPolicy(**kwargs)
policies = _get_policies(config, **kwargs)
if not transport:
from azure.core.pipeline.transport import AioHttpTransport
transport = AioHttpTransport(**kwargs)
return AsyncPipeline(transport, policies=policies)
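# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): build the synchronous
# pipeline with its default policy chain and send a single GET through it.
# The URL is purely illustrative and azure-core must be importable.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from azure.core.pipeline.transport import HttpRequest

    pipeline = build_pipeline()
    response = pipeline.run(HttpRequest("GET", "https://example.com"))
    print(response.http_response.status_code)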
| [
"[email protected]"
]
| |
e1a369dc2579d3d7f7b2687df356ca92d408e5ca | 6699b8944b71e86725fdc17bb5f9cd69e254b4eb | /leetcode/1448.count-good-nodes-in-binary-tree/solution.py | 93c087524ccb57bbc8d3bb206aa6f474859f9b57 | []
| no_license | jadesym/interview | 6099e663090408f548b4f4b0b17ae90bb60a7d46 | 5b6eecedfa1c7e496bcfe852e2d3896e993ff16e | refs/heads/main | 2023-01-07T21:56:59.063542 | 2022-12-30T20:13:34 | 2022-12-30T20:13:34 | 41,118,644 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Definition for a binary tree node.
# LeetCode normally injects TreeNode at judge time; it is defined here as well
# so the module can be imported and run on its own (the type annotations on
# goodNodes/dfs need the name to exist at import time).
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
def goodNodes(self, root: TreeNode) -> int:
startMax = -(10 ** 5)
return self.dfs(root, startMax)
def dfs(self, node: TreeNode, maxSoFar: int) -> int:
good_nodes = 0
cur_node_max = max(maxSoFar, node.val)
# Handle if root node
if maxSoFar <= node.val:
good_nodes += 1
# print(node.val)
if node.left is not None:
good_nodes += self.dfs(node.left, cur_node_max)
if node.right is not None:
good_nodes += self.dfs(node.right, cur_node_max)
return good_nodes
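# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original submission): the tree below is
# the [3,1,4,3,null,1,5] example from the problem statement.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    root = TreeNode(3,
                    TreeNode(1, TreeNode(3)),
                    TreeNode(4, TreeNode(1), TreeNode(5)))
    print(Solution().goodNodes(root))  # expected output: 4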
| [
"[email protected]"
]
| |
4c2902a924d3a9a14d643a543c10bb7afec70217 | e043f008aaec14e006051c7609748729a78bef90 | /tests/test_falconparser.py | e56ce14595d4071e3f0f9eb1c42050f17872770b | [
"MIT"
]
| permissive | DamianHeard/webargs | 723f38789ae4be61247da2a94ab590e11c808da7 | 4bba0bb4ca7bef3d0c53fab8f9af632e9653b2ed | refs/heads/dev | 2021-01-18T00:25:58.792302 | 2015-11-09T02:07:52 | 2015-11-09T02:07:52 | 45,103,859 | 0 | 0 | null | 2015-10-28T10:05:39 | 2015-10-28T10:05:39 | null | UTF-8 | Python | false | false | 4,147 | py | # -*- coding: utf-8 -*-
import json
import pytest
import falcon
import webtest
from webargs import fields
from webargs.falconparser import parser, use_args, use_kwargs
def use_args_hook(args, context_key='args', **kwargs):
def hook(req, resp, params):
parsed_args = parser.parse(args, req=req, **kwargs)
req.context[context_key] = parsed_args
return hook
@pytest.fixture()
def api():
api_ = falcon.API()
hello_args = {
'name': fields.Str(required=True)
}
class ParseResource(object):
def on_get(self, req, resp):
args = parser.parse(hello_args, req=req, locations=('query', 'headers', 'cookies'))
resp.body = json.dumps(args)
def on_post(self, req, resp):
args = parser.parse(hello_args, req=req, locations=('form', ))
resp.body = json.dumps(args)
def on_put(self, req, resp):
args = parser.parse(hello_args, req=req, locations=('json', ))
resp.body = json.dumps(args)
class UseArgsResource(object):
@use_args(hello_args)
def on_get(self, req, resp, args):
resp.body = json.dumps(args)
class UseArgsWithParamResource(object):
@use_args(hello_args)
def on_get(self, req, resp, args, _id):
args['_id'] = int(_id)
resp.body = json.dumps(args)
class UseKwargsResource(object):
@use_kwargs(hello_args)
def on_get(self, req, resp, name):
resp.body = json.dumps({'name': name})
class AlwaysErrorResource(object):
args = {'bad': fields.Field(validate=lambda x: False)}
def on_get(self, req, resp):
parser.parse(self.args, req=req)
@falcon.before(use_args_hook(hello_args))
class HookResource(object):
def on_get(self, req, resp):
resp.body(req.context['args'])
api_.add_route('/parse', ParseResource())
api_.add_route('/use_args', UseArgsResource())
api_.add_route('/use_args_with_param/{_id}', UseArgsWithParamResource())
api_.add_route('/use_kwargs', UseKwargsResource())
api_.add_route('/hook', UseKwargsResource())
api_.add_route('/error', AlwaysErrorResource())
return api_
@pytest.fixture()
def testapp(api):
return webtest.TestApp(api)
class TestParseResource:
url = '/parse'
def test_parse_querystring(self, testapp):
assert testapp.get(self.url + '?name=Fred').json == {'name': 'Fred'}
def test_parse_form(self, testapp):
res = testapp.post(self.url, {'name': 'Fred'})
assert res.json == {'name': 'Fred'}
def test_parse_json(self, testapp):
res = testapp.put_json(self.url, {'name': 'Fred'})
assert res.json == {'name': 'Fred'}
def test_parse_headers(self, testapp):
res = testapp.get(self.url, headers={'name': 'Fred'})
assert res.json == {'name': 'Fred'}
def test_parsing_cookies(self, testapp):
testapp.set_cookie('name', 'Fred')
assert testapp.get(self.url).json == {'name': 'Fred'}
class TestErrorHandler:
url = '/error'
def test_error_handler_returns_422_response(self, testapp):
res = testapp.get(self.url + '?bad=42', expect_errors=True)
assert res.status_code == 422
assert 'errors' in res.json
assert 'bad' in res.json['errors']
assert res.json['errors']['bad'] == ['Invalid value.']
class TestUseArgsResource:
url = '/use_args'
def test_parse_querystring(self, testapp):
assert testapp.get(self.url + '?name=Fred').json == {'name': 'Fred'}
class TestUseArgsWithParamResource:
url = '/use_args_with_param/42'
def test_parse_querystring(self, testapp):
assert testapp.get(self.url + '?name=Fred').json == {'name': 'Fred', '_id': 42}
class TestUseKwargsResource:
url = '/use_kwargs'
def test_parse_querystring(self, testapp):
assert testapp.get(self.url + '?name=Fred').json == {'name': 'Fred'}
class TestHookResource:
url = '/hook'
def test_parse_querystring(self, testapp):
assert testapp.get(self.url + '?name=Fred').json == {'name': 'Fred'}
| [
"[email protected]"
]
| |
8e55286d2adba619b99dc413e3201836767bb789 | a88a99fb3f754649db06ad86d22b5cb0d2d1e19c | /scholariumat/users/migrations/0005_auto_20181125_1759.py | 37e390c09b8e19e06e7d8ed6a47ff1bf93ab1a89 | [
"MIT"
]
| permissive | valuehack/scholariumat | 91ec59647948759d917ce7077d06b0aa9618c807 | 47c13f3429b95b9ad5ca59b45cf971895260bb5c | refs/heads/master | 2022-12-07T22:20:23.967854 | 2020-04-09T22:05:52 | 2020-04-09T22:05:52 | 135,466,121 | 0 | 3 | MIT | 2022-12-06T18:38:22 | 2018-05-30T15:55:14 | JavaScript | UTF-8 | Python | false | false | 540 | py | # Generated by Django 2.0.9 on 2018-11-25 16:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0004_auto_20181120_1929'),
]
operations = [
migrations.RenameField(
model_name='profile',
old_name='name',
new_name='last_name',
),
migrations.AddField(
model_name='profile',
name='first_name',
field=models.CharField(blank=True, max_length=200),
),
]
| [
"[email protected]"
]
| |
a2156bc789a1d722ae16fc02c3016a476e85d470 | 41a4ef26cf3b4710dfa6fe3f1e88a935bb909654 | /utils/logger.py | 5a8cecd51025f9a4557d25d6b86232b0ad7b72a8 | []
| no_license | little-alexandra/attention_ocr | c6c0846342f947bbb8697f99e02cdd5ce2c276c2 | 475273573ae02efe1c7c1ba3905939580d26876e | refs/heads/master | 2020-11-30T10:01:52.232714 | 2019-12-18T09:35:08 | 2019-12-18T09:35:08 | 230,371,558 | 1 | 0 | null | 2019-12-27T04:13:46 | 2019-12-27T04:13:45 | null | UTF-8 | Python | false | false | 1,463 | py | import logging
import time
import os
from logging import handlers
import datetime
import tensorflow as tf
debug=True
def _p(tensor,msg):
if (debug):
dt = datetime.datetime.now().strftime('TF_DEBUG: %m-%d %H:%M:%S: ')
msg = dt + msg
return tf.Print(tensor, [tensor], msg,summarize= 100)
else:
return tensor
def _p_shape(tensor,msg):
if (debug):
dt = datetime.datetime.now().strftime('TF_DEBUG: %m-%d %H:%M:%S: ')
msg = dt + msg
return tf.Print(tensor, [tf.shape(tensor)], msg,summarize= 100)
else:
return tensor
def init(level=logging.DEBUG,when="D",backup=7,_format="%(levelname)s: %(asctime)s: %(filename)s:%(lineno)d行 %(message)s"):
train_start_time = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
filename = 'logs/ocr-attention-'+train_start_time + '.log'
_dir = os.path.dirname(filename)
if not os.path.isdir(_dir):os.makedirs(_dir)
logger = logging.getLogger()
if not logger.handlers:
formatter = logging.Formatter(_format)
logger.setLevel(level)
handler = handlers.TimedRotatingFileHandler(filename, when=when, backupCount=backup,encoding="utf-8")
handler.setLevel(level)
handler.setFormatter(formatter)
logger.addHandler(handler)
handler = logging.StreamHandler()
handler.setLevel(level)
handler.setFormatter(formatter)
logger.addHandler(handler)
| [
"[email protected]"
]
| |
9e3769ed23384bf504e6dc9a8a92c51ee8651186 | d5ad13232e3f1ced55f6956bc4cbda87925c8085 | /cc_mcc_seq/coverage/coverage_stat/1_coverage_stat.py | 495a50838e4e7cb74a40295b588b592c6c6f5ef4 | []
| no_license | arvin580/SIBS | c0ba9a8a41f59cb333517c286f7d80300b9501a2 | 0cc2378bf62359ec068336ea4de16d081d0f58a4 | refs/heads/master | 2021-01-23T21:57:35.658443 | 2015-04-09T23:11:34 | 2015-04-09T23:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | chr=['chr1','chr2','chr3','chr4','chr5','chr6','chr7','chr8','chr9','chr10','chr11','chr12','chr13','chr14','chr15','chr16','chr17','chr18','chr19','chr20','chr21','chr22','chrX','chrY']
for sample in range(3,13) :
for ch in chr :
inFile=open('../fudan1.coverage')
list1=list()
for line in inFile :
line=line.strip()
fields=line.split('\t')
if fields[0].find(ch)!=-1 :
list1.append(fields[sample])
ouFile=open('fudan1.coverage.'+ch+'.'+str(sample-3),'w')
for item in list1 :
ouFile.write(item+'\n')
ouFile.close()
inFile.close()
| [
"[email protected]"
]
| |
f6dd0869441d5a356d50e7e10e961dee504deb0e | 46734ec336f502dc3d69e31428bacb9cef222a70 | /examples/bq_file_load_benchmark/load_benchmark_tools/benchmark_load_table.py | cee84591f2ea51e813d6e1c94db979c177d7a5b4 | [
"Apache-2.0"
]
| permissive | tims/professional-services | 93ef3d3083d73991d4faba2c40a8ab4ea550cca8 | 8c610e259217ad83dbbceeb388aa1da828d1343b | refs/heads/master | 2020-12-07T20:01:07.550995 | 2020-02-03T07:43:31 | 2020-02-03T07:43:31 | 232,788,171 | 0 | 0 | Apache-2.0 | 2020-01-09T10:58:05 | 2020-01-09T10:58:04 | null | UTF-8 | Python | false | false | 10,178 | py | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import re
import time
from google.api_core import exceptions
from google.cloud import bigquery
from google.cloud import storage
from generic_benchmark_tools import benchmark_parameters
from generic_benchmark_tools import benchmark_result_util
from generic_benchmark_tools import table_util
from generic_benchmark_tools import file_constants
class BenchmarkLoadTable(object):
"""Represents a BigQuery load table.
Holds methods for creating a table in BigQuery and loading data from GCS
into the table.
Attributes:
benchmark_name(str): The name of the benchmark test.
bq_project(str): ID of the project that holds the BigQuery dataset
and table that the data is loaded into.
bq_client(google.cloud.bigquery.client.Client): Client to hold
configurations needed for BigQuery API requests.
gcs_project(str): ID of the project that holds the GCS bucket
where the files to be loaded are stored.
gcs_client(google.cloud.storage.client.Client): Client to hold
configurations needed for GCS API requests.
staging_project(str): ID of the project that holds the staging tables
used to create the file combinations.
staging_dataset_id(str): ID of the dataset that contains the staging
table that the files loaded into the benchmark table were
generated from.
dataset_id(str): ID of the dataset that holds the benchmark table.
dataset_ref(google.cloud.bigquery.dataset.DatasetReference): Pointer
to the dataset that holds the benchmark table.
bucket_name(str): Name of the bucket that holds the files to be loaded
into the benchmark table.
path(str): Path of the files in GCS to be loaded into the benchmark
table. Path does not include the full GCS URI.
uri(str): Full GCS URI of the files to be loaded into the benchmark
table. Includes the 'gs://' prefix, the bucket name, and path
above.
results_table_name(str): Name of the BigQuery table that the
benchmark table's load results will be inserted into.
results_table_dataset_id(str): Name of the BigQuery dataset that the
benchmark table's load results will be inserted into.
results_table(google.cloud.bigquery.table.Table): BigQuery table that
the benchmark table's load results will be inserted into.
bq_logs_dataset(str): Name of dataset hold BQ logs table.
file_type(str): Type of files that will be loaded from GCS into
the benchmark table (i.e. csv, avro, parquet, etc).
compression_format(bigquery.job.Compression): Object representing the
compression of the file.
benchmark_table_util(load_benchmark_tools.table_util.TableUtil): Object to
assist with the handling of the benchmark table's creation
and properties.
num_columns(int): Number of columns in the benchmark table.
column_types(str): Representation of the types of columns in the
benchmark table(50_STRING_50_NUMERIC, 100_STRING, etc)
bq_schema(List[google.cloud.bigquery.schema.SchemaField]): Schema of
the benchmark table.
load_job(google.cloud.bigquery.job.LoadJob): Object for loading data
from GCS to BigQuery tables.
job_destination_table(str): Name of the destination table. Generated
using the current timestamp converted to a string.
"""
def __init__(
self,
benchmark_name,
bq_project,
gcs_project,
staging_project,
staging_dataset_id,
dataset_id,
bucket_name,
path,
results_table_name,
results_table_dataset_id,
bq_logs_dataset,
):
self.benchmark_name = benchmark_name
self.bq_project = bq_project
self.bq_client = bigquery.Client(
project=self.bq_project
)
self.gcs_project = gcs_project
self.gcs_client = storage.Client(
project=self.gcs_project
)
self.staging_project = staging_project
self.staging_dataset_id = staging_dataset_id
self.dataset_id = dataset_id
self.dataset_ref = self.bq_client.dataset(self.dataset_id)
self.bucket_name = bucket_name
self.path = path
self.uri = 'gs://{0:s}/{1:s}'.format(self.bucket_name, path)
self.results_table_name = results_table_name
self.results_table_dataset_id = results_table_dataset_id
self.results_table_dataset_ref = self.bq_client.dataset(
results_table_dataset_id
)
results_table_ref = self.results_table_dataset_ref.table(
self.results_table_name
)
self.results_table = self.bq_client.get_table(results_table_ref)
self.bq_logs_dataset = bq_logs_dataset
self.file_type = None
self.compression_format = None
self.benchmark_table_util = None
self.num_columns = None
self.column_types = None
self.bq_schema = None
self.load_job = None
self.job_destination_table = None
self.gather_file_properties()
def gather_file_properties(self):
"""Gathers properties of the files loaded into the benchmark table.
"""
# gather file properties from the files' path
# pylint: disable=line-too-long
benchmark_details_pattern = \
r'fileType=(\w+)/compression=(\w+)/numColumns=(\d+)/columnTypes=(\w+)/numFiles=(\d+)/tableSize=(\w+)'
self.file_type, compression, self.num_columns, self.column_types, \
num_files, table_size = \
re.findall(benchmark_details_pattern, self.path)[0]
self.compression_format = (file_constants.FILE_CONSTANTS
['compressionFormats'][compression])
# get schema from the staging table that the file was generated from
source_staging_table_name = '{0:s}_{1:s}'.format(
self.column_types,
self.num_columns
)
source_staging_table_util = table_util.TableUtil(
source_staging_table_name,
self.staging_dataset_id,
project=self.staging_project,
)
if self.file_type == 'parquet' or self.file_type == 'avro':
self.bq_schema = None
else:
self.bq_schema = source_staging_table_util.table.schema
def create_table(self):
"""Creates the bencmark table in BigQuery.
The method creates an empty table using the schema from the staging
table that the files were generated from. It uses the current
timestamp to name the benchmark table to create a random, unique name.
"""
self.job_destination_table = '{0:d}'.format(int(time.time()))
self.benchmark_table_util = table_util.TableUtil(
self.job_destination_table,
self.dataset_id,
bq_schema=self.bq_schema,
)
self.benchmark_table_util.create_table()
def load_from_gcs(self):
"""Loads GCS files into the benchmark table and stores results.
Creates and runs a load job to load files the GCS URI into the
benchmark table. Then uses benchmark_result_util.BenchmarkResultUtil
to gather results and generate a results row, which it then inserts
into the BigQuery results table.
Raises:
google.api_core.exceptions.BadRequest: 400 Error while reading data,
error message: Total data size exceeds max allowed size
"""
job_type = benchmark_parameters.BENCHMARK_PARAMETERS[
'benchmark_names'][self.benchmark_name]['type']
source_formats = file_constants.FILE_CONSTANTS['sourceFormats']
job_config = bigquery.LoadJobConfig()
job_config.source_format = source_formats[self.file_type]
if self.file_type == 'csv':
job_config.skip_leading_rows = 1
self.load_job = self.bq_client.load_table_from_uri(
source_uris='{0:s}/*'.format(self.uri),
destination=self.dataset_ref.table(self.job_destination_table),
job_config=job_config,
)
logging.info('Started load job {0:s} for table {1:s}.'.format(
self.load_job.job_id,
self.job_destination_table
))
try:
self.load_job.result()
result = benchmark_result_util.LoadBenchmarkResultUtil(
job=self.load_job,
job_type=job_type,
benchmark_name=self.benchmark_name,
project_id=self.bq_project,
result_table_name=self.results_table_name,
result_dataset_id=self.results_table_dataset_id,
bq_logs_dataset=self.bq_logs_dataset,
job_source_uri='{0:s}/*'.format(self.uri),
load_table_id=self.job_destination_table,
load_dataset_id=self.dataset_id
)
result.insert_results_row()
except exceptions.BadRequest as e:
logging.error(e.message)
self.bq_client.delete_table(self.benchmark_table_util.table_ref)
logging.info('Deleting table {0:s}'.format(
self.job_destination_table
))
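# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): the project, dataset,
# bucket and path values below are made up; they only illustrate the intended
# call order of BenchmarkLoadTable.
#
#     table = BenchmarkLoadTable(
#         benchmark_name='FILE_LOAD',
#         bq_project='my-bq-project',
#         gcs_project='my-gcs-project',
#         staging_project='my-staging-project',
#         staging_dataset_id='staging',
#         dataset_id='benchmark_loads',
#         bucket_name='my-benchmark-bucket',
#         path='fileType=csv/compression=none/numColumns=10/columnTypes=100_STRING/numFiles=1/tableSize=10MB',
#         results_table_name='load_results',
#         results_table_dataset_id='results',
#         bq_logs_dataset='bq_logs',
#     )
#     table.create_table()
#     table.load_from_gcs()
# ---------------------------------------------------------------------------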
| [
"[email protected]"
]
| |
2f0c21da46fc0a27a43c211905c51a9b98e78cad | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/ads/googleads/v4/googleads-py/google/ads/googleads/v4/resources/types/campaign_criterion_simulation.py | b270fa50fac0d5f5919e4dcac9d75a76b8179a43 | [
"Apache-2.0"
]
| permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,103 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v4.common.types import simulation
from google.ads.googleads.v4.enums.types import simulation_modification_method
from google.ads.googleads.v4.enums.types import simulation_type
from google.protobuf import wrappers_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v4.resources',
marshal='google.ads.googleads.v4',
manifest={
'CampaignCriterionSimulation',
},
)
class CampaignCriterionSimulation(proto.Message):
r"""A campaign criterion simulation. Supported combinations of
advertising channel type, criterion ids, simulation type and
simulation modification method is detailed below respectively.
1. SEARCH - 30000,30001,30002 - BID_MODIFIER - UNIFORM
2. SHOPPING - 30000,30001,30002 - BID_MODIFIER - UNIFORM
3. DISPLAY - 30001 - BID_MODIFIER - UNIFORM
Attributes:
resource_name (str):
Output only. The resource name of the campaign criterion
simulation. Campaign criterion simulation resource names
have the form:
``customers/{customer_id}/campaignCriterionSimulations/{campaign_id}~{criterion_id}~{type}~{modification_method}~{start_date}~{end_date}``
campaign_id (google.protobuf.wrappers_pb2.Int64Value):
Output only. Campaign ID of the simulation.
criterion_id (google.protobuf.wrappers_pb2.Int64Value):
Output only. Criterion ID of the simulation.
type_ (google.ads.googleads.v4.enums.types.SimulationTypeEnum.SimulationType):
Output only. The field that the simulation
modifies.
modification_method (google.ads.googleads.v4.enums.types.SimulationModificationMethodEnum.SimulationModificationMethod):
Output only. How the simulation modifies the
field.
start_date (google.protobuf.wrappers_pb2.StringValue):
Output only. First day on which the
simulation is based, in YYYY-MM-DD format.
end_date (google.protobuf.wrappers_pb2.StringValue):
Output only. Last day on which the simulation
is based, in YYYY-MM-DD format.
bid_modifier_point_list (google.ads.googleads.v4.common.types.BidModifierSimulationPointList):
Output only. Simulation points if the simulation type is
BID_MODIFIER.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
campaign_id = proto.Field(
proto.MESSAGE,
number=2,
message=wrappers_pb2.Int64Value,
)
criterion_id = proto.Field(
proto.MESSAGE,
number=3,
message=wrappers_pb2.Int64Value,
)
type_ = proto.Field(
proto.ENUM,
number=4,
enum=simulation_type.SimulationTypeEnum.SimulationType,
)
modification_method = proto.Field(
proto.ENUM,
number=5,
enum=simulation_modification_method.SimulationModificationMethodEnum.SimulationModificationMethod,
)
start_date = proto.Field(
proto.MESSAGE,
number=6,
message=wrappers_pb2.StringValue,
)
end_date = proto.Field(
proto.MESSAGE,
number=7,
message=wrappers_pb2.StringValue,
)
bid_modifier_point_list = proto.Field(
proto.MESSAGE,
number=8,
oneof='point_list',
message=simulation.BidModifierSimulationPointList,
)
__all__ = tuple(sorted(__protobuf__.manifest))
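# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the generated code): constructs the message
# with made-up IDs, purely to show the wrapper-type and enum fields in use.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    example = CampaignCriterionSimulation(
        resource_name=(
            "customers/1234567890/campaignCriterionSimulations/"
            "111~30000~BID_MODIFIER~UNIFORM~2020-01-01~2020-01-07"
        ),
        campaign_id=wrappers_pb2.Int64Value(value=111),
        type_=simulation_type.SimulationTypeEnum.SimulationType.BID_MODIFIER,
    )
    print(example.resource_name)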
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
f7ca419508798f1929999e5cb30894c192fb6861 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/70/usersdata/164/36211/submittedfiles/impedimento.py | d16709d5193363df8e0a3b6b23963d9cbe92b2b9 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | # -*- coding: utf-8 -*-
import math
# START YOUR CODE HERE
L=int(input('Enter the position of L: '))
R=int(input('Enter the position of R: '))
D=int(input('Enter the position of D: '))
if (R>50) and (L<R) and (R>D):
print('S')
else:
print('N') | [
"[email protected]"
]
| |
f1bdea1ca514039345f8ed87d738ee50d65e70be | eacfc1c0b2acd991ec2cc7021664d8e79c9e58f6 | /ccpnmr2.4/python/ccp/format/ansig/sequenceIO.py | c11cb240aad4a5e8c13d21ebc8b1168fd7d87865 | []
| no_license | edbrooksbank/ccpnmr2.4 | cfecb0896dcf8978d796e6327f7e05a3f233a921 | f279ca9bb2d972b1ce075dad5fcc16e6f4a9496c | refs/heads/master | 2021-06-30T22:29:44.043951 | 2019-03-20T15:01:09 | 2019-03-20T15:01:09 | 176,757,815 | 0 | 1 | null | 2020-07-24T14:40:26 | 2019-03-20T14:59:23 | HTML | UTF-8 | Python | false | false | 5,597 | py | """
======================COPYRIGHT/LICENSE START==========================
sequenceIO.py: I/O for Ansig sequence files
Copyright (C) 2005-2009 Wim Vranken (European Bioinformatics Institute)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- PDBe website (http://www.ebi.ac.uk/pdbe/)
- contact Wim Vranken ([email protected])
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Wim F. Vranken, Wayne Boucher, Tim J. Stevens, Rasmus
H. Fogh, Anne Pajon, Miguel Llinas, Eldon L. Ulrich, John L. Markley, John
Ionides and Ernest D. Laue (2005). The CCPN Data Model for NMR Spectroscopy:
Development of a Software Pipeline. Proteins 59, 687 - 696.
===========================REFERENCE END===============================
"""
import os
# Import general stuff
from memops.universal.Util import returnInt
from ccp.format.ansig.generalIO import AnsigGenericFile
from ccp.format.general.formatIO import Sequence, SequenceElement
#####################
# Class definitions #
#####################
class AnsigSequenceFile(AnsigGenericFile):
def initialize(self):
self.sequences = []
def read(self,verbose = 0):
if verbose == 1:
print "Reading ansig sequence file %s" % self.name
self.sequences.append(AnsigSequence())
lineErrors = []
validLines = 0
fin = open(self.name, 'rU')
# Read first line
line = fin.readline()
while line:
cols = line.split()
if len(cols) == 0 or self.patt['exclamation'].search(line):
pass
elif cols[0] == 'sequence':
self.sequences[-1].molName = cols[1]
elif cols[0] == 'residue':
# Get remarks on residue
details = line.split('!')
if len(details) > 1:
details = details[1]
else:
details = None
if cols[1] == 'lig':
self.sequences.append(AnsigSequence())
self.sequences[-1].elements.append(AnsigSequenceElement(1,cols[2],details = details,ligand = True))
else:
self.sequences[-1].elements.append(AnsigSequenceElement(cols[1],cols[2],details = details))
validLines += 1
else:
lineErrors.append(line)
line = fin.readline()
fin.close()
#
# Check
#
if len(lineErrors) > min(5,validLines * 0.5):
self.sequences = []
print " Bad %s format lines:%s" % (self.format,self.newline)
for lineError in lineErrors:
print lineError
def write(self,verbose = 0):
if verbose == 1:
print "Writing ansig sequence file %s" % self.name
if len(self.sequences) > 1:
print "Warning: multiple sequences - writing to same file."
fout = open(self.name,'w')
for sequence in self.sequences:
#
# Writing header
#
fout.write("! Ansig sequence file" + self.newline)
fout.write("!" + self.newline)
fout.write("! written from Ansig sequenceIO in ccpNmr formatConverter suite" + self.newline)
fout.write("!" + (self.newline * 2))
fout.write("sequence %s" % sequence.molName + self.newline)
#
# Write seqCode + code3Letter (lowercase with first uppercase)
#
for residue in sequence.elements:
resLabel = residue.code3Letter.lower().capitalize()
if residue.details:
addString = " ! %s" % residue.details
else:
addString = ""
if not residue.ligand:
fout.write(" residue %5d %3s%s" % (residue.seqCode,resLabel,addString))
else:
fout.write(" residue %5s %3s%s" % ('lig',resLabel,addString))
fout.write(self.newline)
fout.write("end_sequence" + self.newline)
fout.close()
AnsigSequence = Sequence
class AnsigSequenceElement(SequenceElement):
def setFormatSpecific(self,*args,**keywds):
if keywds.has_key('details') and keywds['details'] != None:
self.details = keywds['details'].strip()
else:
self.details = None
if keywds.has_key('ligand'):
self.ligand = True
else:
self.ligand = False
| [
"[email protected]"
]
| |
a9dcfe05f4c4e478e4587c722f15febc52961ea1 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_welded.py | 9d8c71c76ac244c69252e3e58618cb575c8c845f | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py |
#calss header
class _WELDED():
def __init__(self,):
self.name = "WELDED"
self.definitions = weld
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['weld']
| [
"[email protected]"
]
| |
3e436904f0f2dde6f5b4715e4ef0bab9ee10fb76 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_200/43.py | 066bc53d2e703644911d495cf32ef62e7768e710 | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 604 | py | # Time: O((logn)^2)
# Space: O(logn)
def tidy_numbers():
digits = map(int, list(raw_input().strip()))
for i in reversed(xrange(1, len(digits))):
if digits[i] == 0 or digits[i] < digits[i-1]:
for j in xrange(i, len(digits)):
digits[j] = 9
for j in reversed(xrange(i)):
if digits[j] != 0:
digits[j] -= 1
break
else:
digits[j] = 9
return int("".join(map(str, digits)))
for case in xrange(input()):
print 'Case #%d: %s' % (case+1, tidy_numbers())
| [
"[email protected]"
]
| |
7a48069b4fd1080f251a258b80c72b101888b83c | d571d407cfda435fcab8b7ccadb1be812c7047c7 | /examples/features/switch.py | 35af4fcb1a13395b4ea7bb6a0625647b1be3beab | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
]
| permissive | guildai/guildai | 2d8661a2a6bf0d1ced6334095c8bf5a8e391d8af | 149055da49f57eaf4aec418f2e339c8905c1f02f | refs/heads/main | 2023-08-25T10:09:58.560059 | 2023-08-12T20:19:05 | 2023-08-12T20:19:05 | 105,057,392 | 833 | 86 | Apache-2.0 | 2023-08-07T19:34:27 | 2017-09-27T18:57:50 | Python | UTF-8 | Python | false | false | 112 | py | a = False
b = False
c = False
print("feature a: %s" % a)
print("feature b: %s" % b)
print("feature c: %s" % c)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.