code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
from zabbix_enums import _ZabbixEnum
class HostInterfaceMain(_ZabbixEnum):
    """Flag marking an interface as the default one for its host.

    Only one interface of a given type can be the default on a host.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#host-interface
    """
    NO = 0   # not the default interface
    YES = 1  # default interface for its type
class HostInterfaceType(_ZabbixEnum):
    """Kind of monitoring interface configured on a host.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#host-interface
    """
    AGENT = 1
    SNMP = 2
    IPMI = 3
    JMX = 4
class HostIntrefaceUseIP(_ZabbixEnum):
    """Whether the connection should be made via IP.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#host-interface
    """
    NO = 0   # connect via DNS name
    YES = 1  # connect via IP address


# NOTE: the class name above misspells "Interface"; it is kept as-is for
# backward compatibility with existing callers. New code should use this
# correctly spelled alias instead.
HostInterfaceUseIP = HostIntrefaceUseIP
class DetailsTagVersion(_ZabbixEnum):
    """SNMP protocol version used by an SNMP interface.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#details-tag
    """
    SNMP1 = 1
    SNMP2 = 2
    SNMP3 = 3
class DetailsTagBulk(_ZabbixEnum):
    """Whether bulk SNMP requests are enabled for the interface.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#details-tag
    """
    NO = 0
    YES = 1
class DetailsTagSecurityLevel(_ZabbixEnum):
    """Security level of an SNMPv3 interface.

    Used only by SNMPv3 interfaces.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#details-tag
    """
    NOAUTHNOPRIV = 0  # no authentication, no privacy
    AUTHNOPRIV = 1    # authentication without privacy
    AUTHPRIV = 2      # authentication and privacy
class DetailsTagAuthProtocol(_ZabbixEnum):
    """Authentication protocol of an SNMPv3 interface.

    Used only by SNMPv3 interfaces.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#details-tag
    """
    MD5 = 0
    SHA = 1
class DetailsTagPrivProtocol(_ZabbixEnum):
    """Privacy (encryption) protocol of an SNMPv3 interface.

    Used only by SNMPv3 interfaces.

    https://www.zabbix.com/documentation/5.2/en/manual/api/reference/hostinterface/object#details-tag
    """
    DES = 0
    AES = 1
import time
try:
import threading
except ImportError:
import dummy_threading as threading
from pyzabbix import ZabbixMetric, ZabbixSender, ZabbixResponse
from zbxepics.logging import logger
from zbxepics.casender.peekqueue import PriorityPeekQueue
from zbxepics.casender.item import MonitorItemFactory, IntervalItemFactory
class ZabbixSenderCA(object):
    """Collect EPICS CA values and send them as metrics to a Zabbix server.

    Attributes
    ----------
    _monitor_items : list of zbxepics.casender.item.MonitorItem
        items whose values are sent on every pass of the run loop
    _interval_item_q : zbxepics.casender.peekqueue.PriorityPeekQueue
        priority queue of (next runtime, item) tuples for interval items
    zbx_sender : pyzabbix.ZabbixSender
        Zabbix sender to send metrics to Zabbix server
    __is_stop : threading.Event
        set once the run loop has fully stopped
    __stop_request : bool
        request flag used to stop the running server
    _is_running : bool
        whether server is running or not
    """

    def __init__(self, zabbix_server='127.0.0.1', zabbix_port=10051,
                 use_config=None, items=None):
        """
        Parameters
        ----------
        zabbix_server : str
            Zabbix server ip address (default is '127.0.0.1')
        zabbix_port : int
            Zabbix server port (default is 10051)
        use_config : str or bool
            Path to zabbix_agentd.conf file to load settings from.
            If value is 'True' then default config path will used:
            /etc/zabbix/zabbix_agentd.conf
        items : dict
            List of sender items.
            (Prerequisite keys: host pv interval, Optional: item_key func)
        """
        self._monitor_items = []
        self._interval_item_q = PriorityPeekQueue()
        self.zbx_sender = ZabbixSender(zabbix_server,
                                       zabbix_port,
                                       use_config)
        self.__is_stop = threading.Event()
        self.__stop_request = False
        self._is_running = False
        if items:
            for item in items:
                self.add_item(item)

    def add_item(self, item):
        """Add a sender item to the appropriate container.

        Parameters
        ----------
        item : dict
            dict of item with following keys
            ('host', 'pv', 'interval', 'item_key', 'func')

        Returns
        -------
        item.MonitorItem or item.IntervalItem or None
            the added item, or None when the item could not be created
        """
        try:
            host = item['host']
            pvname = item['pv']
            interval = item['interval']
            item_key = item.get('item_key')
            if interval == 'monitor':
                sender_item = (MonitorItemFactory
                               .create_item(host, pvname, item_key))
                self._monitor_items.append(sender_item)
            else:
                func = item['func']
                sender_item = (IntervalItemFactory
                               .create_item(host, pvname, interval,
                                            func, item_key))
                # Runtime 0 forces the item to fire on the first pass.
                self._interval_item_q.put((0, sender_item))
        except Exception as err:
            # Fix: failures were silently swallowed before; log them so a
            # misconfigured item definition is visible to the operator.
            logger.error('%s: failed to add item %r (%s)',
                         self.__class__.__name__, item, err)
            sender_item = None
        return sender_item

    def __get_interval_items(self):
        """Return the interval items whose scheduled runtime has passed.

        Returns
        -------
        list of item.IntervalItem
            items due to run now; each is rescheduled for its next run
        """
        if self._interval_item_q.empty():
            return []
        items = []
        now = int(time.time())
        while now >= self._interval_item_q.peek()[0]:
            _, item = self._interval_item_q.get()
            items.append(item)
            # Reschedule the item for its next run.
            runtime = now + item.interval
            self._interval_item_q.put((runtime, item))
        return items

    def _create_metrics(self, items):
        """Return a list of metrics collected from the given items.

        Parameters
        ----------
        items : list of item.MonitorItem or item.IntervalItem
            items to get metrics from

        Returns
        -------
        list of pyzabbix.ZabbixMetric
            metrics gathered from the items
        """
        metrics = []
        for item in items:
            try:
                zm = item.get_metrics()
                metrics.extend(zm)
            except Exception:
                # Best effort: one failing item must not block the others.
                pass
        return metrics

    def _send_metrics(self, items):
        """Send metrics collected from the given items to the Zabbix server.

        Parameters
        ----------
        items : list of item.MonitorItem or item.IntervalItem
        """
        metrics = self._create_metrics(items)
        if not metrics:
            return
        result = self.zbx_sender.send(metrics)
        logger.debug('%s: %s',
                     self.__class__.__name__,
                     result)

    def run(self):
        """Start ca sender server.

        Raises
        ------
        Exception
            if no items have been registered
        """
        if (not self._monitor_items
                and self._interval_item_q.empty()):
            # Do not start if items is empty
            raise Exception('Sender process have no items.')
        self._is_running = True
        self.__is_stop.clear()
        try:
            while not self.__stop_request:
                items = []
                items.extend(self._monitor_items)
                items.extend(self.__get_interval_items())
                self._send_metrics(items)
                time.sleep(1)
        except Exception as err:
            logger.error(err)
        finally:
            logger.info('%s: %s',
                        self.__class__.__name__,
                        'Sender process stopped.')
            self.__stop_request = False
            self.__is_stop.set()
            self._is_running = False

    def stop(self):
        """Stop the run loop.

        NOTE(review): this blocks forever if called before run() has ever
        started, since __is_stop is only set by run() -- confirm callers
        always start the server first.
        """
        self.__stop_request = True
        self.__is_stop.wait()

    @property
    def is_running(self):
        """bool: Whether server is running or not"""
        return self._is_running
try:
import threading
except ImportError:
import dummy_threading as threading
from epics import PV
from pyzabbix import ZabbixMetric
class IntervalItem(object):
    """Base class for a PV item whose value is reported at a fixed interval.

    Attributes
    ----------
    host : str
        host name of item
    pv : epics.PV
        monitored PV object
    item_key : str
        item key of item (defaults to the PV name)
    interval : float
        send interval in seconds
    _value : obj
        value accumulated since the last send
    __last_value : obj
        value handed out by the previous send
    _lock : threading.Lock
        guards access to the accumulated value
    """

    # Fallback used when the requested interval is missing or below 1s.
    DEFAULT_INTERVAL = 30.0

    def __init__(self, host, pvname, interval=30.0, item_key=None):
        """
        Parameters
        ----------
        host : str
            host name of item
        pvname : str
            pv name to monitor
        interval : float
            interval in seconds to send (default is 30.0)
        item_key : str
            item key of item (default is None)
        """
        self.host = str(host)
        pv_name = str(pvname)
        self.pv = PV(pv_name,
                     connection_callback=self._on_connection_change,
                     callback=self._on_value_change)
        self.item_key = item_key if item_key is not None else pv_name
        if interval is None or interval < 1.0:
            self.interval = self.DEFAULT_INTERVAL
        else:
            self.interval = interval
        self._value = None
        self.__last_value = None
        self._lock = threading.Lock()
        self._setup()

    def _setup(self):
        """Hook for subclasses to (re)initialize their accumulators."""
        pass

    def _on_connection_change(self, pvname=None, conn=None, **kws):
        """Handle every PV connection state change.

        Parameters
        ----------
        pvname : str
            the name of the pv
        conn : bool
            specifying whether the PV is now connected

        Notes
        -----
        May be overridden by a subclass.
        """
        if conn:
            return
        # Drop any partially accumulated value on disconnect.
        self._value = None
        self._setup()

    def _on_value_change(self, value=None, timestamp=None, **kw):
        """Handle every PV monitor update.

        Parameters
        ----------
        value : obj
            updated value
        timestamp : float
            timestamp of pv relative to POSIX time origin

        Notes
        -----
        May be overridden by a subclass.
        """
        pass

    def __get_value(self):
        """Return the value to report, or None when disconnected.

        Returns
        -------
        obj
            the freshly accumulated value when one arrived since the last
            call, otherwise the value from the previous call
        """
        if not self.pv.connected:
            return None
        with self._lock:
            if self._value is not None:
                # Consume the accumulated value and restart accumulation.
                self.__last_value = self._value
                self._value = None
                self._setup()
            return self.__last_value

    def get_metrics(self):
        """Build the Zabbix metrics for this item.

        Returns
        -------
        list of pyzabbix.ZabbixMetric
            single-element list, or empty list when no value is available
        """
        current = self.__get_value()
        if current is None:
            return []
        return [ZabbixMetric(self.host, self.item_key, current)]

    def __lt__(self, other):
        # Ordering only matters for tie-breaking inside a priority queue,
        # so any consistent answer is acceptable.
        return True
class IntervalItemLast(IntervalItem):
    """Interval item that reports the most recently received value."""

    def _on_value_change(self, value=None, timestamp=None, **kw):
        """Remember the latest monitor update.

        Parameters
        ----------
        value : obj
            updated value
        timestamp : float
            timestamp of pv relative to POSIX time origin
        """
        with self._lock:
            self._value = value
class IntervalItemMin(IntervalItem):
    """Interval item that reports the minimum value seen in the interval."""

    def _on_value_change(self, value=None, timestamp=None, **kw):
        """Keep the smallest value received so far in this interval.

        Parameters
        ----------
        value : obj
            updated value
        timestamp : float
            timestamp of pv relative to POSIX time origin
        """
        with self._lock:
            if self._value is None:
                self._value = value
            else:
                self._value = min(self._value, value)
class IntervalItemMax(IntervalItem):
    """Interval item that reports the maximum value seen in the interval."""

    def _on_value_change(self, value=None, timestamp=None, **kw):
        """Keep the largest value received so far in this interval.

        Parameters
        ----------
        value : obj
            updated value
        timestamp : float
            timestamp of pv relative to POSIX time origin
        """
        with self._lock:
            if self._value is None:
                self._value = value
            else:
                self._value = max(self._value, value)
class IntervalItemAvg(IntervalItem):
    """Interval item that reports the arithmetic mean of received values.

    Attributes
    ----------
    __sum : int or float
        running total of values received during the interval
    __count : int
        number of updates accumulated so far
    """

    def _setup(self):
        """Reset the running sum and the update counter."""
        self.__sum = 0
        self.__count = 0

    def _on_value_change(self, value=None, timestamp=None, **kw):
        """Fold the update into the running average.

        Parameters
        ----------
        value : obj
            updated value
        timestamp : float
            timestamp of pv relative to POSIX time origin
        """
        with self._lock:
            self.__sum += value
            self.__count += 1
            self._value = float(self.__sum) / self.__count
import os
import logging
import logging.config
class Logger(object):
    """Application logging facade built on the stdlib ``logging`` module.

    Attributes
    ----------
    logger : logging.Logger
        the wrapped logger object
    """

    # Re-export the stdlib severity levels for convenience.
    NOTSET = logging.NOTSET
    DEBUG = logging.DEBUG
    INFO = logging.INFO
    WARNING = logging.WARNING
    ERROR = logging.ERROR
    CRITICAL = logging.CRITICAL
    LEVELS = [NOTSET, DEBUG, INFO, WARNING, ERROR, CRITICAL]

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.debug('Create logger(%s)', self.logger.name)

    def set_config(self, config_file):
        """Set configuration for logging from config file

        Parameters
        ----------
        config_file : str or bool
            Path to config file to load settings from.
            If value is `True` then default config path will used.
        """
        if isinstance(config_file, bool) and config_file:
            # `True` selects the default config shipped next to this module.
            base_dir = os.path.dirname(__file__)
            config_file = os.path.join(base_dir, 'logging.conf')
        logging.config.fileConfig(config_file,
                                  disable_existing_loggers=False)
        self.debug('Changed configuration for logger(%s)', self.logger.name)

    def set_level(self, level):
        """Set the logging level of this logger

        Parameters
        ----------
        level : int
            logging level; silently ignored unless it is one of LEVELS
        """
        if level not in self.LEVELS:
            return
        self.logger.setLevel(level)

    def debug(self, msg, *args, **kw):
        """Log 'msg % args' with severity 'DEBUG'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.log(self.DEBUG, msg, *args, **kw)

    def info(self, msg, *args, **kw):
        """Log 'msg % args' with severity 'INFO'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.log(self.INFO, msg, *args, **kw)

    def warning(self, msg, *args, **kw):
        """Log 'msg % args' with severity 'WARNING'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.log(self.WARNING, msg, *args, **kw)

    def error(self, msg, *args, **kw):
        """Log 'msg % args' with severity 'ERROR'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.log(self.ERROR, msg, *args, **kw)

    def critical(self, msg, *args, **kw):
        """Log 'msg % args' with severity 'CRITICAL'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.log(self.CRITICAL, msg, *args, **kw)

    def log(self, level, msg, *args, **kw):
        """Log 'msg % args' with the integer severity 'level'.

        To pass exception information, use the keyword argument
        exc_info with a true value.
        """
        self.logger.log(level, msg, *args, **kw)
# Module-level singleton shared by the rest of the package.
# (Restored: the line was fused with dataset-extraction residue.)
logger = Logger()
import json
import copy
import collections
from pyzabbix import ZabbixAPI
from zbxepics.logging.logger import logger
from . import apiobjects
class ZabbixProvisionCA(object):
    """Provision Zabbix configuration objects through the Zabbix API."""

    def __init__(self, url=None, user=None, password=None):
        self.__zbx_api = ZabbixAPI(url, user=user, password=password)
        self.__hostgroup = apiobjects.HostGroup(self.__zbx_api)
        self.__host = apiobjects.Host(self.__zbx_api)
        self.__template = apiobjects.Template(self.__zbx_api)
        self.__trigger = apiobjects.Trigger(self.__zbx_api)

    def set_hostgroups(self, hostgroups):
        """Create or update the given host groups."""
        self.__hostgroup.create_or_update(hostgroups)

    def set_hosts(self, hosts):
        """Create or update the given hosts."""
        self.__host.create_or_update(hosts)

    def set_templates(self, templates):
        """Create or update the given templates."""
        self.__template.create_or_update(templates)

    def set_items(self, items, templated=False):
        """Create or update items on hosts (or templates when `templated`)."""
        itemapi = apiobjects.Item(self.__zbx_api, templated)
        itemapi.create_or_update(items)

    def set_applications(self, applications, templated=False):
        """Create or update applications on hosts (or templates)."""
        appapi = apiobjects.Application(self.__zbx_api, templated)
        appapi.create_or_update(applications)

    def set_triggers(self, triggers):
        """Create or update the given triggers."""
        self.__trigger.create_or_update(triggers)

    def exec_provision(self, config):
        """Make provision for zabbix configurations.

        :type config: dict
        :param config: Zabbix configurations. required keys
            'hostgroups', 'hosts', 'templates'
        """
        if config is None:
            return
        if 'hostgroups' in config:
            self.set_hostgroups(config['hostgroups'])
        if 'hosts' in config:
            self.set_hosts([entry['info'] for entry in config['hosts']])
            for entry in config['hosts']:
                self.set_applications(entry['applications'])
                self.set_items(entry['items'])
                self.set_triggers(entry['triggers'])
        if 'templates' in config:
            self.set_templates([entry['info']
                                for entry in config['templates']])
            for entry in config['templates']:
                self.set_applications(entry['applications'], templated=True)
                self.set_items(entry['items'], templated=True)
                self.set_triggers(entry['triggers'])
class ZabbixProvisionConfigJSON(object):
    """ZabbixProvisionConfigJSON class.

    Load Zabbix provisioning configuration from a JSON file.
    """

    def __update_nested_dict(self, orig_dict, new_dict):
        """Recursively merge `new_dict` into `orig_dict` and return it.

        Nested dicts are merged key by key, lists are concatenated, and
        scalars in `new_dict` overwrite those in `orig_dict`.
        """
        for key, val in new_dict.items():
            if isinstance(val, dict):
                tmp = orig_dict.get(key, {})
                orig_dict[key] = self.__update_nested_dict(tmp, val)
            elif isinstance(val, list):
                # Bug fix: list.extend() returns None, so the previous
                # `tmp.extend(val) if tmp else val` stored None whenever
                # the original list was non-empty. Concatenate instead.
                orig_dict[key] = orig_dict.get(key, []) + val
            else:
                orig_dict[key] = val
        return orig_dict

    def load_config_from_json(self, config_file):
        """Load zabbix configuration from config file at JSON.

        :type config_file: str
        :param config_file: Path to config file to load
            configurations from.
        :rtype: dict
        :return: Configurations loaded from config file.
        """
        config = {'hostgroups': [], 'hosts': [], 'templates': []}
        with open(config_file, 'r') as f:
            json_data = json.load(f)
        # Top-level defaults apply to every group unless overridden.
        top_default = json_data.get('default', {})
        if 'hostgroups' not in json_data:
            return config
        for group in json_data['hostgroups']:
            # Merge the group-level default over a copy of the top default.
            group_default = copy.deepcopy(top_default)
            if 'default' in group:
                group_default = self.__update_nested_dict(group_default,
                                                          group['default'])
            groupname = group['name']
            config['hostgroups'].append({'name': groupname})
            groups = [groupname]
            for host in group.get('hosts', []):
                host_ = self.__parse_host(host, groups,
                                          default=group_default)
                if host_:
                    config['hosts'].append(host_)
            for template in group.get('templates', []):
                template_ = self.__parse_template(template, groups,
                                                  default=group_default)
                if template_:
                    config['templates'].append(template_)
        return config

    def __parse_template(self, template, groups, default=None):
        """Build one template config entry (info/applications/items/triggers)."""
        if default is None:
            default = {}
        template_default = copy.deepcopy(default)
        if 'default' in template:
            template_default = self.__update_nested_dict(template_default,
                                                         template['default'])
        template_config = {}
        info = copy.deepcopy(template)
        info['groups'] = groups
        # Keys handled separately must not leak into the info object.
        info.pop('default', None)
        info.pop('applications', None)
        info.pop('items', None)
        info.pop('triggers', None)
        template_config['info'] = info
        if 'hosts' in template:
            template_config['info']['hosts'] = template['hosts']
        contents = self.__parse_host_contents(template, template_default)
        if contents:
            template_config['applications'] = contents['applications']
            template_config['items'] = contents['items']
            template_config['triggers'] = contents['triggers']
        return template_config

    def __parse_host(self, host, groups, default=None):
        """Build one host config entry (info/applications/items/triggers)."""
        if default is None:
            default = {}
        host_default = copy.deepcopy(default)
        if 'default' in host:
            host_default = self.__update_nested_dict(host_default,
                                                     host['default'])
        host_config = {}
        info = copy.deepcopy(host)
        info['groups'] = groups
        # Keys handled separately must not leak into the info object.
        info.pop('default', None)
        info.pop('applications', None)
        info.pop('items', None)
        info.pop('triggers', None)
        host_config['info'] = info
        default_iface = {}
        if 'interface' in host_default:
            default_iface = copy.deepcopy(host_default['interface'])
        if 'interfaces' in host:
            interfaces = []
            for interface in host['interfaces']:
                # Bug fix: each interface must start from its own copy of
                # the default; the old code mutated one shared dict, so
                # later interfaces inherited earlier interfaces' keys.
                interface_ = copy.deepcopy(default_iface)
                interface_.update(interface)
                interfaces.append(interface_)
            host_config['info']['interfaces'] = interfaces
            if interfaces:
                # Items default to the host's first interface.
                # Bug fix: create the 'item' default on demand instead of
                # assuming host_default already contains it.
                host_default.setdefault('item', {})['interface'] = \
                    interfaces[0]
        templates = None
        if 'templates' in host:
            templates = host['templates']
        elif 'templates' in host_default:
            templates = host_default['templates']
        if templates:
            host_config['info']['templates'] = templates
        contents = self.__parse_host_contents(host, host_default)
        if contents:
            host_config['applications'] = contents['applications']
            host_config['items'] = contents['items']
            host_config['triggers'] = contents['triggers']
        return host_config

    def __parse_host_contents(self, host, default=None):
        """Collect applications, items and triggers defined on a host."""
        if default is None:
            default = {}
        contents = {'applications': [], 'items': [], 'triggers': []}
        hostname = host['name']
        if 'applications' in host:
            default_item = None
            if 'item' in default:
                default_item = copy.deepcopy(default['item'])
            for app in host['applications']:
                app_name = app['name']
                contents['applications'].append({'host': hostname,
                                                 'name': app_name})
                if 'items' not in app:
                    continue
                items = self.__parse_items(app['items'], hostname,
                                           [app_name], default_item)
                contents['items'].extend(items)
        if 'triggers' in host:
            default_trigger = None
            if 'trigger' in default:
                default_trigger = copy.deepcopy(default['trigger'])
            contents['triggers'] = self.__parse_triggers(host['triggers'],
                                                         hostname,
                                                         default_trigger)
        return contents

    def __parse_items(self, items, hostname, apps, default=None):
        """Expand item definitions with defaults, host and applications."""
        if default is None:
            default = {}
        items_ = []
        for item in items:
            item_ = copy.deepcopy(default)
            item_.update(item)
            item_['host'] = hostname
            item_['applications'] = apps
            items_.append(item_)
        return items_

    def __parse_triggers(self, triggers, hostname, default=None):
        """Expand trigger definitions with defaults and host names.

        (Renamed from the misspelled `__parase_triggers`; the method is
        name-mangled private, so no external callers are affected.)
        """
        if default is None:
            default = {}
        triggers_ = []
        for trigger in triggers:
            trigger_ = copy.deepcopy(default)
            trigger_.update(trigger)
            trigger_['host'] = hostname
            if 'dependencies' in trigger_:
                # Dependencies default to the same host when unspecified.
                for dep in trigger_['dependencies']:
                    if 'host' not in dep:
                        dep['host'] = hostname
            triggers_.append(trigger_)
        return triggers_
import copy
from .apiobject import APIObject
from .hostgroup import HostGroup
from .template import Template
from zbxepics.logging.logger import logger
class Host(APIObject):
    """Host object class for ZabbixAPI.

    :type host: dict
    :param host: dict for host object.
    :keys:
        'name': (str) Technical name of the host.
        'interfaces': (array) Interfaces to be created for the host.
        'groups': (array) Host groups to add the host to.
    """

    def __init__(self, zbx_api):
        self.__hostgroup = HostGroup(zbx_api)
        self.__template = Template(zbx_api)
        super(Host, self).__init__(zbx_api)

    def create(self, hosts):
        """Create each host in turn."""
        for host in hosts:
            self.create_one(host)

    def create_one(self, host):
        """Create new host.

        :type host: dict
        :param host: Paramter of Host object.
        :rtype: str
        :return: Return single ID of the created host.
        """
        name = host['name']
        if self.get_id_by_name(name) is not None:
            logger.debug('Already exists({0})'.format(name))
            return None
        result = self._do_request('host.create', self.__to_parameters(host))
        return result['hostids'][0] if result else None

    def update_one(self, host, hostid=None):
        """Update existing host.

        :type host: dict
        :param host: Paramter of Host object.
        :type hostid: str
        :param hostid: ID of the host.
        :rtype: str
        :return: Return single ID of the updated host.
        """
        name = host['name']
        if hostid is None:
            hostid = self.get_id_by_name(name)
        if hostid is None:
            logger.debug('Not exists({0})'.format(name))
            return None
        params = self.__to_parameters(host)
        params['hostid'] = hostid
        if 'templates_clear' in host:
            params['templates_clear'] = (
                self.__get_templateids(host['templates_clear']))
        result = self._do_request('host.update', params)
        return result['hostids'][0] if result else None

    def create_or_update(self, hosts):
        """Create each host, or update it when it already exists."""
        for host in hosts:
            hostid = self.get_id_by_name(host['name'])
            if hostid is None:
                self.create_one(host)
            else:
                self.update_one(host, hostid)

    def __to_parameters(self, host):
        """Translate a host dict into host.create/update parameters."""
        params = copy.deepcopy(host)
        params['host'] = host['name']
        params['groups'] = self.__get_groupids(host['groups'])
        if 'templates' in params:
            params['templates'] = self.__get_templateids(params['templates'])
        return params

    def get_hosts_by_name(self, names, output=None):
        """Fetch hosts matching the given technical names."""
        if output is None:
            output = ['hostid', 'host', 'name']
        params = {'filter': {'host': names}, 'output': output}
        return self._do_request('host.get', params)

    def get_id_by_name(self, name):
        """Return the ID of the named host, or None."""
        hostids = self.get_ids_by_name([name])
        return hostids[0]['hostid'] if hostids else None

    def get_ids_by_name(self, names):
        """Return a list of {'hostid': ...} dicts, or None when none match."""
        hosts = self.get_hosts_by_name(names, ['hostid'])
        return hosts if hosts else None

    def __get_groupids(self, groups):
        """Resolve host-group names to IDs."""
        return self.__hostgroup.get_ids_by_name(groups)

    def __get_templateids(self, templates):
        """Resolve template names to IDs."""
        return self.__template.get_ids_by_name(templates)

    def delete(self, names):
        """Delete hosts.

        :type names: list
        :param names: Technical names of the hosts to delete.
        :rtype: list
        :return: Return IDs of the deleted hosts.
        """
        hosts = self.get_ids_by_name(names)
        if not hosts:
            return None
        params = [host['hostid'] for host in hosts]
        result = self._do_request('host.delete', params)
        return result['hostids'] if result else None
from .apiobject import APIObject
from .host import Host
from .template import Template
from zbxepics.logging.logger import logger
class Application(APIObject):
    """Application object class for ZabbixAPI.

    :type application: dict
    :param application: dict for application object.
    :keys:
        'name': (str) Name of the application.
        'host': (str) Name of the host that the item belongs to.
    """

    def __init__(self, zbx_api, templated=False):
        # Applications may belong to either a host or a template.
        self.__host = Template(zbx_api) if templated else Host(zbx_api)
        super(Application, self).__init__(zbx_api)

    def create(self, applications):
        """Create each application in turn."""
        for application in applications:
            self.create_one(application)

    def create_one(self, application):
        """Create new application.

        :type application: dict
        :param application: Paramter of Application object.
        :rtype: str
        :return: Return single ID of the created application.
        """
        name = application['name']
        hostname = application['host']
        if self.get_id_by_name(name, hostname) is not None:
            logger.debug('Already exists({0},{1})'.format(hostname, name))
            return None
        params = self.__to_parameters(application)
        params['hostid'] = self.__host.get_id_by_name(hostname)
        result = self._do_request('application.create', params)
        return result['applicationids'][0] if result else None

    def update_one(self, application, applicationid=None):
        """Update existing application.

        :type application: dict
        :param application: Paramter of Application object.
        :type applicationid: str
        :param applicationid: ID of the application.
        :rtype: str
        :return: Return single ID of the updated application.
        """
        name = application['name']
        hostname = application['host']
        if applicationid is None:
            applicationid = self.get_id_by_name(name, hostname)
        if applicationid is None:
            logger.debug('Not exists({0},{1})'.format(hostname, name))
            return None
        params = self.__to_parameters(application)
        params['applicationid'] = applicationid
        result = self._do_request('application.update', params)
        return result['applicationids'][0] if result else None

    def create_or_update(self, applications):
        """Create each application, or update it when it already exists."""
        for application in applications:
            appid = self.get_id_by_name(application['name'],
                                        application['host'])
            if appid is None:
                self.create_one(application)
            else:
                self.update_one(application, appid)

    def __to_parameters(self, application):
        """Translate an application dict into API parameters."""
        return {'name': application['name']}

    def get_applications_by_name(self, names, hostname=None, output=None):
        """Fetch applications by name, scoped to the named host."""
        hostid = self.__host.get_id_by_name(hostname)
        if output is None:
            output = ['applicationid', 'hostid', 'name']
        params = {'filter': {'hostid': hostid, 'name': names},
                  'output': output}
        return self._do_request('application.get', params)

    def get_id_by_name(self, name, hostname):
        """Return the ID of the named application, or None."""
        apps = self.get_ids_by_name([name], hostname)
        return apps[0]['applicationid'] if apps else None

    def get_ids_by_name(self, names, hostname):
        """Return a list of {'applicationid': ...} dicts, or None."""
        apps = self.get_applications_by_name(names, hostname,
                                             ['applicationid'])
        return apps if apps else None

    def delete(self, names, hostname):
        """Delete applications.

        :type names: list
        :param names: Names of the applications to delete.
        :type hostname: str
        :param hostname: Technical name of the host.
        :rtype: list
        :return: Return IDs of the deleted applications.
        """
        apps = self.get_ids_by_name(names, hostname)
        if not apps:
            return None
        params = [app['applicationid'] for app in apps]
        result = self._do_request('application.delete', params)
        return result['applicationids'] if result else None
import copy
from .apiobject import APIObject
from .host import Host
from .template import Template
from .application import Application
from .hostinterface import HostInterface
from zbxepics.logging.logger import logger
class Item(APIObject):
    """Item object class for ZabbixAPI.

    :type item: dict
    :param item:
    :keys:
        'key_': (string) Item key.
        'name': (string) Name of the item.
        'type': (integer) Type of the item.
        'value_type': (integer) Type of information of the item.
        'host': (string) Name of the host that the item belongs to.
    :other keys:
        See also `API` in Zabbix Documentation.
    """

    def __init__(self, zbx_api, templated=False):
        # Items may belong to either a host or a template.
        self.__host = Template(zbx_api) if templated else Host(zbx_api)
        self.__app = Application(zbx_api, templated)
        self.__iface = HostInterface(zbx_api)
        super(Item, self).__init__(zbx_api)

    def create(self, items):
        """Create each item in turn."""
        for item in items:
            self.create_one(item)

    def create_one(self, item):
        """Create new item.

        :type item: dict
        :param item: Paramter of Item object.
        :rtype: str
        :return: Return single ID of the created item.
        """
        key_ = item['key_']
        hostname = item['host']
        if self.get_id_by_key(key_, hostname) is not None:
            logger.debug(('Already exists({0}:{1})'
                          .format(hostname, key_)))
            return None
        params = self.__to_parameters(item)
        params['hostid'] = self.__host.get_id_by_name(hostname)
        result = self._do_request('item.create', params)
        return result['itemids'][0] if result else None

    def update_one(self, item, itemid=None):
        """Update existing item.

        :type item: dict
        :param item: Paramter of Item object.
        :type itemid: str
        :param itemid: ID of the item.
        :rtype: str
        :return: Return single ID of the updated item.
        """
        key_ = item['key_']
        hostname = item['host']
        if itemid is None:
            itemid = self.get_id_by_key(key_, hostname)
        if itemid is None:
            logger.debug(('Not exists({0}:{1})'
                          .format(hostname, key_)))
            return None
        params = self.__to_parameters(item)
        params['itemid'] = itemid
        result = self._do_request('item.update', params)
        return result['itemids'][0] if result else None

    def __to_parameters(self, item):
        """Translate an item dict into item.create/update parameters.

        Resolves 'interface' (looked up by ip/port) into 'interfaceid'
        and application names into application IDs.
        """
        params = copy.deepcopy(item)
        if 'interface' in params:
            iface = params.pop('interface')
            params['interfaceid'] = self.__iface.get_id_by_ip(
                params['host'], iface['ip'], iface['port'])
        if 'applications' in params:
            params['applications'] = self.__get_app_ids(
                params['applications'], params['host'])
        return params

    def create_or_update(self, items):
        """Create each item, or update it when it already exists."""
        for item in items:
            itemid = self.get_id_by_key(item['key_'], item['host'])
            if itemid is None:
                self.create_one(item)
            else:
                self.update_one(item, itemid)

    def get_items_by_key(self, keys, hostname=None, output=None):
        """Fetch items by key, optionally scoped to a host."""
        if output is None:
            output = ['itemid', 'name', 'key_']
        params = {'filter': {'key_': keys, 'host': hostname},
                  'output': output}
        return self._do_request('item.get', params)

    def get_id_by_key(self, key_, hostname):
        """Return the ID of the item with the given key, or None."""
        items = self.get_items_by_key([key_], hostname)
        return items[0]['itemid'] if items else None

    def __get_app_ids(self, app_names, hostname):
        """Resolve application names to a list of IDs (or None)."""
        apps = self.__app.get_ids_by_name(app_names, hostname)
        if not apps:
            return None
        return [app['applicationid'] for app in apps]

    def delete(self, hostname, keys):
        """Delete items.

        :type hostname: str
        :param hostname: Technical name of the host.
        :type keys: list
        :param keys: Keys of the items to delete.
        :rtype: list
        :return: Return IDs of the deleted items.
        """
        items = self.get_items_by_key(keys, hostname)
        if not items:
            return None
        params = [item['itemid'] for item in items]
        result = self._do_request('item.delete', params)
        return result['itemids'] if result else None
import copy
from .apiobject import APIObject
from .hostgroup import HostGroup
from zbxepics.logging.logger import logger
class Template(APIObject):
    """Template object class for ZabbixAPI.

    A template is described by a dict with the keys:
        'name':   (str) technical name of the template.
        'groups': (array) host groups to add the template to.
    """

    def __init__(self, zbx_api):
        # A HostGroup helper is kept around to translate group names to IDs.
        self.__hostgroup = HostGroup(zbx_api)
        super(Template, self).__init__(zbx_api)

    def create(self, templates):
        """Create every template in *templates*."""
        for tpl in templates:
            self.create_one(tpl)

    def create_one(self, template):
        """Create new template.

        :type template: dict
        :param template: Parameter of Template object.
        :rtype: str
        :return: Return single ID of the created template, or None when it
            already exists or the request failed.
        """
        name = template['name']
        if self.get_id_by_name(name) is not None:
            logger.debug('Already exists({0})'.format(name))
            return None
        params = self.__to_parameters(template)
        params['groups'] = self.__get_groupids(template['groups'])
        result = self._do_request('template.create', params)
        if not result:
            return None
        return result['templateids'][0]

    def update_one(self, template, templateid=None):
        """Update existing template.

        :type template: dict
        :param template: Parameter of Template object.
        :type templateid: str
        :param templateid: ID of the template; looked up by name when omitted.
        :rtype: str
        :return: Return single ID of the updated template, or None.
        """
        name = template['name']
        if templateid is None:
            templateid = self.get_id_by_name(name)
            if templateid is None:
                logger.debug('Not exists({0})'.format(name))
                return None
        params = self.__to_parameters(template)
        params['templateid'] = templateid
        if 'groups' in template:
            params['groups'] = self.__get_groupids(template['groups'])
        if 'templates_clear' in template:
            params['templates_clear'] = self.get_ids_by_name(
                template['templates_clear'])
        result = self._do_request('template.update', params)
        if not result:
            return None
        return result['templateids'][0]

    def create_or_update(self, templates):
        """Create each template, updating it instead when it already exists."""
        for tpl in templates:
            existing = self.get_id_by_name(tpl['name'])
            if existing is None:
                self.create_one(tpl)
            else:
                self.update_one(tpl, existing)

    def __to_parameters(self, template):
        # Build the API parameter dict on a deep copy so the caller's dict
        # is untouched; 'host' is the technical name, and linked template
        # names are replaced by their resolved IDs.
        params = copy.deepcopy(template)
        params['host'] = template['name']
        if 'templates' in params:
            params['templates'] = self.get_ids_by_name(params['templates'])
        return params

    def get_templates_by_name(self, names, output=None):
        """Fetch templates whose technical names are in *names*."""
        params = {
            'filter': {'host': names},
            'output': (['templateid', 'host', 'status']
                       if output is None else output),
        }
        return self._do_request('template.get', params)

    def get_id_by_name(self, name):
        """Return the ID of the template called *name*, or None."""
        found = self.get_ids_by_name([name])
        if not found:
            return None
        return found[0]['templateid']

    def get_ids_by_name(self, names):
        """Return [{'templateid': ...}, ...] for *names*, or None if none."""
        found = self.get_templates_by_name(names, ['templateid'])
        return found if found else None

    def __get_groupids(self, groups):
        # Delegate name-to-ID resolution to the HostGroup helper.
        return self.__hostgroup.get_ids_by_name(groups)

    def delete(self, names):
        """Delete templates.

        :type names: list
        :param names: Technical names of the templates to delete.
        :rtype: list
        :return: Return IDs of the deleted templates, or None.
        """
        found = self.get_ids_by_name(names)
        if not found:
            return None
        ids = [entry['templateid'] for entry in found]
        result = self._do_request('template.delete', ids)
        if not result:
            return None
        return result['templateids']
from .apiobject import APIObject
from zbxepics.logging.logger import logger
class HostGroup(APIObject):
    """Host group object class for ZabbixAPI.

    A host group is described by a dict with the key:
        'name': (str) name of the host group.
    """

    def create(self, groups):
        """Create every host group in *groups*."""
        for grp in groups:
            self.create_one(grp)

    def create_one(self, group):
        """Create new host group.

        :type group: dict
        :param group: Parameter of Host group object.
        :rtype: str
        :return: Return single ID of the created host group, or None when it
            already exists or the request failed.
        """
        name = group['name']
        if self.get_id_by_name(name) is not None:
            logger.debug('Already exists({0})'.format(name))
            return None
        result = self._do_request('hostgroup.create',
                                  self.__to_parameters(group))
        if not result:
            return None
        return result['groupids'][0]

    def update_one(self, group, groupid=None):
        """Update existing host group.

        :type group: dict
        :param group: Parameter of Host group object.
        :type groupid: str
        :param groupid: ID of the host group; looked up by name when omitted.
        :rtype: str
        :return: Return single ID of the updated host group, or None.
        """
        name = group['name']
        if groupid is None:
            groupid = self.get_id_by_name(name)
            if groupid is None:
                logger.debug('Not exists({0})'.format(name))
                return None
        params = self.__to_parameters(group)
        params['groupid'] = groupid
        result = self._do_request('hostgroup.update', params)
        if not result:
            return None
        return result['groupids'][0]

    def create_or_update(self, groups):
        """Create each host group, updating it when it already exists."""
        for grp in groups:
            existing = self.get_id_by_name(grp['name'])
            if existing is None:
                self.create_one(grp)
            else:
                self.update_one(grp, existing)

    def __to_parameters(self, group):
        # Only the group name is sent to the API.
        return {'name': group['name']}

    def get_hostgroups_by_name(self, names, output=None):
        """Fetch host groups whose names are in *names*."""
        params = {
            'filter': {'name': names},
            'output': ['groupid', 'name'] if output is None else output,
        }
        return self._do_request('hostgroup.get', params)

    def get_id_by_name(self, name):
        """Return the ID of the host group called *name*, or None."""
        found = self.get_ids_by_name([name])
        if not found:
            return None
        return found[0]['groupid']

    def get_ids_by_name(self, names):
        """Return [{'groupid': ...}, ...] for *names*, or None if none."""
        found = self.get_hostgroups_by_name(names, ['groupid'])
        return found if found else None

    def delete(self, names):
        """Delete host groups.

        :type names: list
        :param names: Names of the host groups to delete.
        :rtype: list
        :return: Return IDs of the deleted host groups, or None.
        """
        found = self.get_ids_by_name(names)
        if not found:
            return None
        ids = [entry['groupid'] for entry in found]
        result = self._do_request('hostgroup.delete', ids)
        if not result:
            return None
        return result['groupids']
import copy
from .apiobject import APIObject
from zbxepics.logging.logger import logger
class Trigger(APIObject):
    """Trigger object class for ZabbixAPI.

    :type trigger: dict
    :param trigger:
        :keys:
            'host': (str) Technical name of the host the trigger belongs to
                (used to look the trigger up by expression).
            'description': (str) Name of the trigger.
            'expression': (str) Reduced trigger expression.
        :optional keys:
            'priority': (integer) Severity of the trigger.
            'recovery_expression': (str) Reduced trigger recovery expression.
            'manual_close': (integer) Allow manual close.
    """

    def __init__(self, zbx_api):
        super(Trigger, self).__init__(zbx_api)

    def create(self, triggers):
        """Create every trigger in *triggers* (see :meth:`create_one`)."""
        for trigger in triggers:
            self.create_one(trigger)

    def create_one(self, trigger):
        """Create new trigger.

        :type trigger: dict
        :param trigger: Parameter of Trigger object.
        :rtype: str
        :return: Return single ID of the created trigger, or None when it
            already exists or the request failed.
        """
        # A trigger is identified by its (host, description, expression).
        triggerid = self.__get_id(trigger)
        if triggerid is not None:
            logger.debug(('Already exists({0})'
                          .format(trigger['expression'])))
            return None
        params = self.__to_parameters(trigger)
        result = self._do_request('trigger.create', params)
        return result['triggerids'][0] if result else None

    def update_one(self, trigger, triggerid=None):
        """Update existing trigger.

        :type trigger: dict
        :param trigger: Parameter of Trigger object.
        :type triggerid: str
        :param triggerid: ID of the trigger; looked up by expression
            when omitted.
        :rtype: str
        :return: Return single ID of the updated trigger, or None.
        """
        if triggerid is None:
            triggerid = self.__get_id(trigger)
        if triggerid is None:
            logger.debug(('Not exists({0})'
                          .format(trigger['expression'])))
            return None
        params = self.__to_parameters(trigger)
        params['triggerid'] = triggerid
        result = self._do_request('trigger.update', params)
        return result['triggerids'][0] if result else None

    def create_or_update(self, triggers):
        """Create each trigger, or update it when it already exists."""
        for trigger in triggers:
            triggerid = self.__get_id(trigger)
            if triggerid is None:
                self.create_one(trigger)
            else:
                self.update_one(trigger, triggerid)

    def __to_parameters(self, trigger):
        # Deep-copy so the caller's dict is not mutated; dependency
        # trigger dicts are replaced by their resolved {'triggerid': ...}.
        params = copy.deepcopy(trigger)
        if 'dependencies' in params:
            ids = self.__get_ids(params['dependencies'])
            params['dependencies'] = ids
        return params

    def get_triggers_by_host(self, hostname, description=None,
                             expand_expression=True, output=None):
        """Fetch triggers on *hostname*, optionally filtered by description.

        Expressions are expanded by default so they can be string-compared
        against caller-supplied expressions.
        """
        params = {}
        params['filter'] = {'host': hostname, 'description': description}
        params['expandExpression'] = expand_expression
        params['output'] = ['triggerid', 'description', 'expression',
                            'recovery_expression', 'priority',
                            'manual_close']
        if output is not None:
            params['output'] = output
        result = self._do_request('trigger.get', params)
        return result

    def get_id_by_expression(self, expression, hostname,
                             description=None):
        """Return the ID of the first trigger on *hostname* whose expanded
        expression equals *expression*, or None."""
        triggers = self.get_triggers_by_host(hostname, description)
        if not triggers:
            return None
        triggerid = None
        for trigger in triggers:
            if trigger['expression'] == expression:
                triggerid = trigger['triggerid']
                break
        return triggerid

    def __get_id(self, trigger):
        # Resolve one trigger dict to its ID via (host, description,
        # expression); requires the 'host' and 'description' keys.
        triggerid = self.get_id_by_expression(trigger['expression'],
                                              trigger['host'],
                                              trigger['description'])
        return triggerid

    def __get_ids(self, triggers):
        # Resolve several trigger dicts; unresolvable ones are silently
        # skipped.
        triggerids = []
        for trigger in triggers:
            id_ = self.get_id_by_expression(trigger['expression'],
                                            trigger['host'],
                                            trigger['description'])
            if id_:
                triggerids.append({'triggerid': id_})
        return triggerids

    def delete(self, hostname, expressions):
        """Delete triggers.

        :type hostname: str
        :param hostname: Technical name of the host.
        :type expressions: str or list
        :param expressions: Expressions of the triggers to delete.
        :rtype: list
        :return: Return IDs of the deleted triggers.
        """
        triggers = self.get_triggers_by_host(hostname)
        if not triggers:
            return None
        # Accept a single expression as well as a list of them.
        if not isinstance(expressions, (tuple, list)):
            expressions = [expressions]
        params = []
        for trigger in triggers:
            if trigger['expression'] in expressions:
                params.append(trigger['triggerid'])
        result = self._do_request('trigger.delete', params)
        return result['triggerids'] if result else None
### (一)zabbix-feishu-alert 模块使用方法
> **`(一):安装`**
- 从 PYPI 安装
```
pip install -U zabbix-feishu-alert
```
- 从 Github 安装
```
pip install git+https://github.com/yinhuanyi/zabbix-feishu-alert.git
```
> **`(二):使用方法`**
```
from zabbix_feishu_alert import FeishuMessage
# 第一个参数:100.99.1.3为你的zabbix serverIP地址
# 第二个参数:Admin为你的zabbix web登录用户名
# 第三个参数:zabbix为你的zabbix web登录密码
# 第四个参数:13970236751为被@人的手机号码
# 第五个参数:36836为监控item的item id
# 第六个参数:zabbix graph存储路径
# 第七个参数:飞书机器人的app_id
# 第八个参数:飞书机器人的app_secret
feishu = FeishuMessage('100.99.1.3',
'Admin',
'zabbix',
'13970236751',
36836,
'./',
'cli_9e44d8e26dbb500d',
'8X4jX9MLwg6AXIEVJh0lC8oeHNDBfbnd')
# 第一个和第二个参数为:发送告警信息的时候,需要获取到zabbix中的title信息和message信息
# 第三个参数:38524是此次告警的event_id
# 第四个参数:http://100.112.2.11:8000/monitor/problem_ack/是[立即处理]按钮发送ACK消息webhook的地址
feishu.send_alarm_message("Zabbix Alert Title",
"Zabbix Alert Content",
38524,
'http://100.112.2.11:8000/monitor/problem_ack/')
# 发送确认告警消息
feishu.send_ack_message("Zabbix Ack Title",
"Zabbix Content Title")
# 发送恢复告警消息
feishu.send_recovery_message("Zabbix Recovery Title",
"Zabbix Content Title")
```
> **`(三):告警效果`**
- 测试效果

- 真实接入zabbix之后的效果

> **`(四):点击[立即处理]按钮`**
- 当值班人被@后,需要点击立即处理,立即处理会跳转到企业内部的运维平台,记录告警人的基本信息,例如:姓名,处理告警的时间等

### (二)飞书机器人的创建
> **`(一):登录飞书开放平台`**
- 登录飞书开放平台
[飞书开放平台](https://open.feishu.cn/)
- 在我的应用中,点击创建企业自建应用
- 在应用凭证栏中,可以看到APP ID和App Secret

# 欢迎提交PR | zabbix-feishu-alert | /zabbix-feishu-alert-1.0.8.tar.gz/zabbix-feishu-alert-1.0.8/README.md | README.md |
import os
import json
from datetime import datetime
import requests
from requests.cookies import RequestsCookieJar
class FeishuBase:
    """Base class that sends Zabbix alert/ack/recovery messages to a Feishu
    chat, including a rendered graph of the triggering item.

    Subclasses must implement :meth:`_get_tenant_access_token`.
    """

    def __init__(self, zabbix_host, zabbix_user, zabbix_passwd, user_mobile, item_id, data_dir, app_id, app_secret):
        """
        :param zabbix_host: zabbix ip address
        :param zabbix_user: zabbix admin username
        :param zabbix_passwd: zabbix admin passwd
        :param user_mobile: mobile number of the person to @-mention
        :param item_id: zabbix item id whose graph is attached
        :param data_dir: zabbix graph storage directory
        :param app_id: feishu bot app id (passed to the token hook)
        :param app_secret: feishu bot app secret (passed to the token hook)
        """
        self.tenant_access_token = self._get_tenant_access_token(app_id, app_secret)
        # First group in the bot's chat list is used as the target chat.
        self.chat_id = self._get_chat_id(self.tenant_access_token)
        self.user_id = self._get_user_id(self.tenant_access_token, user_mobile)
        # Download the item graph from the Zabbix frontend, then upload it
        # to Feishu so messages can embed it by image_key.
        self.zabbix_graph = self._get_zabbix_graph(item_id, zabbix_host, zabbix_user, zabbix_passwd, data_dir)
        self.image_key = self._upload_zabbix_graph(self.tenant_access_token, self.zabbix_graph)

    def _get_tenant_access_token(self, *args, **kwargs):
        # Abstract hook: subclasses must return a Feishu tenant_access_token
        # for the given (app_id, app_secret).
        raise Exception("Please Implement This Method")

    def _get_user_id(self, tenant_access_token, user_mobile):
        """
        :param tenant_access_token: feishu tenant_access_token
        :param user_mobile: people mobile
        :return: user id (first match returned by the batch lookup)
        """
        mobiles = user_mobile
        userurl = "https://open.feishu.cn/open-apis/user/v1/batch_get_id?mobiles=%s" % mobiles
        headers = {"Authorization": "Bearer %s" % tenant_access_token}
        request = requests.get(url=userurl, headers=headers)
        response = json.loads(request.content)['data']['mobile_users'][mobiles][0]['user_id']
        return response

    def _get_chat_id(self, tenant_access_token):
        """
        :param tenant_access_token: feishu tenant_access_token
        :return: chat id of the first group in the bot's chat list
        """
        chaturl = "https://open.feishu.cn/open-apis/chat/v4/list?page_size=20"
        headers = {"Authorization": "Bearer %s" % tenant_access_token, "Content-Type": "application/json"}
        request = requests.get(url=chaturl, headers=headers)
        response = json.loads(request.content)['data']['groups'][0]['chat_id']
        return response

    def _get_zabbix_graph(self, item_id, zabbix_host, zabbix_user, zabbix_passwd, data_dir):
        """
        :param item_id: zabbix item id
        :param zabbix_host: zabbix ip addr
        :param zabbix_user: zabbix admin username
        :param zabbix_passwd: zabbix admin passwd
        :param data_dir: zabbix graph storage directory
        :return: local absolute zabbix graph path name
        """
        # Create a requests session for the frontend login.
        session = requests.Session()
        # Headers for the login request.
        loginheaders = {
            "Host": zabbix_host,
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36',
            'Referer': 'http://{}/zabbix/index.php'.format(zabbix_host)
        }
        # Login form payload.
        payload = {
            "name": zabbix_user,
            "password": zabbix_passwd,
            "autologin": 1,
            "enter": "Sign in",
        }
        try:
            # Log in to the Zabbix web frontend through the session.
            login_ret = session.post(url='http://{}/zabbix/index.php'.format(zabbix_host),
                                     headers=loginheaders,
                                     data=payload)
            # Grab the session cookies from the login response.
            cookies = login_ret.cookies
            # Rebuild a cookie jar scoped to the /zabbix path.
            jar = RequestsCookieJar()
            for item in cookies.iteritems():
                jar.set(item[0], item[1], domain='{}'.format(zabbix_host), path='/zabbix')
            # Fetch the rendered chart for the item.
            graph_response = requests.get('http://{}/zabbix/chart.php?period=7200&width=600&time=600&itemids={}'.format(zabbix_host, item_id),cookies=jar)
            # Build a timestamped local file path for the image.
            local_time_str = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
            graph_name = 'zabbix_' + local_time_str + '.png'
            graph_path = os.path.join(data_dir, graph_name)
            # Save the image bytes under that path.
            with open(graph_path, 'wb', ) as f:
                f.write(graph_response.content)
            # Return the saved image path.
            return graph_path
        except Exception:
            # NOTE(review): the original exception is discarded here, so the
            # root cause is lost; consider chaining it.
            raise Exception("get zabbix graph failed")

    def _upload_zabbix_graph(self, tenant_access_token, graph_path):
        """
        :param tenant_access_token: feishu tenant_access_token
        :param graph_path: local absolute zabbix graph path name
        :return: image_key of the uploaded graph
        """
        with open(graph_path, 'rb') as f:
            image = f.read()
        img_url = 'https://open.feishu.cn/open-apis/image/v4/put/'
        headers = {'Authorization': "Bearer %s" % tenant_access_token}
        files = {"image": image}
        data = {"image_type": "message"}
        resp = requests.post(
            url=img_url,
            headers=headers,
            files=files,
            data=data
        )
        resp.raise_for_status()
        content = resp.json()
        # Return the image_key assigned by Feishu.
        return content['data']['image_key']

    # Send an alarm message.
    def send_alarm_message(self, title, content, event_id, zabbix_ack_addr):
        """
        :param title: zabbix alert title
        :param content: zabbix alert content
        :param event_id: zabbix event id (appended to the ack link)
        :param zabbix_ack_addr: webhook URL behind the "handle now" button
        :return: None

        NOTE(review): the HTTP response of the send call is not checked.
        """
        send_url = "https://open.feishu.cn/open-apis/message/v4/send/"
        headers = {"Authorization": "Bearer %s" % self.tenant_access_token, "Content-Type": "application/json"}
        data = {
            "chat_id": self.chat_id,
            "msg_type": "post",
            "content": {
                "post": {
                    "zh_cn": {
                        "title": title,
                        "content": [
                            [
                                {
                                    "tag": "text",
                                    "un_escape": True,
                                    "text": content
                                },
                                {
                                    "tag": "at",
                                    "user_id": self.user_id
                                },
                                {
                                    "tag": "a",
                                    "text": "\n立即处理",
                                    # e.g. http://{host}:8000/monitor/problem_ack/
                                    "href": "{}?event_id={}".format(zabbix_ack_addr, event_id)
                                },
                            ],
                            [
                                {
                                    "tag": "img",
                                    "image_key": self.image_key,
                                    "width": 1000,
                                    "height": 600
                                }
                            ]
                        ]
                    }
                }
            }
        }
        requests.post(url=send_url, headers=headers, json=data)

    # Send a recovery message.
    def send_recovery_message(self, title, content):
        """
        :param title: zabbix alert title
        :param content: zabbix alert content
        :return: None
        """
        sendurl = "https://open.feishu.cn/open-apis/message/v4/send/"
        headers = {"Authorization": "Bearer %s" % self.tenant_access_token, "Content-Type": "application/json"}
        data = {
            "chat_id": self.chat_id,
            "msg_type": "post",
            "content": {
                "post": {
                    "zh_cn": {
                        "title": title,
                        "content": [
                            [
                                {
                                    "tag": "text",
                                    "un_escape": True,
                                    "text": content
                                },
                                {
                                    "tag": "at",
                                    "user_id": self.user_id
                                },
                            ],
                            [
                                {
                                    "tag": "img",
                                    "image_key": self.image_key,
                                    "width": 1000,
                                    "height": 600
                                }
                            ]
                        ]
                    }
                }
            }
        }
        requests.post(url=sendurl, headers=headers, json=data)

    # Send an acknowledgement message.
    def send_ack_message(self, title, content):
        """
        :param title: zabbix alert title
        :param content: zabbix alert content
        :return: None
        """
        sendurl = "https://open.feishu.cn/open-apis/message/v4/send/"
        headers = {"Authorization": "Bearer %s" % self.tenant_access_token, "Content-Type": "application/json"}
        data = {
            "chat_id": self.chat_id,
            "msg_type": "post",
            "content": {
                "post": {
                    "zh_cn": {
                        "title": title,
                        "content": [
                            [
                                {
                                    "tag": "text",
                                    "un_escape": True,
                                    "text": content
                                },
                                {
                                    "tag": "at",
                                    "user_id": self.user_id
                                },
                            ],
                            [
                                {
                                    "tag": "img",
                                    "image_key": self.image_key,
                                    "width": 1000,
                                    "height": 600
                                }
                            ]
                        ]
                    }
                }
            }
        }
        requests.post(url=sendurl, headers=headers, json=data)
[](https://pypi.org/project/zabbix-import/)
Utility to import exported XML configuration(templates, hosts, ...) into Zabbix using it's [API](https://www.zabbix.com/documentation/3.4/manual/api).
```
$ zbx-import.py -u Admin -p *** --url https://zabbix.local/api_jsonrpc.php exported_templates.xml
SUCCESS: configuration import
```
Tested with Zabbix 3.4 and 4.0; it will probably work with older versions back to 2.0. Written in pure python, no additional libraries are required. Works with both python 3 and python 2.
Allows to control import options:
* create new - add new elements from the import file. Default: True
* update existing - update existing elements from the import file. Default: True
* delete missing - remove existing elements not present in the import file. Default: False. *NOTE*: without this option importing existing template with changed triggers will create new triggers, but old ones with the same name and different value will remain.
You can set this options for all elements or precisely select list of elements for the option: `--delete-missing 'triggers graphs'`. Check `--help` for available elements.
```
$ zbx-import.py -u Admin -p *** --url https://zabbix.local/api_jsonrpc.php --delete-missing exported_templates.xml
SUCCESS: configuration import
```
### Installation
Simplest option - just use `zbx-import.py` directly, it does not have any dependencies.
From [pypi.org](https://pypi.org):
`pip install zabbix-import`
Or create a Docker image and use it:
```bash
docker build -t zabbix-import .
# No options to get help on usage
docker run -it --rm zabbix-import [options]
```
**P.S.** If this code is useful for you - don't forget to put a star on it's [github repo](https://github.com/selivan/zabbix-import).
| zabbix-import | /zabbix-import-1.0.0.tar.gz/zabbix-import-1.0.0/README.md | README.md |
from argparse import ArgumentParser, RawTextHelpFormatter
import json
import os
import io # for handling file encoding in python2
from pprint import pformat
import sys
try: # python3
from urllib.request import Request, urlopen
except: # python2
from urllib2 import Request, urlopen
import traceback
# Default endpoint of a local Zabbix JSON-RPC API.
DEFAULT_ZABBIX_API_URL = 'http://127.0.0.1:80/api_jsonrpc.php'

# Which element kinds each configuration.import rule option may be applied
# to (see the Zabbix API documentation for configuration.import).
ELEMENTS_OPTIONS_DICT = {
    'createMissing': ['applications', 'discoveryRules', 'graphs', 'groups',
                      'hosts', 'httptests', 'images', 'items', 'maps',
                      'screens', 'templateLinkage', 'templates',
                      'templateScreens', 'triggers', 'valueMaps'],
    'updateExisting': ['discoveryRules', 'graphs',
                       'hosts', 'httptests', 'images', 'items', 'maps',
                       'screens', 'templates',
                       'templateScreens', 'triggers', 'valueMaps'],
    'deleteMissing': ['applications', 'discoveryRules', 'graphs',
                      'httptests', 'items', 'templateScreens', 'triggers'],
}
def __create_parser():
    """Build and return the argparse parser for the command line.

    The long help texts embed the element lists from
    ELEMENTS_OPTIONS_DICT, wrapped BRKLN_NUM_EL values per line.
    """
    cm_list = ELEMENTS_OPTIONS_DICT['createMissing']
    ue_list = ELEMENTS_OPTIONS_DICT['updateExisting']
    dm_list = ELEMENTS_OPTIONS_DICT['deleteMissing']

    # Number of displayed element values per line (on help description)
    BRKLN_NUM_EL = 6

    # Parse command line arguments
    parser = ArgumentParser(description=__doc__,
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument('template_file',
                        help='Zabbix exported template xml file\n')
    parser.add_argument(
        '-u', '--user',
        help='Use the --user flag to provide the Zabbix API user name.\n'
             'Alternatively you can set the ZABBIX_API_USER environment '
             'variable.\nOne of the two methods is required. '
             'In case you are using both,\nthe flag value takes '
             'precedence over the environment variable\n'
    )
    parser.add_argument(
        '-p', '--passwd', metavar='PASSWORD',
        help='Use the --passwd flag to provide the Zabbix API password.\n'
             'Alternatively you can set the ZABBIX_API_PASSWD environment '
             'variable.\nOne of the two methods is required. '
             'In case you are using both,\nthe flag value takes '
             'precedence over the environment variable\n'
    )
    parser.add_argument('-s', '--url', default=DEFAULT_ZABBIX_API_URL,
                        help='Zabbix API URL\nDefault value is: {}\n'
                             ''.format(DEFAULT_ZABBIX_API_URL))
    # The slice range(len(...))[::BRKLN_NUM_EL] yields the start index of
    # every displayed line of element values.
    parser.add_argument(
        '--no-create-missing', nargs='*', default=None,
        help='All the elements in the xml file that are missing in the zabbix'
             '\ndatabase are being created by default.\nTo unselect the '
             'createMissing option (i.e set false), use this flag\n followed'
             ' by a list of space separated values to be excluded.\nThe '
             'available element values are:\n\n{}\n\nIf not any value is '
             'provided, all of them will be excluded for the\ncreateMissing '
             'option\n'.format('\n'.join(
                 [', '.join(cm_list[idx:idx + BRKLN_NUM_EL])
                  for idx in range(len(cm_list))[::BRKLN_NUM_EL]]))
    )
    parser.add_argument(
        '--no-update-existing', nargs='*', default=None,
        help='All the elements in the xml file that already exists in the '
             'zabbix\ndatabase are being updated by default.\nTo unselect the '
             'updateExisting option (i.e set false), use this flag\n followed '
             'by a list of space separated values to be excluded.\nThe '
             'available element values are:\n\n{}\n\nIf not any value is '
             'provided, all of them will be excluded for the\nupdateExisting '
             'option\n'.format('\n'.join(
                 [', '.join(ue_list[idx:idx + BRKLN_NUM_EL])
                  for idx in range(len(ue_list))[::BRKLN_NUM_EL]]
             ))
    )
    parser.add_argument(
        '--delete-missing', nargs='*', default=None,
        help='All the elements that existes in the zabbix database that are '
             'not\npresent in the xml file are being preserved by default.\n'
             'To select the deleteMissing option (i.e set true), use this flag'
             '\nfollowed by a list of space separated values to be included.\n'
             'The available element values are:\n\n{}\n\nIf not any value is '
             'provided, all of them will be included for the\ndeleteMissing '
             'option\n'.format('\n'.join(
                 [', '.join(dm_list[idx:idx + BRKLN_NUM_EL])
                  for idx in range(len(dm_list))[::BRKLN_NUM_EL]]
             ))
    )
    return parser
def __build_rules(no_create_missing, no_update_existing, delete_missing):
    """Translate the CLI element lists into configuration.import rules.

    https://www.zabbix.com/documentation/3.4/manual/api/reference/configuration/import
    """
    def normalize(selected, option):
        # None means the flag was absent; a flag given with no values
        # (empty list) expands to every element of that option.
        if selected is None:
            return []
        if not any(selected):
            return ELEMENTS_OPTIONS_DICT[option]
        return selected

    no_create_missing = normalize(no_create_missing, 'createMissing')
    no_update_existing = normalize(no_update_existing, 'updateExisting')
    delete_missing = normalize(delete_missing, 'deleteMissing')

    rules = {}
    for el in ELEMENTS_OPTIONS_DICT['createMissing']:
        rules[el] = {'createMissing': el not in no_create_missing}
    for el in ELEMENTS_OPTIONS_DICT['updateExisting']:
        rules[el]['updateExisting'] = el not in no_update_existing
    for el in ELEMENTS_OPTIONS_DICT['deleteMissing']:
        rules[el]['deleteMissing'] = el in delete_missing
    return rules
def zbxrequest(url, method, auth, params):
    """POST one JSON-RPC request to the Zabbix API and return the decoded
    response dict.

    :param url: full URL of api_jsonrpc.php
    :param method: API method name, e.g. "configuration.import"
    :param auth: auth token from user.login, or None for unauthenticated calls
    :param params: method parameters (dict); None is treated as {}
    """
    if params is None:
        params = {}
    payload = {"jsonrpc": "2.0", "id": 1, "method": method,
               "auth": auth, "params": params}
    headers = {'Content-Type': 'application/json'}
    # Serialize to bytes for urlopen.
    body = json.dumps(payload).encode('utf-8')
    # BUG FIX: use the 'url' parameter instead of the global 'args.url',
    # so the function actually honours the URL it is given.
    req = Request(url, headers=headers, data=body)
    resp = urlopen(req)
    # Decode the response bytes and parse the JSON. BUG FIX: the
    # 'encoding' keyword of json.loads() was removed in Python 3.9 and
    # the input here is already a str.
    return json.loads(resp.read().decode('utf-8'))
def import_zabbix_template(template_file, user, passwd, url,
                           no_create_missing=None,
                           no_update_existing=None, delete_missing=None):
    """Authenticate against the Zabbix API and import *template_file*."""
    rules = __build_rules(no_create_missing,
                          no_update_existing, delete_missing)

    # TODO: add API version check
    # r=zbxrequest(args.url, method="apiinfo.version", auth=None, params={})
    # print(r)

    # Obtain an authentication token.
    # https://www.zabbix.com/documentation/3.4/manual/api/reference/user/login
    login = zbxrequest(url, method="user.login", auth=None,
                       params={"user": user, "password": passwd})
    if 'result' not in login:
        # Authentication was not OK.
        raise ZbxImportError('auth failed\n{}'
                             ''.format(pformat(login)))

    # Publish the token so the caller's finally-block can log out.
    global auth_token
    auth_token = login['result']

    # Read the template file content.
    with io.open(template_file, 'r', encoding='utf-8') as f:
        source = f.read()

    # Import parameters, including the template file content itself.
    params = {'format': 'xml', 'rules': rules, 'source': source}
    outcome = zbxrequest(url, method="configuration.import",
                         auth=auth_token, params=params)

    # Expected success shape: {'id': 1, 'jsonrpc': '2.0', 'result': True}
    if outcome.get('result'):
        print('SUCCESS: configuration import')
    else:
        raise ZbxImportError('configuration import failed\n{}'
                             ''.format(pformat(outcome)))
class ZbxImportError(Exception):
    """Fatal import error; dumps the active traceback when constructed."""

    def __init__(self, message, errors=1):
        # Print the traceback of the exception currently being handled
        # (if any) before surfacing this domain error.
        traceback.print_exc()
        self.errors = errors
        super(ZbxImportError, self).__init__(message)
if __name__ == '__main__':
    parser = __create_parser()
    args = parser.parse_args()

    # Shared with import_zabbix_template(), which assigns it after login so
    # the finally-block below can log out even on a failed import.
    auth_token = None

    # Get user/password values from the environment variable,
    # in case the respective argument is missing:
    if args.user is None:
        try:
            args.user = os.environ['ZABBIX_API_USER']
        except KeyError as err:
            # Reuse the argparse help text of the flag in the error message.
            raise ZbxImportError('Missing zabbix API user name.\n{}'
                                 ''.format(parser.__dict__[
                                     '_option_string_actions'
                                 ]['--user'].help))
    if args.passwd is None:
        try:
            args.passwd = os.environ['ZABBIX_API_PASSWD']
        except KeyError as err:
            raise ZbxImportError('Missing zabbix API password.\n{}'
                                 ''.format(parser.__dict__[
                                     '_option_string_actions'
                                 ]['--passwd'].help))

    try:
        import_zabbix_template(args.template_file, args.user, args.passwd,
                               args.url, args.no_create_missing,
                               args.no_update_existing, args.delete_missing)
    except Exception as e:
        # NOTE(review): re-wrapping loses the original exception type;
        # ZbxImportError itself prints the traceback on construction.
        raise ZbxImportError(str(e))
    finally:
        # Logout to prevent generation of unnecessary open sessions
        # https://www.zabbix.com/documentation/3.4/manual/api/reference/user/logout
        if auth_token is not None:
            zbxrequest(args.url, method="user.logout",
                       auth=auth_token, params={})
from os import environ, path, unlink
from zabbix_client import ZabbixServerProxy
from UserDict import UserDict
import json
# Names of the two severity buckets handled by active_triggers().
CRITICAL = 'critical'
WARNINGS = 'warnings'
# Symbols rendered in the powerline segment for each bucket.
CRITICAL_SYMBOLE = u'🔥'
WARNINGS_SYMBOLE = u'⚠️'
# Delta indicators: unchanged / increased / decreased.
SYMBOLE_NODIFF = u'±'
SYMBOLE_ADD = u' ↑'
SYMBOLE_NEGATIVE = u' ↓'
class JsonDataStore(UserDict, object):
    """Dict-like store persisted as a JSON file in the temp directory.

    Existing contents are loaded on construction; on destruction the data
    is written back, or the backing file is removed when the store is empty.
    """

    def __init__(self, initialdata=None):
        # BUG FIX: the default used to be a shared mutable dict ({}), so
        # every store created without arguments aliased the same object.
        if initialdata is None:
            initialdata = {}
        file_name = self.get_filename()
        if path.exists(file_name):
            with open(file_name) as json_file:
                try:
                    self.data = json.load(json_file)
                except ValueError:
                    # Corrupt or empty file: start empty. (Previously a
                    # bare `except:`, which also swallowed SystemExit and
                    # KeyboardInterrupt.)
                    self.data = {}
        else:
            self.data = initialdata

    def __del__(self):
        file_name = self.get_filename()
        if self.data:
            with open(file_name, 'w') as json_file:
                # json.dump returns None; the old code pointlessly rebound
                # self.data to that return value.
                json.dump(self.data, json_file)
        elif path.exists(file_name):
            unlink(file_name)

    def get_filename(self):
        """Return the absolute path of the backing JSON file."""
        import tempfile
        # BUG FIX: $TMPDIR may be unset; fall back to the platform temp
        # directory instead of passing None to os.path.join.
        tmp_dir = environ.get('TMPDIR') or tempfile.gettempdir()
        return path.join(tmp_dir, 'storage.json')
class ZabbixAgent(object):
    """Thin wrapper over the Zabbix JSON-RPC API for trigger queries."""

    def __init__(self, username, password, endpoint):
        self.conn = ZabbixServerProxy(endpoint)
        self.conn.user.login(user=username, password=password)

    def get_current_high_triggers(self):
        """Active, unacknowledged triggers with High/Disaster severity."""
        return self.conn.trigger.get(filter={'priority': [4, 5]},
                                     only_true='true', monitored=1,
                                     withUnacknowledgedEvents='true')

    def get_current_warning_triggers(self):
        """Active, unacknowledged triggers with severity 1-3."""
        return self.conn.trigger.get(filter={'priority': [1, 2, 3]},
                                     only_true='true', monitored=1,
                                     withUnacknowledgedEvents='true')
def zabbix_current_active(pl, username, password, endpoint):
    """Powerline segment showing counts of active high/warning triggers.

    gradient_level drives the segment colour: 100 when any high-severity
    trigger is active, an intermediate value when only warnings are
    active, and 0 when everything is quiet.
    """
    zabbix = ZabbixAgent(username, password, endpoint)
    triggers_high = zabbix.get_current_high_triggers()
    triggers_low = zabbix.get_current_warning_triggers()

    if triggers_high:
        level = 100
    elif triggers_low:
        # BUG FIX: the original assigned the trigger *list* itself to
        # gradient_level, which powerline expects to be a number in
        # [0, 100]. 50 is chosen as the mid gradient -- tune if needed.
        level = 50
    else:
        level = 0

    return [{
        'contents': u"H[%s] W[%s]" % (len(triggers_high), len(triggers_low)),
        'highlight_group': [
            'zabbix_current_state_gradient', 'zabbix_current_state'],
        'divider_highlight_group': 'background:divider',
        'gradient_level': level
    }]
def active_triggers(pl, username, password, endpoint, triggers='warnings'):
    """Powerline segment for one severity bucket with a delta indicator.

    The last seen trigger count is persisted in a JsonDataStore so the
    segment can display how much the count changed since the last render.

    :param triggers: either WARNINGS ('warnings') or CRITICAL ('critical').
    :raises ValueError: for any other *triggers* value.
    """
    zabbix = ZabbixAgent(username, password, endpoint)
    storage = JsonDataStore()
    count_trigger_key = 'current_count_%s' % (triggers)
    count_delta_key = 'last_delta_%s' % (triggers)

    if triggers == WARNINGS:
        active = zabbix.get_current_warning_triggers()
        symbol = WARNINGS_SYMBOLE
        highlight_group = 'active_triggers_%s' % (WARNINGS)
    elif triggers == CRITICAL:
        active = zabbix.get_current_high_triggers()
        symbol = CRITICAL_SYMBOLE
        highlight_group = 'active_triggers_%s' % (CRITICAL)
    else:
        # BUG FIX: any other value previously fell through and crashed
        # later with UnboundLocalError.
        raise ValueError('triggers must be %r or %r' % (WARNINGS, CRITICAL))

    triggers_count = len(active)
    if count_trigger_key in storage:
        if storage[count_trigger_key] != triggers_count:
            # Count changed since the last render: record the new delta.
            delta = triggers_count - storage[count_trigger_key]
            if delta == triggers_count:
                # Previous count was 0 -- treated as "no change".
                delta = 0
            sign = SYMBOLE_NEGATIVE if delta < 0 else SYMBOLE_ADD
            storage[count_delta_key] = delta
            storage[count_trigger_key] = triggers_count
        else:
            # Unchanged count: re-display the last recorded delta, if any.
            storage[count_trigger_key] = triggers_count
            if count_delta_key in storage:
                delta = storage[count_delta_key]
                sign = SYMBOLE_NEGATIVE if delta < 0 else SYMBOLE_ADD
            else:
                delta = 0
                sign = SYMBOLE_NODIFF
    else:
        # First run for this bucket: seed the store.
        storage[count_trigger_key] = triggers_count
        sign = SYMBOLE_NODIFF
        delta = 0
        storage[count_delta_key] = delta

    return [{
        'contents': u"%s %s%s%s" % (
            symbol, triggers_count, sign, abs(delta)),
        'highlight_group': [highlight_group]
    }]
zabbix-template-converter
=========================
This Python script aims to resolve compatibility issues when migrating Zabbix
template XML files between versions of Zabbix. For example, you may wish to
import a Zabbix v3.2 template into Zabbix v2.0.
The script works by applying conversion rules to a template, which manipulate
the template XML to match the desired Zabbix version template format.
Installation
------------
Install the Python script to ``/usr/local/bin`` with pip:
.. code-block:: shell
$ pip install zabbix-template-converter
Usage
-----
.. code-block:: shell
$ zabbix-template-convertor -h
usage: zabbix-template-convertor [-h] [-v] -o X.Y.Z [-s] file
Migrate Zabbix templates between versions
positional arguments:
file Zabbix template XML file
optional arguments:
-h, --help show this help message and exit
-v, --version show program's version number and exit
-o X.Y.Z, --output-version X.Y.Z
target Zabbix version
-s, --squash-value-maps
remove references to value maps for versions older
than 3.0.0
Examples
--------
To convert a Zabbix 3.2 template for import into v2.0:
.. code-block:: shell
$ zabbix-template-convertor -o 2.0 my_template.xml > my_template-2.0.xml
A number of transformations will take place. For example, Discovery Rule
filters will be downgraded from the multiple-filter format introduced in Zabbix 2.4, to a single filter expression as follows:
.. code-block:: xml
<filter>
<evaltype>0</evaltype>
<formula/>
<conditions>
<condition>
<macro>{#IFNAME}</macro>
<value>@Network interfaces for discovery</value>
<operator>8</operator>
<formulaid>A</formulaid>
</condition>
</conditions>
</filter>
Becomes:
.. code-block:: xml
<filter>{#IFNAME}:@Network interfaces for discovery</filter>
Coverage
--------
This project relies heavily on the community to report incompatibility problems
when importing templates.
**Please raise an issue** if you find a template that won't import after being
converted. Be sure to include the error messages and template file.
Over time, as conversion rules are added, the script should become more
comprehensive, and more reliable.
| zabbix-template-converter | /zabbix-template-converter-1.1.0.tar.gz/zabbix-template-converter-1.1.0/README.rst | README.rst |
__author__ = "Janssen dos Reis Lima"
from zabbix_api import ZabbixAPI
import os, sys
from termcolor import colored
from conf.zabbix import *
def banner():
    # Print the bold-red ASCII-art application banner, followed by a
    # blank line.
    print colored('''
 ______ ______ ______ _____ ________
 ___ /______ ___ /____ /____(_)___ __ ___ __/___ _____________________
 __ / _ __ `/_ __ \_ __ \_ /__ |/_/ __ / _ / / /_ __ \ _ \_ ___/
 _ /__/ /_/ /_ /_/ / /_/ / / __> < _ / / /_/ /_ / / / __/ /
 /____/\__,_/ /_.___//_.___//_/ /_/|_| /_/ \__,_/ /_/ /_/\___//_/
 ''', 'red', attrs=['bold'])
    print
# Establish the Zabbix API session at import time using the settings
# from conf/zabbix.py; on any failure print a connectivity error
# (with the banner) and abort the program.
try:
    zapi = ZabbixAPI(server=server, path="", log_level=loglevel)
    zapi.login(username, password)
except:
    # NOTE(review): bare except also swallows KeyboardInterrupt and
    # credential errors — every failure is reported as "URL unreachable".
    os.system('clear')
    banner()
    print colored(' Não foi possível conectar ao Zabbix Server.', 'yellow', attrs=['bold'])
    print u"\n Verifique se a URL " + colored (server, 'red', attrs=['bold']) + u" está disponível."
    print
    print colored('''
    Desenvolvido por Janssen Lima - [email protected]
    ''', 'blue', attrs=['bold'])
    exit(1)
def menu():
    """Clear the screen, draw the banner and main menu, then prompt.

    Control is handed to menu_opcao(), which dispatches the selected
    option; the application loops by having each action call main()
    again when it finishes.
    """
    os.system('clear')
    banner()
    print colored("[+] - Bem-vindo ao ZABBIX TUNER - [+]\n"
    "[+] - Zabbix Tuner faz um diagnóstico do seu ambiente e propõe melhorias na busca de um melhor desempenho - [+]\n"
    "[+] - Desenvolvido por Janssen Lima - [+]\n"
    "[+] - Dúvidas/Sugestões envie e-mal para [email protected] - [+]", 'blue')
    print
    print colored("--- Escolha uma opção do menu ---",'yellow', attrs=['bold'])
    print
    print "[1] - Relatório de itens do sistema"
    print "[2] - Listar itens não suportados"
    print "[3] - Desabilitar itens não suportados"
    print "[4] - Relatório da média de coleta dos itens (por tipo) (não implementado)"
    print "[5] - Iniciar diagnóstico (não implementado)"
    print "[6] - Relatório de Agentes Zabbix desatualizados"
    print "[7] - ??? (não implementado)"
    print "[8] - ??? (não implementado)"
    print "[9] - ??? (não implementado)"
    print
    print "[0] - Sair"
    print
    # Prompt for and dispatch the user's choice.
    menu_opcao()
def menu_opcao():
    """Prompt for a menu option and dispatch to the matching action.

    Unimplemented or unknown options simply redraw the menu; option
    '0' terminates the program.
    """
    opcao = raw_input( "[+] - Selecione uma opção[0-9]: ")
    # Dispatch table: each implemented option maps to its handler.
    acoes = {
        '1': dadosItens,
        '2': listagemItensNaoSuportados,
        '3': desabilitaItensNaoSuportados,
        '5': diagnosticoAmbiente,
        '6': agentesDesatualizados,
        '0': sys.exit,
    }
    acoes.get(opcao, menu)()
def desabilitaItensNaoSuportados():
    """Ask for confirmation, then disable every unsupported item.

    Fetches all monitored items whose state is 1 (not supported, per
    the "itens não suportados" menu entry) and sets status=1 (disabled)
    on each of them — one item.update API call per item.
    """
    opcao = raw_input( "Confirma operação? [s/n]")
    if opcao == 's' or opcao == 'S':
        itens = zapi.item.get({
            "output": "extend",
            "filter": {
                "state": 1
            },
            "monitored": True
        })
        for x in itens:
            # status 1 == disabled
            zapi.item.update({
                "itemid": x['itemid'], "status":1
            })
        print "Itens desabilitados!!!"
        raw_input("Pressione ENTER para continuar")
        main()
    else:
        # Anything other than 's'/'S' aborts and returns to the menu.
        main()
def agentesDesatualizados():
    """List hosts whose Zabbix agent version differs from the server's.

    Reads every host's `agent.version` item value, compares it with the
    value reported on the Zabbix server host, and prints one table row
    per mismatching host.
    """
    itens = zapi.item.get ({
        "filter": {"key_": "agent.version"},
        "output": ["lastvalue", "hostid"],
        "templated": False,
        "selectHosts": ["host"],
        "sortorder": "ASC"
    })
    # NOTE(review): hostid "10084" is assumed to be the Zabbix server
    # host (the stock-install default id) — confirm for this setup.
    versaoZabbixServer = zapi.item.get ({
        "filter": {"key_": "agent.version"},
        "output": ["lastvalue", "hostid"],
        "hostids": "10084"
    })[0]["lastvalue"]
    print colored('{0:6} | {1:30}' .format("Versão","Host"), attrs=['bold'])
    for x in itens:
        if x['lastvalue'] != versaoZabbixServer:
            print '{0:6} | {1:30}'.format(x["lastvalue"], x["hosts"][0]["host"])
    print ""
    raw_input("Pressione ENTER para continuar")
    main()
def diagnosticoAmbiente():
    """Run a quick environment diagnosis and print the findings.

    Two checks are performed:
      * count of enabled items with non-numeric value types
        (1=char, 2=log, 4=text — see the return_type table in
        ZabbixDefaults);
      * count of icmpping items keeping more than 7 days of history.
    """
    print colored("[+++]", 'green'), "analisando itens não númericos"
    # countOutput makes item.get return the number of matches instead
    # of the item list.
    itensNaoNumericos = zapi.item.get ({
        "output": "extend",
        "monitored": True,
        "filter": {"value_type": [1, 2, 4]},
        "countOutput": True
    })
    print colored("[+++]", 'green'), "analisando itens ICMPPING com histórico acima de 7 dias"
    itensPing = zapi.item.get ({
        "output": "extend",
        "monitored": True,
        "filter": {"key_": "icmpping"},
    })
    contPing = 0
    for x in itensPing:
        # "history" comes back as a string; compare numerically.
        if int(x["history"]) > 7:
            contPing += 1
    print ""
    print colored("Resultado do diagnóstico:", attrs=['bold'])
    print colored("[INFO]", 'blue'), "Quantidade de itens com chave icmpping armazenando histórico por mais de 7 dias:", contPing
    print colored("[WARN]", 'yellow', None, attrs=['blink']), "Quantidade de itens não numéricos (ativos): ", itensNaoNumericos
    print ""
    raw_input("Pressione ENTER para continuar")
    main()
def listagemItensNaoSuportados():
    """Print a table of unsupported items with their error messages.

    Selects monitored items that are enabled (status 0) but in state 1
    (not supported) and prints id, name, error text and host; prints a
    placeholder message when there are none.
    """
    itensNaoSuportados = zapi.item.get({"output": ["itemid", "error", "name"],
                "filter": {"state": 1,"status":0},
                "monitored": True,
                "selectHosts": ["hostid", "host"],
                })
    if itensNaoSuportados:
        print colored('{0:5} | {1:30} | {2:40} | {3:10}' .format("Item","Nome", "Error", "Host"), attrs=['bold'])
        for x in itensNaoSuportados:
            print '{0:5} | {1:30} | {2:40} | {3:10}'.format(x["itemid"], x["name"], x["error"], x["hosts"][0]["host"])
        print ""
    else:
        print "Não há dados a exibir"
        print ""
    raw_input("Pressione ENTER para continuar")
    main()
def dadosItens():
itensNaoSuportados = zapi.item.get({"output": "extend",
"filter": {"state": 1,"status":0},
"monitored": True,
"countOutput": True
})
totalItensHabilitados = zapi.item.get({"output": "extend",
"filter": {"state": 0},
"monitored": True,
"countOutput": True
})
itensDesabilitados = zapi.item.get({"output": "extend",
"filter": {"status": 1},
"templated": False,
"countOutput": True
})
itensDescobertos = zapi.item.get({
"output": "extend",
"selectItemDiscovery": ["itemid"],
"selectTriggers": ["description"]
})
cont = 0
for i in itensDescobertos:
if i["itemDiscovery"]:
cont += 1
print ""
print "Relatório de itens"
print "=" * 18
print ""
print colored("[INFO]",'blue'), "Total de itens: ", int(totalItensHabilitados) + int(itensDesabilitados) + int(itensNaoSuportados)
print colored("[INFO]",'blue'), "Itens habilitados: ", totalItensHabilitados
print colored("[INFO]",'blue'), "Itens desabilitados: ", itensDesabilitados
if itensNaoSuportados > "0":
print colored("[ERRO]",'red'), "Itens não suportados: ", itensNaoSuportados
else:
print colored("[-OK-]",'green'), "Itens não suportados: ", itensNaoSuportados
print colored("[INFO]",'blue'), "Itens descobertos: ", cont
print ""
raw_input("Pressione ENTER para continuar")
main()
def main():
    # Entry point: (re)draw the interactive menu; the application loops
    # by each menu action calling main() again when it finishes.
    menu()
# Start the application as soon as the module is loaded.
main()
from decimal import Decimal
import json
import logging
import socket
import struct
import re
from io import StringIO
import configparser
LOG = logging.getLogger(__name__)
class ZabbixResponse():
    """
    The :class:`ZabbixResponse` contains the parsed response from Zabbix.

    Counters accumulate across every chunk fed to :meth:`parse`.
    """

    def __init__(self):
        self._processed = 0
        self._failed = 0
        self._total = 0
        self._time = 0
        self._chunk = 0
        # The server's "info" string looks like:
        #   "processed: 2; failed: 0; total: 2; seconds spent: 0.000070"
        # (capitalisation, colons and semicolons vary between versions).
        self._regex = re.compile(
            r'[Pp]rocessed:? (\d*);? [Ff]ailed:? (\d*);? '
            r'[Tt]otal:? (\d*);? [Ss]econds spent:? (\d*\.\d*)')

    def __repr__(self):
        """Represent detailed ZabbixResponse view."""
        return json.dumps({
            'processed': self._processed,
            'failed': self._failed,
            'total': self._total,
            'time': str(self._time),
            'chunk': self._chunk,
        })

    def parse(self, response):
        """Accumulate the counters reported in one zabbix server response."""
        match = self._regex.search(response.get('info'))
        processed, failed, total, spent = match.groups()
        self._processed += int(processed)
        self._failed += int(failed)
        self._total += int(total)
        self._time += Decimal(spent)
        self._chunk += 1

    @property
    def processed(self):
        """Cumulative number of successfully processed values."""
        return self._processed

    @property
    def failed(self):
        """Cumulative number of failed values."""
        return self._failed

    @property
    def total(self):
        """Cumulative number of values sent."""
        return self._total

    @property
    def time(self):
        """Cumulative seconds the server spent, as a Decimal."""
        return self._time

    @property
    def chunk(self):
        """Number of response chunks parsed so far."""
        return self._chunk
class ZabbixMetric():  # pylint: disable=too-few-public-methods
    """
    The :class:`ZabbixMetric` contain one metric for zabbix server.

    :type host: str
    :param host: Hostname as it displayed in Zabbix.

    :type key: str
    :param key: Key by which you will identify this metric.

    :type value: str
    :param value: Metric value.

    :type clock: int
    :param clock: Unix timestamp. Current time will used if not specified.
    """

    def __init__(self, host, key, value, clock=None):
        self.host = str(host)
        self.key = str(key)
        self.value = str(value)
        # BUGFIX: test against None rather than truthiness so a
        # legitimate timestamp of 0 (the Unix epoch) is kept instead of
        # being silently dropped. Booleans are rejected explicitly:
        # bool is a subclass of int, but True/False are not timestamps.
        if clock is not None:
            if isinstance(clock, (float, int)) and not isinstance(clock, bool):
                self.clock = int(clock)
            else:
                raise Exception('Clock must be time in unixtime format')

    def __repr__(self):
        """Represent detailed ZabbixMetric view."""
        result = json.dumps(self.__dict__)
        LOG.debug('%s: %s', self.__class__.__name__, result)
        return result
class ZabbixSender():
    """The :class:`ZabbixSender` sends metrics to a Zabbix server.

    Implementation of the
    `zabbix protocol <https://www.zabbix.com/documentation/1.8/protocols>`_.

    :type zabbix_server: str
    :param zabbix_server: Zabbix server ip address. Default: `127.0.0.1`

    :type zabbix_port: int
    :param zabbix_port: Zabbix server port. Default: `10051`

    :type use_config: str
    :param use_config: Path to zabbix_agentd.conf file to load settings from.
        If value is `True` then default config path will used:
        /etc/zabbix/zabbix_agentd.conf

    :type chunk_size: int
    :param chunk_size: Number of metrics send to the server at one time
    """

    def __init__(self,
                 zabbix_server='127.0.0.1',
                 zabbix_port=10051,
                 use_config=None,
                 chunk_size=250):
        self.chunk_size = chunk_size
        if use_config:
            self.zabbix_uri = self._load_from_config(use_config)
        else:
            self.zabbix_uri = [(zabbix_server, zabbix_port)]

    def __repr__(self):
        """Represent detailed ZabbixSender view."""
        result = json.dumps(self.__dict__, ensure_ascii=False)
        LOG.debug('%s: %s', self.__class__.__name__, result)
        return result

    def _load_from_config(self, config_file):
        """
        Load zabbix server IP address and port from zabbix agent config file.

        If ServerActive variable is not found in the file, it will
        use the default: 127.0.0.1:10051

        :type config_file: str
        :param config_file: Path to zabbix_agentd.conf file to load settings
            from. If value is `True` then default config path will used:
            /etc/zabbix/zabbix_agentd.conf

        :rtype: list
        :return: List of (server, port) tuples parsed from ServerActive.
        """
        if config_file and isinstance(config_file, bool):
            config_file = '/etc/zabbix/zabbix_agentd.conf'
        LOG.debug("Used config: %s", config_file)
        # Workaround for a config file without sections: configparser
        # requires at least one section header, so prepend a fake one.
        with open(config_file, 'r', encoding="utf8") as config_file_obj:
            config_file_data = "[root]\n" + config_file_obj.read()
        default_params = {
            'ServerActive': '127.0.0.1:10051',
        }
        config_file_fp = StringIO(config_file_data)
        config = configparser.RawConfigParser(default_params)
        config.read_file(config_file_fp)
        zabbix_serveractives = config.get('root', 'ServerActive')
        result = []
        for serverport in zabbix_serveractives.split(','):
            # ServerActive entries without a port get the default 10051.
            if ':' not in serverport:
                serverport = serverport.strip() + ":" + str(10051)
            server, port = serverport.split(':')
            serverport = (server, int(port))
            result.append(serverport)
        LOG.debug("Loaded params: %s", result)
        return result

    def _receive(self, sock, count):
        """Read exactly `count` bytes from the socket (fewer only on EOF).

        A single recv() on a stream socket may return less data than
        requested, so keep reading until the requested amount has been
        collected or the peer closes the connection.

        :type sock: :class:`socket._socketobject`
        :param sock: Socket to read.

        :type count: int
        :param count: Number of bytes to read from socket.
        """
        buf = b''
        while len(buf) < count:
            chunk = sock.recv(count - len(buf))
            if not chunk:
                break
            buf += chunk
        return buf

    def _create_messages(self, metrics):
        """Create a list of zabbix messages from a list of ZabbixMetrics.

        :type metrics: list
        :param metrics: List of :class:`zabbix.sender.ZabbixMetric`.

        :rtype: list
        :return: List of zabbix messages (the JSON repr of each metric).
        """
        messages = []
        # Fill the list of messages
        for metric in metrics:
            messages.append(str(metric))
        LOG.debug('Messages: %s', messages)
        return messages

    def _create_request(self, messages):
        """Create a formatted request to zabbix from a list of messages.

        :type messages: list
        :param messages: List of zabbix messages

        :rtype: bytes
        :return: Formatted zabbix request, UTF-8 encoded.
        """
        msg = ','.join(messages)
        request = f'{{"request":"sender data","data":[{msg}]}}'
        request = request.encode("utf-8")
        LOG.debug('Request: %s', request)
        return request

    def _create_packet(self, request):
        """Create a formatted packet from a request.

        Frames the request with the "ZBXD\\x01" protocol header followed
        by the little-endian 64-bit payload length.

        :type request: bytes
        :param request: Formatted zabbix request

        :rtype: bytes
        :return: Data packet for zabbix
        """
        data_len = struct.pack('<Q', len(request))
        packet = b'ZBXD\x01' + data_len + request

        def ord23(xnum):
            # Bytes iteration yields ints on py3 and str on py2; normalize.
            if not isinstance(xnum, int):
                return ord(xnum)
            return xnum

        LOG.debug('Packet [str]: %s', packet)
        LOG.debug(
            'Packet [hex]: %s',
            ':'.join(hex(ord23(x))[2:] for x in packet))
        return packet

    def _get_response(self, connection):
        """Get response from zabbix server, reads from self.socket.

        :type connection: :class:`socket._socketobject`
        :param connection: Socket to read.

        :rtype: dict
        :return: Response from zabbix server or False in case of error.
        """
        # Header: b'ZBXD\x01' (5 bytes) + little-endian uint64 body length.
        response_header = self._receive(connection, 13)
        LOG.debug('Response header: %s', response_header)
        if (not response_header.startswith(b'ZBXD\x01') or
                len(response_header) != 13):
            LOG.debug('Zabbix return not valid response.')
            result = False
        else:
            response_len = struct.unpack('<Q', response_header[5:])[0]
            # BUGFIX: a single recv() may deliver only part of the body on
            # a stream socket; loop via _receive() until all bytes arrive.
            response_body = self._receive(connection, response_len)
            result = json.loads(response_body.decode("utf-8"))
            LOG.debug('Data received: %s', result)
        try:
            connection.close()
        except Exception as error:  # pylint: disable=broad-except
            # Best-effort close; a failure here must not mask the result.
            LOG.error("Zabbix server response error: '%s'.", error)
        return result

    def _chunk_send(self, metrics):
        """Send the one chunk metrics to zabbix server.

        :type metrics: list
        :param metrics: List of :class:`zabbix.sender.ZabbixMetric` to send
            to Zabbix

        :rtype: dict
        :return: Response from Zabbix Server (from the last configured URI)

        :raises Exception: if sending fails or the server does not answer
            with ``"response": "success"``.
        """
        messages = self._create_messages(metrics)
        request = self._create_request(messages)
        packet = self._create_packet(request)
        for host_addr in self.zabbix_uri:
            LOG.debug('Sending data to %s', host_addr)
            # create socket object
            connection = socket.socket()
            # server and port must be tuple
            connection.connect(host_addr)
            try:
                connection.sendall(packet)
            except Exception as error:
                # In case of error we should close connection, otherwise
                # we will close it after data will be received.
                connection.close()
                raise Exception(error) from error
            response = self._get_response(connection)
            LOG.debug('%s response: %s', host_addr, response)
            if response and response.get('response') != 'success':
                LOG.debug('Response error: %s', response)
                raise Exception(response)
        return response

    def send(self, metrics):
        """Send the metrics to zabbix server.

        Metrics are transmitted in chunks of ``self.chunk_size`` and the
        per-chunk counters are accumulated into one ZabbixResponse.

        :type metrics: list
        :param metrics: List of :class:`zabbix.sender.ZabbixMetric` to send
            to Zabbix

        :rtype: :class:`pyzabbix.sender.ZabbixResponse`
        :return: Parsed response from Zabbix Server
        """
        result = ZabbixResponse()
        for metric in range(0, len(metrics), self.chunk_size):
            result.parse(
                self._chunk_send(
                    metrics[metric: metric + self.chunk_size]))
        return result
import logging
from typing import Mapping, Optional, Sequence, Tuple, Union
from warnings import warn
from packaging.version import Version
from requests import Session
__all__ = [
"ZabbixAPI",
"ZabbixAPIException",
"ZabbixAPIMethod",
"ZabbixAPIObject",
"ZabbixAPIObjectClass",
]
LOG = logging.getLogger(__name__)
class ZabbixAPIException(Exception):
    """Generic Zabbix API exception

    Codes:
      -32700: invalid JSON. An error occurred on the server while
              parsing the JSON text (typo, wrong quotes, etc.)
      -32600: received JSON is not a valid JSON-RPC Request
      -32601: requested remote-procedure does not exist
      -32602: invalid method parameters
      -32603: Internal JSON-RPC error
      -32400: System error
      -32300: Transport error
      -32500: Application error

    The raw JSON-RPC error object (when available) is exposed as the
    ``error`` attribute.
    """

    def __init__(self, *args, **kwargs):
        error = kwargs.get("error")
        super().__init__(*args)
        self.error = error
class ZabbixAPI:  # pylint: disable=too-many-instance-attributes
    """JSON-RPC client for the Zabbix web API.

    Object/method calls are built dynamically via __getattr__/__getitem__:
    ``api.host.get(...)`` issues a ``host.get`` JSON-RPC request through
    :meth:`do_request`. Usable as a context manager: on clean exit a
    username/password session is logged out automatically.
    """
    def __init__(  # pylint: disable=too-many-arguments
        self,
        server: str = "http://localhost/zabbix",
        session: Optional[Session] = None,
        use_authenticate: bool = False,
        timeout: Optional[Union[float, int, Tuple[int, int]]] = None,
        detect_version: bool = True,
    ):
        """
        :param server: Base URI for zabbix web interface
            (omitting /api_jsonrpc.php).
        :param session: optional pre-configured requests.Session instance
        :param use_authenticate: Use old (Zabbix 1.8) style authentication
        :param timeout: optional connect and read timeout in seconds,
            default: None.
            If you're using Requests >= 2.4 you can set it as
            tuple: "(connect, read)" which is used to set
            individual connect and read timeouts.
        :param detect_version: autodetect Zabbix API version
        """
        self.session = session or Session()
        # Default headers for all requests
        self.session.headers.update(
            {
                "Content-Type": "application/json-rpc",
                "User-Agent": "python/pyzabbix",
                "Cache-Control": "no-cache",
            }
        )
        self.use_authenticate = use_authenticate
        # True once login() was given an API token instead of credentials.
        self.use_api_token = False
        self.auth = ""
        # JSON-RPC request id; incremented after each successful request.
        self.id = 0  # pylint: disable=invalid-name
        self.timeout = timeout
        # Normalize the server URL so it always targets api_jsonrpc.php.
        if not server.endswith("/api_jsonrpc.php"):
            server = server.rstrip("/") + "/api_jsonrpc.php"
        self.url = server
        LOG.info("JSON-RPC Server Endpoint: '%s'.", self.url)
        self.version: Optional[Version] = None
        self._detect_version = detect_version
    def __enter__(self) -> "ZabbixAPI":
        return self
    # pylint: disable=inconsistent-return-statements
    def __exit__(self, exception_type, exception_value, traceback):
        # Suppress only ZabbixAPIException (or clean exit); any other
        # exception type propagates. API-token sessions are never logged
        # out, since the token must remain valid after the context.
        if isinstance(exception_value, (ZabbixAPIException, type(None))):
            if self.is_authenticated and not self.use_api_token:
                # Logout the user if they are authenticated
                # using username + password.
                self.user.logout()
            return True
        return None
    def login(
        self,
        user: str = "",
        password: str = "",
        api_token: Optional[str] = None,
    ) -> None:
        """Convenience method for calling user.authenticate
        and storing the resulting auth token for further commands.

        If use_authenticate is set, it uses the older (Zabbix 1.8)
        authentication command.

        :param password: Password used to login into Zabbix
        :param user: Username used to login into Zabbix
        :param api_token: API Token to authenticate with
        """
        if self._detect_version:
            self.version = Version(self.api_version())
            LOG.info("Zabbix API version is: '%s'.", self.version)
        # If the API token is explicitly provided, use this instead.
        if api_token is not None:
            self.use_api_token = True
            self.auth = api_token
            return
        # If we have an invalid auth token, we are not allowed to send a login
        # request. Clear it before trying.
        self.auth = ""
        if self.use_authenticate:
            self.auth = self.user.authenticate(user=user, password=password)
        elif self.version and self.version >= Version("5.4.0"):
            # Zabbix >= 5.4 expects the credential under "username"
            # instead of "user".
            self.auth = self.user.login(username=user, password=password)
        else:
            self.auth = self.user.login(user=user, password=password)
    def check_authentication(self):
        """Check if we are authenticated.

        Unlike :attr:`is_authenticated`, this lets a failed check raise.
        """
        if self.use_api_token:
            # We cannot use this call using an API Token
            return True
        # Convenient method for calling user.checkAuthentication
        # of the current session
        return self.user.checkAuthentication(sessionid=self.auth)
    @property
    def is_authenticated(self) -> bool:
        """True if the current session/token passes the server's check."""
        if self.use_api_token:
            # We cannot use this call using an API Token
            return True
        try:
            self.user.checkAuthentication(sessionid=self.auth)
        except ZabbixAPIException:
            return False
        return True
    def confimport(
        self,
        confformat: str = "",
        source: str = "",
        rules: str = "",
    ) -> dict:
        """Alias for configuration.import because it clashes with
        Python's import reserved keyword.

        Deprecated: use ``ZabbixAPI.configuration["import"](...)``.

        :param rules: import rules
        :param source: serialized configuration to import
        :param confformat: configuration format (passed as ``format``)
        """
        warn(
            "ZabbixAPI.confimport(format, source, rules) has been deprecated,"
            "please use "
            "ZabbixAPI.configuration['import']"
            "(format=format, source=source, rules=rules) instead",
            DeprecationWarning,
            2,
        )
        return self.configuration["import"](
            format=confformat,
            source=source,
            rules=rules,
        )
    def api_version(self) -> str:
        """Get Zabbix API version."""
        return self.apiinfo.version()
    def do_request(
        self,
        method: str,
        params: Optional[Union[Mapping, Sequence]] = None,
    ) -> dict:
        """Send one JSON-RPC request and return the decoded response dict.

        :param method: Zabbix API method name, e.g. ``"host.get"``.
        :param params: positional (sequence) or named (mapping) parameters.
        :raises ZabbixAPIException: on an empty/unparsable body or when
            the server reports a JSON-RPC error.
        """
        payload = {
            "jsonrpc": "2.0",
            "method": method,
            "params": params or {},
            "id": self.id,
        }
        # We don't have to pass the auth token if asking for
        # the apiinfo.version or user.checkAuthentication
        if (
            self.auth
            and method != "apiinfo.version"
            and method != "user.checkAuthentication"
        ):
            payload["auth"] = self.auth
        # Redact credentials before logging.
        printable_payload = dict(payload)
        printable_payload["password"] = "***"
        printable_payload["auth"] = "***"
        LOG.debug("Sending: '%s'.", printable_payload)
        resp = self.session.post(self.url, json=payload, timeout=self.timeout)
        LOG.debug("Response Code: %i.", resp.status_code)
        # NOTE: Getting a 412 response code means the headers are not in the
        # list of allowed headers.
        resp.raise_for_status()
        if not resp.text:
            raise ZabbixAPIException("Received empty response")
        try:
            response = resp.json()
        except ValueError as exception:
            raise ZabbixAPIException(
                f"Unable to parse json: {resp.text}"
            ) from exception
        LOG.debug("Response Body: '%s'.", response)
        self.id += 1
        if "error" in response:  # some exception
            error = response["error"]
            # some errors don't contain 'data': workaround for ZBX-9340
            if "data" not in error:
                error["data"] = "No data"
            raise ZabbixAPIException(
                f"Error {error['code']}: {error['message']}, {error['data']}",
                error["code"],
                error=error,
            )
        return response
    def _object(self, attr: str) -> "ZabbixAPIObject":
        """Dynamically create an object class (ie: host)"""
        return ZabbixAPIObject(attr, self)
    def __getattr__(self, attr: str) -> "ZabbixAPIObject":
        # Any unknown attribute becomes a Zabbix API object proxy.
        return self._object(attr)
    def __getitem__(self, attr: str) -> "ZabbixAPIObject":
        # Item access mirrors attribute access, useful for reserved words
        # (e.g. api.configuration["import"]).
        return self._object(attr)
# pylint: disable=too-few-public-methods
class ZabbixAPIMethod:
    """Callable proxy for one Zabbix API method (e.g. ``host.get``).

    Calling the instance forwards positional *or* keyword arguments to
    the parent's :meth:`ZabbixAPI.do_request` and unwraps ``"result"``.
    """

    def __init__(self, method: str, parent: ZabbixAPI):
        self._method = method
        self._parent = parent

    def __call__(self, *args, **kwargs):
        # JSON-RPC "params" is either a list or an object, never both.
        if args and kwargs:
            raise TypeError("Found both args and kwargs")
        params = args if args else kwargs
        response = self._parent.do_request(self._method, params)
        return response["result"]
class ZabbixAPIObject:
    """Proxy for one Zabbix API object (e.g. ``host`` or ``trigger``).

    Both attribute access (``obj.get``) and item access (``obj["get"]``)
    produce a :class:`ZabbixAPIMethod` bound to ``"<name>.<attr>"``.
    """

    def __init__(self, name: str, parent: ZabbixAPI):
        self._name = name
        self._parent = parent

    def _method(self, attr: str) -> ZabbixAPIMethod:
        """Build the callable proxy for ``<object>.<attr>``."""
        qualified = f"{self._name}.{attr}"
        return ZabbixAPIMethod(qualified, self._parent)

    def __getattr__(self, attr: str) -> ZabbixAPIMethod:
        return self._method(attr)

    def __getitem__(self, attr: str) -> ZabbixAPIMethod:
        return self._method(attr)
class ZabbixAPIObjectClass(ZabbixAPIObject):
    """Deprecated alias of :class:`ZabbixAPIObject`, kept for backwards
    compatibility."""

    def __init__(self, *args, **kwargs):
        message = "ZabbixAPIObjectClass has been renamed to ZabbixAPIObject"
        warn(message, DeprecationWarning, 2)
        super().__init__(*args, **kwargs)
# pylint: enable=too-few-public-methods | zabbix | /zabbix-2.0.2-py3-none-any.whl/pyzabbix/api.py | api.py |
zabbix2jira
===========
*Creates or updates a ticket on JIRA from Zabbix alarms*
Purpose
-------
zabbix2jira is a simple command line program that receives an action from
Zabbix (or any other script) and creates (or updates) a ticket on a project
inside a JIRA installation.
Installation
------------
Use the following command under the program directory::
$ pip install -e .
We recommend using virtualenv to setup a self-contained app directory. In this
case, you should use::
$ virtualenv env
$ source env/bin/activate
$ pip install -e .
If you want to see which libraries this application uses, please check the
``requirements.txt`` file.
Usage
-----
Default paths:
- Configuration: */etc/zabbix2jira.cfg*
- Log: */var/log/zabbix2jira.log*
- Cache Directory: */var/cache/zabbix2jira*
To run it, activate virtualenv first::
$ source env/bin/activate
$ zabbix2jira -h
Configuration
-------------
The command itself uses some default configuration, but you will want
to configure a file to configure your jira url, username and password.
Simply copy the ``sample-config.cfg`` file to the default configuration
path (*/etc/zabbix/zabbix2jira.cfg*) or copy to any location and use the
``-c`` parameter at the CLI call.
The file itself is self-explanatory.
Zabbix Integration
------------------
Create an action that calls the script with the proper variables.
We create a action named ``zabbix2jira`` with the conditions:
* A Maintenance status not in maintenance
* B Trigger value = PROBLEM
* C Trigger value = OK
And with calculation: ``A and (B or C)``. Pay attention to the
``(B or C)`` because we want to run the action both on alarm and recovery.
*Note: this changed on Zabbix 3.2. You can skip and B and C conditions
because on this version, problem and recovery operations are separate.*
Then on the *Operations* Tab, create a step that executes a ``Custom script``
on the Zabbix Server with the following commands::
zabbix2jira -v -i {EVENT.ID} PROJECT {TRIGGER.STATUS} "[Zabbix Alert] {HOSTNAME} - {TRIGGER.NAME}" "Alert Details"
You can also use the script as a user media and send a message to it.
Note that if using a *virtualenv* setup, activate it before the previous command::
source /opt/z2d/env/bin/activate
Examples
--------
Here are some examples for running zabbix2jira.
Create an issue with component ``Alert``::
zabbix2jira -v -p Alert PROJECT PROBLEM "[Zabbix Alert] PROBLEM" "Alert Details"
With the zabbix backend enabled, track the event id (123) to acknowledge it::
zabbix2jira -v -i 123 PROJECT PROBLEM "[Zabbix Alert] PROBLEM" "Alert Details"
Recover the previous issue::
    zabbix2jira -v -i 123 PROJECT OK "[Zabbix Alert] PROBLEM" "Alert Details"
Create an issue with type ``Bug``::
zabbix2jira -v -t Bug PROJECT PROBLEM "[Zabbix Alert] PROBLEM" "Alert Details"
| zabbix2jira | /zabbix2jira-0.2.1.tar.gz/zabbix2jira-0.2.1/README.rst | README.rst |
zabbix_exporter
===============
.. image:: https://img.shields.io/badge/python-2.7,%203.6-blue.svg
:target: https://pypi.python.org/pypi/zabbix-exporter/
.. image:: https://travis-ci.org/MyBook/zabbix-exporter.svg?branch=master
:target: https://travis-ci.org/MyBook/zabbix-exporter
.. image:: https://codecov.io/gh/MyBook/zabbix-exporter/branch/master/graph/badge.svg
:target: https://codecov.io/gh/MyBook/zabbix-exporter
Usage
=====
::
Usage: zabbix_exporter [OPTIONS]
Zabbix metrics exporter for Prometheus
Use config file to map zabbix metrics names/labels into prometheus. Config
below transfroms this:
local.metric[uwsgi,workers,myapp,busy] = 8
local.metric[uwsgi,workers,myapp,idle] = 6
into familiar Prometheus gauges:
uwsgi_workers{instance="host1",app="myapp",status="busy"} 8
uwsgi_workers{instance="host1",app="myapp",status="idle"} 6
YAML:
metrics:
- key: 'local.metric[uwsgi,workers,*,*]'
name: 'uwsgi_workers'
labels:
app: $1
status: $2
reject:
- 'total'
Options:
--config PATH Path to exporter config
--port INTEGER Port to serve prometheus stats [default: 9224]
--url TEXT HTTP URL for zabbix instance
--login TEXT Zabbix username
--password TEXT Zabbix password
--verify-tls / --no-verify Enable TLS cert verification [default: true]
--timeout INTEGER API read/connect timeout
--verbose
--dump-metrics Output all metrics for human to write yaml
config
--version
--help Show this message and exit.
Deploying with Docker
=====================
::
docker run -d --name zabbix_exporter -v /path/to/your/config.yml:/zabbix_exporter/zabbix_exporter.yml --env=ZABBIX_URL="https://zabbix.example.com/" --env="ZABBIX_LOGIN=username" --env="ZABBIX_PASSWORD=secret" mybook/zabbix-exporter
| zabbix_exporter | /zabbix_exporter-1.0.2.tar.gz/zabbix_exporter-1.0.2/README.rst | README.rst |
.. highlight:: shell
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every
little bit helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/coagulant/zabbix_exporter/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug"
is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "feature"
is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
zabbix exporter could always use more documentation, whether as part of the
official zabbix exporter docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/coagulant/zabbix_exporter/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `zabbix_exporter` for local development.
1. Fork the `zabbix_exporter` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/zabbix_exporter.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv zabbix_exporter
$ cd zabbix_exporter/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox::
$ flake8 zabbix_exporter tests
$ python setup.py test
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 2.6, 2.7, 3.3, 3.4 and 3.5, and for PyPy. Check
https://travis-ci.org/coagulant/zabbix_exporter/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ python -m unittest tests.test_zabbix_exporter
| zabbix_exporter | /zabbix_exporter-1.0.2.tar.gz/zabbix_exporter-1.0.2/CONTRIBUTING.rst | CONTRIBUTING.rst |
import pprint
class ZabbixDefaults(dict):
"""
Store default values for zabbix settings, and map template names to zabbix id
Example:
# Override defaults
c = ZabbixDefauls(disabled=True, item={'interval': 100})
"""
def __init__(self, **args):
# Return value constant
self.__dict__['return_type'] = (
'float',
'char',
'log',
'numeric',
'text')
# Checking method constants
self.__dict__['method'] = ('agent', 'snmp v1', 'trapper',
'simple', 'snmp v2', 'internal', 'snmp v3',
'active', 'aggregate', '', 'external',
'database monitor', 'ipmi', 'ssh', 'telnet',
'calculated', 'jmx', 'snmp trap')
self.__dict__['store_as'] = ('as is', 'speed', 'change')
# Graph constants
self.__dict__['graph_type'] = ('normal', 'stacked', 'pie', 'exploded')
self.__dict__['graph_y_type'] = ('calculated', 'fixed', 'item')
self.__dict__['graph_func'] = {
'min': 1,
'avg': 2,
'max': 4,
'all': 7,
'last': 9}
self.__dict__['graph_style'] = ('line', 'filled region', 'bold line',
'dot', 'dashed line', 'gradient line')
self.__dict__['y_min_max_type'] = ('calculated', 'fixed', 'item')
# Trigger severety level constants
self.__dict__['warn_level'] = (
'none',
'info',
'warning',
'average',
'high',
'disaster')
# Comparsion, for severety use in alerts
self.__dict__['cmp'] = {'=': 0, '!=': 1, '>=': 5, '<=': 6}
# Trigger status, for alerts
self.__dict__['trigger_status'] = ('ok', 'problem')
# Default parameters
self.__dict__['default'] = {
'disabled': False,
'item': {
'return_type': 'numeric',
'method': 'agent',
'interval': 60,
'history': 7,
'trends': 365,
'store_as': 'as is',
},
'trigger': {
'warn_level': 'none',
'multiple_warn': False,
},
'graph': {
'height': 200,
'width': 900,
'type': 'normal',
'func': 'avg',
'color': '009900',
'y_side': 'left',
'style': 'line',
'gitype': 'simple',
'3d_view': 0,
'show_legend': 1,
'show_working_time': 0,
'show_triggers': 0,
'y_min_type': 'calculated',
'y_max_type': 'calculated',
'percent_right': 0.0,
'percent_left': 0.0,
},
'discovery': {
'delay': 3600,
'lifetime': 30,
},
'alert': {
'recovery': True,
'trigger_status': 'problem',
'warn_level': 'warning',
'subject': '[{EVENT.ID}] {TRIGGER.STATUS}: {TRIGGER.NAME} on {HOST.NAME1}',
'text': ("{TRIGGER.SEVERITY}:\n"
"{TRIGGER.DESCRIPTION}\n"
"{HOST.NAME1}:[{ITEM.NAME1}]: {ITEM.VALUE1}"),
'recovery_subject': '[{EVENT.ID}] {TRIGGER.STATUS}: {TRIGGER.NAME} on {HOST.NAME1}',
'recovery_text': ("{TRIGGER.SEVERITY}:\n"
"{TRIGGER.DESCRIPTION}\n"
"{HOST.NAME1}:[{ITEM.NAME1}]: {ITEM.VALUE1}"),
'eventsource': 0, # Trigger handle
'eval': 0, # AND/OR: 0, AND: 1, OR: 2
'escalation_time': 300, # Must be >60s
'over': 'Email', # Email: 1
'action': 'message',
'cmp': '>=',
},
}
# Override settings with argumentsi
for k, v in args.iteritems():
if isinstance(v, dict):
self.__dict__['default'][k].update(v)
else:
self.__dict__['default'][k] = v
def __getitem__(self, item):
return self.__dict__.get(item)
def get(self, item):
return self.__getitem__(item)
def __repr__(self):
pf = pprint.PrettyPrinter(indent=4).pformat
return pf(self.__dict__) | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/defaults.py | defaults.py |
import logging
from group import ZabbixGroups
from object import ZabbixObject
# Connect to logger object
log = logging.getLogger(__name__)
class ZabbixAutoreg(ZabbixObject):
"""
Implements working with zabbix autoregister action objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix application template.
"""
def __init__(self, zapi, obj):
self.zapi = zapi
self.obj = obj
self.obj_type = 'action'
ZabbixObject(self.zapi, self.obj)
def _create_request(self):
"""
Create request for changes.
Return (str) Request for changes.
"""
result = {}
result['name'] = 'Auto registration {role}'.format(
role=self.obj['name'])
# if contains metadata tag then use it
if isinstance(
self.obj['autoreg'],
dict) and self.obj['autoreg'].get('metadata'):
metadata = self.obj['autoreg']['metadata']
else:
metadata = self.obj['name']
result['conditions'] = [
# actionid: host metadata - 24, like - 2
{'conditiontype': 24, 'operator': 2, 'value': metadata}
]
result['operations'] = [
{
# actionid: link template - 6
'operationtype': 6, 'esc_step_to': 1, 'esc_step_from': 1, 'esc_period': 0,
'optemplate': [self.zapi.get_id('template', self.obj['name'], with_id=True)],
},
# Add host
{'esc_step_from': 1,
'esc_period': 0,
'operationtype': 2,
'esc_step_to': 1},
# Disable host
{'esc_step_from': 1,
'esc_period': 0,
'operationtype': 9,
'esc_step_to': 1},
]
# if contains add_to_group
if isinstance(
self.obj['autoreg'],
dict) and self.obj['autoreg'].get('add_to_group'):
result['operations'].append(
{
# actionid: add to hostgroup - 4
'operationtype': 4, 'esc_step_to': 1, 'esc_step_from': 1, 'esc_period': 0,
'opgroup': ZabbixGroups(
self.zapi,
self.obj.get('autoreg')['add_to_group']).apply()
},
)
return result
def apply(self):
"""
Push action object to zabbix server.
"""
result = None
req = self._create_request()
log.info("Auto-registration: '%s'", req['name'])
# Get 'action' object id
log.debug('ZabbixAutoreg._create_request: %s', req)
obj_id = self.zapi.get_id('action', req['name'])
if obj_id:
result = self.zapi.action.update(
actionid=obj_id,
eventsource=2,
status=0,
esc_period=0,
evaltype=0,
conditions=req['conditions'],
operations=req['operations'])
else:
result = self.zapi.action.create(
name=req['name'],
eventsource=2,
status=0,
esc_period=0,
evaltype=0,
conditions=req['conditions'],
operations=req['operations'])
return result | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/autoreg.py | autoreg.py |
import logging
from object import ZabbixObject
log = logging.getLogger(__name__)
class ZabbixItem(ZabbixObject):
"""
Implements working with zabbix item objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix item template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj=None, defaults=None, template_id=None):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
self.obj_type = 'item'
ZabbixObject(self.zapi, self.obj, self.template_id)
def _create_request(self):
"""
Create request for item changes.
Return (str) Request for changes.
"""
value_type = self.defaults['return_type'].index(
self.obj.get(
'return_type',
'numeric').lower())
status = int(
bool(
self.obj.get(
'disabled',
self.defaults['default']['disabled'])))
delay = int(
self.obj.get(
'interval',
self.defaults['default']['item']['interval']))
history = int(
self.obj.get(
'history',
self.defaults['default']['item']['history']))
trends = int(
self.obj.get(
'trends',
self.defaults['default']['item']['trends']))
type_ = self.defaults['method'].index(
self.obj.get(
'method',
self.defaults['default']['item']['method']).lower())
delta = self.defaults['store_as'].index(
self.obj.get(
'store_as',
self.defaults['default']['item']['store_as']).lower())
result = {
'name': self.obj['name'],
'type': type_,
'key_': self.obj['key'],
'value_type': value_type,
'status': status,
'applications': [self.obj.get('app_id')],
'hostid': self.template_id,
'delay': delay,
'history': history,
'trends': trends,
'description': self.obj.get('description', ''),
'delta': delta,
}
if 'params' in self.obj:
result.update({'params': self.obj['params']})
if self.obj.get(
'return_type',
'numeric').lower() != 'boolean' and self.obj.get('units'):
result.update({
'units': self.obj.get('units'),
'multiplier': int(bool(self.obj.get('multiplier', 0))),
'formula': self.obj.get('multiplier', 0),
})
return result
def disable(self, id_):
"""
Disable specifiec zabbix item.
Arguments:
id_ (int) Zabbix item ID.
"""
return self.zapi.item.update({'itemid': id_, 'status': 1})
class ZabbixItemPrototype(ZabbixItem):
"""
Implements working with zabbix item prototype objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix item template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, defaults, template_id):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
ZabbixItem(self.zapi, self.obj, self.defaults, self.template_id)
def _create_request(self):
"""
Create request for item prototype changes.
Return (str) Request for changes.
"""
result = None
result = super(ZabbixItemPrototype, self)._create_request()
result.update({'ruleid': self.obj.get('rule_id')})
self.obj_type = 'itemprototype'
return result | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/item.py | item.py |
import logging
log = logging.getLogger(__name__)
class ZabbixObject(object):
"""
Base class for all zabbix objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix template.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, template_id=None, obj_type=None):
self.zapi = zapi
self.obj = obj
self.template_id = template_id
self.obj_type = obj_type
def _get_id_name(self):
"""
Return id name by object type (Zabbix use different name for id).
"""
result = None
id_name = {'discoveryrule': 'item',
'hostgroup': 'group',
'graphptototype': 'graph',
'itemprototype': 'item',
'triggerprototype': 'trigger',
}.get(self.obj_type, self.obj_type)
result = '{0}id'.format(id_name)
return result
def _func(self, req):
"""
Generate zapi function name.
"""
result = None
if self.template_id:
obj_id = self.zapi.get_id(
self.obj_type,
self.obj['name'],
hostid=self.template_id)
if obj_id:
req.update({self._get_id_name(): obj_id})
zbx_method = 'update'
else:
zbx_method = 'create'
result = "self.zapi.{obj_type}.{zbx_method}".format(
obj_type=self.obj_type,
zbx_method=zbx_method)
return result
def apply(self):
"""
Push this object to zabbix server.
"""
result = None
req = self._create_request()
log.info(
"%s: '%s'",
str(self.obj_type).capitalize(),
self.obj.get('name'))
func = self._func(req)
log.debug('%s: %s', func, req)
result = eval(func)(req)
return result
def delete(self):
"""
Delete this object from zabbix.
"""
result = None
obj_id = self.zapi.get_id(self.obj_type, self.obj['name'])
if obj_id:
func = 'self.zapi.{obj_type}.delete'.format(obj_type=self.obj_type)
result = eval(func)(obj_id)
return result | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/object.py | object.py |
import fnmatch
import logging
import os
import pprint
import yaml
from group import ZabbixGroups
log = logging.getLogger(__name__)
class ZabbixTemplate(object):
"""
Implements working with zabbix template objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix template.
"""
def __init__(self, zapi, obj):
self.zapi = zapi
self.obj = obj
self.obj_type = 'template'
def _create_request(self):
"""
Create request for template changes.
Return (str) Request for changes.
"""
return {'groups': ZabbixGroups(self.zapi, self.obj['groups']).apply()}
def apply(self):
"""
Push template object to zabbix server.
"""
result = None
req = self._create_request()
log.info("%s: '%s'", str(self.obj_type).capitalize(), self.obj['name'])
# Get linked templates id
if self.obj.get('templates'):
req['templates'] = self.zapi.get_id(
'template',
self.obj['templates'])
# Get current template id
self.template_id = self.zapi.get_id('template', self.obj['name'])
if self.template_id:
req['templateid'] = self.template_id
result = self.zapi.template.update(req)
else:
req['host'] = self.obj['name']
result = self.zapi.template.create(req)
result = result['templateids'][0]
return result
class ZabbixTemplateFile(dict):
"""
Load and Save locally resulting zabbix template.
Attributes:
name (str): Name of template for load
pattern (str): Pattern to search for templates. Default: '*'
basedir (str): Directory that store zabbix templates.
Default: './templates'
file_extension (str): Extension for template files. Default: '.yaml'
"""
def __init__(
self,
name,
pattern='*',
basedir='./',
file_extension='.yaml',
templates_dir=None):
self.name = name
self.file_extension = file_extension
self.pattern = pattern + self.file_extension
self.basedir = '{0}/{1}'.format(basedir, name)
self.templates_dir = templates_dir
# Load template from files
self.template = {}
self.processed_items = 0
self.template = self._load(self.basedir)
def _walk(self, basedir):
"""
Return list of files for current template
"""
result = []
for root, dirs, files in os.walk(basedir):
for file_ in fnmatch.filter(files, self.pattern):
result.append(os.path.join(root, file_))
return result
def _search(self):
"""
Try to find template in template_dir
"""
result = None
# get list of files
files_list = self._walk(self.templates_dir)
for file_ in files_list:
with open(file_, 'r') as f:
line = f.readline()
if line[0:5] == 'name:':
name = line[5:].strip(' \'\"\n')
if name == self.name:
result = os.path.dirname(file_)
log.debug('Found Template: "%s" in %s', name, result)
break
return result
def _merge(self, t1, t2):
"""
Merge two templates.
Attributes:
t1 (dict)
t2 (dict)
"""
for k, v in t2.iteritems():
if t1.get(k, {}) == {}:
t1[k] = v
else:
if isinstance(t2.get(k, {}), dict):
t1.get(k, {}).update(v)
elif isinstance(t2.get(k), list):
t1.get(k).extend(v)
else:
t1[k] = v
log.debug('Template result:\n%s', t1)
def _load(self, basedir):
"""
Load current template from files and save it class variable.
"""
result = {}
files_list = self._walk(basedir)
log.debug("Template files list: %s", files_list)
for file_ in files_list:
# Read template file
with open(file_) as f:
str_buf = f.read()
# Load template
template = yaml.safe_load(str_buf)
log.debug(
'Template loaded from "%s":\n%s',
file_,
template)
# Merge template
self._merge(result, template)
if not result:
log.debug("Trying find template in %s", self.templates_dir)
template_dir = self._search()
if template_dir:
result = self._load(template_dir)
else:
# Save template in class variable
log.debug('Combined template:\n%s', result)
log.info("Template '%s' was fully loaded.", result['name'])
return result
def __getitem__(self, item, value=None):
return self.template.get(item, value)
def __setitem__(self, item, value):
self.template[item] = value
def __repr__(self):
pf = pprint.PrettyPrinter(indent=4).pformat
return pf(self.template)
def iteritems(self):
return iter(self.template.iteritems())
def get(self, item, value=None):
return self.__getitem__(item, value)
def __bool__(self):
return bool(self.template)
__nonzero__ = __bool__ | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/template.py | template.py |
import logging
from object import ZabbixObject
log = logging.getLogger(__name__)
class ZabbixGraph(ZabbixObject):
"""
Implements working with zabbix graph objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix graph template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, defaults, template_id):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
self.obj_type = 'graph'
self.zbx_item_class = 'item'
ZabbixObject(self.zapi, self.obj, self.template_id)
def _get_y_value(self, type_, value):
result = None
if type_ == self.defaults['y_min_max_type'].index('fixed'):
result = float(value)
elif type_ == self.defaults['y_min_max_type'].index('item'):
result = self.zapi.get_id(
self.zbx_item_class,
value,
hostid=self.template_id)
logging.debug(
'_get_y_valye({0},{1}): {2}'.format(
type_,
value,
result))
return result
def _create_graph_items_req(self, req):
"""
Create request for graph items changes.
Return (str) Request for changes.
"""
gitems = self.obj.get('items', [])
req['gitems'] = []
for gitem in gitems:
item_id = self.zapi.get_id(
self.zbx_item_class,
gitem['item'],
hostid=self.template_id)
item = {
'itemid': item_id,
'color': gitem.get(
'color',
self.defaults['default']['graph']['color']),
'sorted': gitems.index(gitem),
'calc_fnc': self.defaults['graph_func'].get(
gitem.get(
'func',
self.defaults['default']['graph']['func']).lower()),
'yaxisside': {
'left': 0,
'right': 1}.get(
gitem.get(
'y_side',
self.defaults['default']['graph']['y_side']).lower()),
}
type_ = self.obj.get(
'type',
self.defaults['default']['graph']['type']).lower()
item.update(
{
'normal': {
'drawtype': self.defaults['graph_style'].index(
gitem.get(
'style', self.defaults['default']['graph']['style']).lower())}, 'pie': {
'type': {
'simple': 0, 'graph sum': 2}.get(
gitem.get(
'type', self.defaults['default']['graph']['gitype']).lower())}}.get(
type_, {}))
req['gitems'].append(item)
def _create_request(self):
"""
Create request for changes.
Return (str) Request for changes.
"""
result = {
'name': self.obj['name'],
'width': int(
self.obj.get(
'width',
self.defaults['default']['graph']['width'])),
'height': int(
self.obj.get(
'height',
self.defaults['default']['graph']['height'])),
'graphtype': self.defaults['graph_type'].index(
self.obj.get(
'type',
self.defaults['default']['graph']['type']).lower()),
}
type_ = self.obj.get(
'type',
self.defaults['default']['graph']['type']).lower()
{
'normal': self._normal_graph_req,
'stacked': self._stacked_graph_req,
'pie': self._pie_graph_req,
'exploded': self._exploded_graph_req,
}[type_](result)
return result
def _stacked_graph_req(self, req):
"""
Create request for Stacked graph changes.
Return (str) Request for changes.
"""
self._create_graph_items_req(req)
req.update(
{
'show_legend': int(
bool(
self.obj.get(
'show_legend',
self.defaults['default']['graph']['show_legend']))),
'show_work_period': int(
bool(
self.obj.get(
'show_working_time',
self.defaults['default']['graph']['show_working_time']))),
'show_triggers': int(
bool(
self.obj.get(
'show_triggers',
self.defaults['default']['graph']['show_triggers']))),
'ymin_type': self.defaults['graph_y_type'].index(
self.obj.get(
'y_min_type',
self.defaults['default']['graph']['y_min_type']).lower()),
'ymax_type': self.defaults['graph_y_type'].index(
self.obj.get(
'y_max_type',
self.defaults['default']['graph']['y_max_type']).lower()),
})
if req['ymin_type'] == self.defaults['y_min_max_type'].index('fixed'):
req.update(
{'yaxismin': self._get_y_value(req['ymin_type'], self.obj.get('y_min')), })
elif req['ymin_type'] == self.defaults['y_min_max_type'].index('item'):
req.update(
{'ymin_itemid': self._get_y_value(req['ymin_type'], self.obj.get('y_min')), })
if req['ymax_type'] == self.defaults['y_min_max_type'].index('fixed'):
req.update(
{'yaxismax': self._get_y_value(req['ymax_type'], self.obj.get('y_max')), })
elif req['ymax_type'] == self.defaults['y_min_max_type'].index('item'):
req.update(
{'ymax_itemid': self._get_y_value(req['ymax_type'], self.obj.get('y_max')), })
log.debug('Stacked graph:')
def _normal_graph_req(self, req):
"""
Create request for Normal graph changes.
Return (str) Request for changes.
"""
self._stacked_graph_req(req)
req.update({'percent_right': self.obj.get('percent_right',
self.defaults['default']['graph']['percent_right']),
'percent_left': self.obj.get('percent_left',
self.defaults['default']['graph']['percent_left']),
})
log.debug('Normal graph:')
def _pie_graph_req(self, req):
"""
Create request for Pie graph changes.
Return (str) Request for changes.
"""
self._create_graph_items_req(req)
req.update(
{
'show_legend': int(
bool(
self.obj.get(
'show_legend',
self.defaults['default']['graph']['show_legend']))),
'show_3d': int(
bool(
self.obj.get(
'3d_view',
self.defaults['default']['graph']['show_legend'])))})
log.debug('Pie graph:')
def _exploded_graph_req(self, req):
"""
Create request for Exploded graph changes.
Return (str) Request for changes.
"""
self._pie_graph_req(req)
log.debug('Exploded graph:')
def apply(self):
"""
Push graph object to zabbix server.
"""
result = None
req = self._create_request()
log.info("%s: '%s'", str(self.obj_type).capitalize(), self.obj['name'])
# Get 'graph' or 'graphprototype' object id
obj_id = self.zapi.get_id(
self.obj_type,
self.obj['name'],
hostid=self.template_id)
if obj_id:
req.update({'graphid': obj_id})
zbx_method = 'update'
else:
zbx_method = 'create'
func = "self.zapi.{obj_type}.{zbx_method}".format(
obj_type=self.obj_type,
zbx_method=zbx_method)
log.debug('%s: %s', func, req)
result = eval(func)(req)
return result
class ZabbixGraphPrototype(ZabbixGraph):
"""
Implements working with zabbix graph prototype objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix graph prototype template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, defaults, template_id):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
ZabbixGraph(self.zapi, self.obj, self.defaults, self.template_id)
def _create_request(self):
"""
Create request for graph prototype changes.
Return (str) Request for changes.
"""
self.obj_type = 'graphprototype'
self.zbx_item_class = 'itemprototype'
return super(ZabbixGraphPrototype, self)._create_request() | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/graph.py | graph.py |
import logging
from group import ZabbixGroups
from object import ZabbixObject
# Connect to logger object
log = logging.getLogger(__name__)
class ZabbixTriggerAction(ZabbixObject):
"""
Implements working with zabbix trigger action objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix application template.
"""
def __init__(self, zapi, obj, defaults, template_id, template_name):
self.zapi = zapi
self.obj = obj
self.obj_type = 'action'
self.defaults = defaults
self.template_id = template_id
self.template_name = template_name
ZabbixObject(self.zapi, self.obj)
def _create_request(self):
"""
Create request for changes.
Return (str) Request for changes.
"""
result = {}
if self.obj:
result['name'] = '{0}: {1}'.format(self.template_name, self.obj['name'])
result['def_shortdata'] = self.obj.get('subject',
self.defaults['default']['alert']['subject'])
result['def_longdata'] = self.obj.get('text',
self.defaults['default']['alert']['text'])
if bool(
self.obj.get(
'recovery',
self.defaults['default']['alert']['recovery'])):
result['recovery_msg'] = 1
result['r_shortdata'] = self.obj.get('recovery_subject',
self.defaults['default']['alert']['recovery_subject'])
result['r_longdata'] = self.obj.get('recovery_text',
self.defaults['default']['alert']['recovery_text'])
result['eventsource'] = self.defaults[
'default']['alert']['eventsource']
result['status'] = int(
bool(
self.obj.get(
'disabled',
self.defaults['default']['disabled'])))
result['esc_period'] = self.obj.get(
'escalation_time',
self.defaults['default']['alert']['escalation_time'])
result['evaltype'] = self.obj.get(
'eval',
self.defaults['default']['alert']['eval'])
alert_severity = self.defaults['warn_level'].index(
self.obj.get(
'severity',
self.defaults['default']['alert']['warn_level']).lower())
alert_severity_cmp = self.defaults['cmp'][
self.obj.get(
'severity_cmp',
self.defaults['default']['alert']['cmp']).lower()]
alert_trigger_status = self.defaults['trigger_status'].index(
self.obj.get(
'trigger_status',
self.defaults['default']['alert']['trigger_status']).lower())
result['conditions'] = [
# actionid: Mainenance status - 16, not in - 7
{'conditiontype': 16, 'operator': 7},
# actionid: Trigger value - 5, equal - 0, PROBLEM - 1
{'conditiontype': 5,
'operator': 0,
'value': alert_trigger_status},
# actionid: Trigger severity - 4, equal - 0, Warning - 2
{'conditiontype': 4,
'operator': alert_severity_cmp,
'value': alert_severity},
]
if 'group' in self.obj:
result['conditions'].append({
'conditiontype': 0,
'operator': 0,
'value': self.zapi.get_id(
'hostgroup',
self.obj['group'])
})
elif not 'all' in self.obj:
# actionid: Template - 13, like - 2
result['conditions'].append({
'conditiontype': 13,
'operator': 0,
'value': self.template_id})
result['operations'] = []
# fill operations for alert
for op in self.obj.get('do', []):
# check if we need to send a message to user or group
if op.get(
'action',
self.defaults['default']['alert']['action']) == 'message':
# basic config for message
do_obj = {
'operationtype': 0,
'esc_step_to': 1,
'esc_step_from': 1,
'esc_period': 0,
'evaltype': 0,
}
do_obj.update(
{
'opmessage': {
'mediatypeid': self.zapi.get_id(
'mediatype',
op.get(
'over',
self.defaults['default']['alert']['over'])),
'default_msg': 1,
}})
if op.get('to_user'):
do_obj.update(
{'opmessage_usr': [self.zapi.get_id('user', op['to_user'], with_id=True)]})
if op.get('to_group'):
do_obj.update(
{'opmessage_grp': [self.zapi.get_id('usergroup', op['to_group'], with_id=True)]})
result['operations'].append(do_obj)
# TODO: elif = 'exec' ... run
return result
def apply(self):
"""
Push action object to zabbix server.
"""
result = None
req = self._create_request()
log.info("%s: '%s'", str(self.obj_type).capitalize(), req['name'])
# Get 'action' object id
log.debug(
'ZabbixTriggerAction._create_request: {req}'.format(
req=req))
obj_id = self.zapi.get_id('action', req['name'])
if obj_id:
req['actionid'] = obj_id
req.pop('name')
obj_action = 'update'
else:
obj_action = 'create'
func = 'self.zapi.{obj_type}.{obj_action}'.format(
obj_type=self.obj_type,
obj_action=obj_action)
result = eval(func)(req)
return result | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/trigger_action.py | trigger_action.py |
import logging
from object import ZabbixObject
log = logging.getLogger(__name__)
class ZabbixTrigger(ZabbixObject):
"""
Implements working with zabbix trigger objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix trigger template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, defaults, template_id):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
self.obj_type = 'trigger'
ZabbixObject(self.zapi, self.obj, self.template_id)
def _create_request(self):
"""
Create request for trigger changes.
Return (str) Request for changes.
"""
result = None
result = {
# In trigger objects 'description' = 'name'
'description': self.obj['name'],
'expression': self.obj['expression'],
'status': int(bool(self.obj.get('disabled', self.defaults['default']['disabled']))),
'priority': self.defaults['warn_level'].index(self.obj.get('warn_level', self.defaults['default']['trigger']['warn_level']).lower()),
'type': int(bool(self.obj.get('multiple_warn', self.defaults['default']['trigger']['multiple_warn']))),
'url': self.obj.get('url', ''),
}
return result
class ZabbixTriggerPrototype(ZabbixTrigger):
"""
Implements working with zabbix trigger prototype objects.
Arguments:
zapi (ZabbixAPI) ZabbixAPI connector to send request.
obj (dict) Dictionary discribed zabbix trigger prototype template.
defaults (ZabbixDefaults) Default values.
template_id (int) Zabbix Template id.
"""
def __init__(self, zapi, obj, defaults, template_id):
self.zapi = zapi
self.obj = obj
self.defaults = defaults
self.template_id = template_id
ZabbixTrigger(self.zapi, self.obj, self.defaults, self.template_id)
def _create_request(self):
"""
Create request for trigger changes.
Return (str) Request for changes.
"""
result = None
result = super(ZabbixTriggerPrototype, self)._create_request()
self.obj_type = 'triggerprototype'
return result | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/trigger.py | trigger.py |
import collections
import json
import logging
import sys
import os
try:
    import argparse
except ImportError:
    # BUG FIX: the original bare 'except:' also swallowed SystemExit and
    # KeyboardInterrupt; only a missing module should trigger this message.
    # argparse ships with Python 2.7+/3.2+; older interpreters need the
    # backported 'argparse' package installed.
    raise Exception(
        "You need python version 2.7+ or installed argparse module")
from app import ZabbixApp
from autoreg import ZabbixAutoreg
from defaults import ZabbixDefaults
from discovery import ZabbixDiscovery
from group import ZabbixGroup
from graph import ZabbixGraph, ZabbixGraphPrototype
from item import ZabbixItem, ZabbixItemPrototype
from macro import ZabbixMacro
from object import ZabbixObject
from template import ZabbixTemplate, ZabbixTemplateFile
from trigger import ZabbixTrigger, ZabbixTriggerPrototype
from trigger_action import ZabbixTriggerAction
from zabbix.api import ZabbixAPI
# Connect to logger object
log = logging.getLogger(__name__)
class ZabbixCLIArguments(object):
    """
    Collect zabbixcli configuration from environment variables and CLI
    arguments.

    The merged options end up in ``self.args`` (a plain dict).  Environment
    variables are read first, then command-line options override them;
    options the user did not supply (falsy values) are dropped so the
    environment-provided values survive.
    """

    def __init__(self):
        # Create argument parser object
        self.argparser = argparse.ArgumentParser(
            description='Template based zabbix configuration tool')
        self.args = {}
        self._loadFromEnvironment()
        self._parse()

    def _loadFromEnvironment(self):
        """
        Load arguments from environment variables (ZBXCLI_*).
        """
        # Map env variables to cli arguments
        args_map = {
            'ZBXCLI_USER': 'user',
            'ZBXCLI_PASS': 'pass',
            'ZBXCLI_URL': 'server',
            'ZBXCLI_TEMPLATES': 'templates_dir',
        }
        # BUG FIX: dict.iteritems() exists only on Python 2; items() behaves
        # identically there and keeps the module importable on Python 3.
        for ev, arg in args_map.items():
            if ev in os.environ:
                self.args[arg] = os.environ[ev]

    def _parse(self):
        """
        Parse CLI arguments into self.args (CLI overrides environment).
        """
        # Add arguments
        self.argparser.add_argument(
            '-t',
            '--template',
            action='store',
            type=str,
            help='Template name for sync')
        self.argparser.add_argument(
            '-s',
            '--server',
            action='store',
            type=str,
            help='Zabbix server URL')
        self.argparser.add_argument(
            '-u',
            '--user',
            action='store',
            type=str,
            help='Zabbix user name')
        self.argparser.add_argument(
            '-p',
            '--pass',
            action='store',
            type=str,
            help='Zabbix user password')
        self.argparser.add_argument(
            '-o',
            '--only',
            action='store_true',
            help='Sync only specified templates')
        self.argparser.add_argument(
            '-d',
            '--debug',
            action='store_true',
            help='Enable debug mode')
        self.argparser.add_argument(
            '-D',
            '--delete',
            action='store',
            type=str,
            nargs='+',
            help='Delete object from zabbix. Example: -D item "Template OS Linux" "Available memory"')
        # Update arguments from CLI, filtering out unset (falsy) options so
        # values loaded from the environment are not clobbered by None/False.
        self.args.update(
            filter(
                lambda x: x[1],
                vars(
                    # Parse arguments
                    self.argparser.parse_args()).items()))
class ZabbixCLI(ZabbixCLIArguments):
def __init__(self, template=None):
ZabbixCLIArguments.__init__(self)
self._configureLogging()
log.debug('Parser arguments: %s', self.args)
# if no arguments, jsut print help
if len(sys.argv) <= 1:
self.argparser.print_help()
sys.exit()
if not self.args.get('template'):
sys.exit('Template should be specified.')
self.url = self.args['server']
try:
self.zapi = ZabbixAPI(
self.url,
user=self.args['user'],
password=self.args['pass'])
except:
log.error('Error while trying open connection to zabbix server: %s',
self.url)
# If we need to delete an object and exit
if self.args.get('delete'):
template_id = self.zapi.get_id('template', self.args['delete'][1])
if ZabbixObject(self.zapi,
{'name': self.args['delete'][2]},
template_id=template_id,
obj_type=self.args['delete'][0]).delete():
log.info(
'"{2}" {0} was deleted from "{1}"'.format(
*self.args['delete']))
else:
log.exit(
'Error while trying to delete: "{2}" {0} from "{1}"'.format(
*self.args['delete']))
exit()
# Set template name from __init__ params or args
if template:
self.template_name = template
else:
self.template_name = self.args.get('template')
# Load template from file
self.template = ZabbixTemplateFile(self.template_name, templates_dir=self.args.get('templates_dir'))
self.template_id = None
# When template loaded, set defaults and run apply process
if self.template:
self.config = ZabbixDefaults()
self.apply()
def _configureLogging(self):
"""
Configure logging output. Format and colors.
"""
# Set logging level
if self.args.get('debug'):
logLevel = logging.DEBUG
else:
logLevel = logging.INFO
# Set colored output
colors = {'reset': '\033[0m', 'green': '\x1b[32m', 'cyan': '\x1b[36m'}
logFormat = '{reset}{cyan}[{green}%(asctime)s{cyan}]{reset} %(message)s'.format(
**colors)
logging.basicConfig(
level=logLevel,
format=logFormat,
datefmt='%d/%m/%Y %H:%M:%S')
def _apply_linked_templates(self):
"""
Recursive apply list of linked templates. They will applied before main
template will start applying.
"""
if self.template.get('templates') and not self.args.get('only', False):
log.info('%s depends from:', self.template.get('name'))
# Show linked template list before applying
for linked_template in self.template.get('templates', []):
log.info("\t\t%s", linked_template)
# Apply list of linked templates
for linked_template in self.template.get('templates', []):
ZabbixCLI(template=linked_template)
def _apply_template(self, template):
return ZabbixTemplate(self.zapi, template).apply()
def _apply_macro(self, macro):
ZabbixMacro(self.zapi, macro, self.template_id).apply()
def _apply_macros(self):
for macro in self.template.get('macros', []):
self._apply_macro(macro)
def _apply_app(self, app):
return ZabbixApp(self.zapi, app, self.template_id).apply()
def _apply_item(self, item):
ZabbixItem(self.zapi, item, self.config, self.template_id).apply()
def _apply_items(self, items, app_id):
for item in items:
item['app_id'] = app_id
self._apply_item(item)
def _apply_item_prototype(self, prototype):
ZabbixItemPrototype(self.zapi, prototype, self.config, self.template_id).apply()
def _apply_item_prototypes(self, discovery, app_id):
items = discovery.get('items', [])
rule_id = self.zapi.get_id(
'discoveryrule',
discovery['name'],
templateid=self.template_id)
for item in items:
item.update({'rule_id': rule_id, 'app_id': app_id})
self._apply_item_prototype(item)
def _apply_graph(self, graph):
ZabbixGraph(self.zapi, graph, self.config, self.template_id).apply()
def _apply_graphs(self):
for graph in self.template.get('graphs', []):
self._apply_graph(graph)
def _apply_graph_prototype(self, prototype):
ZabbixGraphPrototype(self.zapi, prototype, self.config, self.template_id).apply()
def _apply_graph_prototypes(self, discovery):
graphs = discovery.get('graphs', [])
for graph in graphs:
self._apply_graph_prototype(graph)
def _apply_trigger(self, trigger):
ZabbixTrigger(self.zapi, trigger, self.config, self.template_id).apply()
def _apply_triggers(self):
for trigger in self.template.get('triggers', []):
self._apply_trigger(trigger)
def _apply_trigger_prototype(self, prototype):
ZabbixTriggerPrototype(self.zapi, prototype, self.config, self.template_id).apply()
def _apply_trigger_prototypes(self, discovery):
triggers = discovery.get('triggers', [])
for triggers in triggers:
self._apply_trigger_prototype(triggers)
def _apply_autoreg(self):
autoreg = self.template.get('autoreg')
if autoreg:
ZabbixAutoreg(self.zapi, self.template).apply()
def _apply_trigger_action(self):
alerts = self.template.get('alerts', [])
for alert in alerts:
ZabbixTriggerAction(self.zapi, alert, self.config, self.template_id, self.template_name).apply()
def _apply_discovery(self, discovery):
ZabbixDiscovery(self.zapi, discovery, self.config, self.template_id).apply()
def _apply_discoveries(self):
discoveries = self.template.get('discovery', {})
for app, discovery in discoveries.iteritems():
app_id = self._apply_app(app)
self._apply_discovery(discovery)
self._apply_item_prototypes(discovery, app_id)
self._apply_graph_prototypes(discovery)
self._apply_trigger_prototypes(discovery)
def _disable_item(self, id_):
ZabbixItem(self.zapi).disable(id_)
def _disable_app(self, app):
items = self.zapi.get_id(
'item',
None,
hostid=self.template_id,
app_name=app)
for item in items:
self._disable_item(item)
def clean(self):
"""
Find and clean unused zabbix objects in current template.
"""
def getUnusedObjects(type_, o):
"""
Return list of unused zabbix objects of specific type in current template
"""
unused_items = []
# Get current objects from current template
current_items = self.zapi.get_id(type_, templateids=self.template_id, name=True)
if current_items:
log.debug("Current %s: %s", type_, current_items)
template_items = []
for item in o:
if isinstance(item, dict):
template_items.append(item.get('name'))
else:
template_items.append(item)
log.debug("Template %s: %s", type_, template_items)
# Filter out similar objects from zabbix and template
unused_items = filter(lambda x: x not in template_items, current_items)
log.debug("Unused %s: %s", type_, unused_items)
return { type_: unused_items }
def removeObjects(template_objects):
"""
Remove unused zabbix objects in current template.
"""
# Find unused objects
for objects in template_objects:
unused_objects = getUnusedObjects(*objects)
# Process each object type
for type_, objects_list in unused_objects.iteritems():
# Get zabbix id for each unused object
for name in objects_list:
object_id = self.zapi.get_id(type_, name)
if object_id:
# Make function to remove object
func = 'self.zapi.{object_type}.delete'.format(object_type=type_)
log.info('Unused: %s \'%s\' was removed', type_, name)
eval(func)(object_id)
# Init lists for objects
items = apps = discovery = itemprototype = graphprototype = triggerprototype = []
for app, item in self.template.get('applications', {}).iteritems():
apps.append(app)
items.extend(item)
for app, disc in self.template.get('discovery', {}).iteritems():
apps.append(app)
discovery.append(disc)
itemprototype.extend(disc.get('items', {}))
graphprototype.extend(disc.get('graphs',{}))
triggerprototype.extend(disc.get('triggers', {}))
# Cleanup should be executed in folowing order
obj_for_cleanup = collections.OrderedDict()
obj_for_cleanup['application'] = apps
obj_for_cleanup['item'] = items
obj_for_cleanup['usermacro'] = map(lambda x: {'name':x.get('macro')}, self.template.get('macros', []))
obj_for_cleanup['graph'] = self.template.get('graphs', [])
obj_for_cleanup['trigger'] = self.template.get('triggers', [])
obj_for_cleanup['discoveryrule'] = discovery
obj_for_cleanup['itemprototype'] = itemprototype
obj_for_cleanup['graphprototype'] = graphprototype
obj_for_cleanup['triggerprototype'] = triggerprototype
# Make tuple (obj_type, value) to compare with
template_objects = []
for k,v in obj_for_cleanup.iteritems():
template_objects.append((k,v))
# Remove unused objects
removeObjects(template_objects)
def apply(self):
"""
Apply current template to zabbix.
"""
self._apply_linked_templates()
self.template_id = self._apply_template(self.template)
# Cleanup unused objects
self.clean()
apps = self.template.get('applications', {})
for app, items in apps.iteritems():
# check if disabled whole app
if str(items).lower() == 'disabled':
self._disable_app(app)
else:
app_id = self._apply_app(app)
self._apply_items(items, app_id)
self._apply_macros()
self._apply_graphs()
self._apply_triggers()
self._apply_discoveries()
self._apply_autoreg()
self._apply_trigger_action()
log.info("Done: '%s'", self.template.get('name')) | zabbixcli | /zabbixcli-1.0.6.tar.gz/zabbixcli-1.0.6/zabbixlib/cli.py | cli.py |
# zabbixctl
## Install
```bash
sudo pip install zabbixctl
#or
sudo make install
#or
sudo python setup.py install
```
## Usage
```
usage: zabbixctl [-h] [-d] [-i] [-V] [-c CACERT] [-t TIMEOUT] [-H HOSTS]
[-U USER]
{help,get,create,update,export,delete} ...
Zabbix CLI
positional arguments:
{help,get,create,update,export,delete}
get Zabbix API Method for get
export Zabbix API Method for export
update Zabbix API Method for update
create Zabbix API Method for create
delete Zabbix API Method for delete
help Display link for Zabbix wiki
optional arguments:
-h, --help show this help message and exit
-d, --debug increase output verbosity (default: False)
-i, --http Use http instead of https (default: False)
-V, --noverify Do not verify the SSL (default: False)
-c CACERT, --cacert CACERT
Path to the SSL CA Certificateexample:
/etc/pki/tls/certs/ca-bundle.crt (default: None)
-t TIMEOUT, --timeout TIMEOUT
Zabbix API read timeout in seconds (default: 30)
-H HOSTS, --hosts HOSTS
Zabbix API host(s).example: zabbixhost.example.com
(default: None)
-p URI_PATH, --uri-path URI_PATH
URI path to zabbix api. default: zabbix (default:
zabbix)
-U USER, --user USER Zabbix API user (default: system username)
usage: zabbixctl get [-h] [-a ARGUMENTS] type
positional arguments:
type Zabbix API get method
(host.get,hostgroups.get,usergroups.get)
optional arguments:
-h, --help show this help message and exit
-a ARGUMENTS, --arguments ARGUMENTS
RPC params
-k, --listkeys Returns a list of keys for the given resource type
-f FILTER, --filter FILTER
Takes "key=value" args that are sent to the zabbix api
in the filter parameter
-s SEARCH, --search SEARCH
Takes "key=value" args that are sent to the zabbix api
in the search parameter
```
## Examples
```bash
zabbixctl -H zabbix.yourdomain.com get trigger -a 'hostids=[10167]' -a 'expandExpression=true' -a 'expandDescription=true'
zabbixctl -H zabbix.yourdomain.com get trigger -a 'search={"host":"syslog"}' -a 'expandExpression=true' -a 'searchWildcardsEnabled=true' -a 'selecthosts=extend'
zabbixctl -H zabbix.yourdomain.com get trigger -a 'triggerids=[14924]'
zabbixctl -H zabbix.yourdomain.com get trigger -a 'search={"host":"syslog"}' -a 'expandExpression=true'
zabbixctl -H zabbix.yourdomain.com get user
zabbixctl -H zabbix.yourdomain.com get host
zabbixctl -H zabbix.yourdomain.com get host -a 'search={"host":"syslog"}' -a 'searchWildcardsEnabled=true'
zabbixctl -H zabbix.yourdomain.com get alert -a 'time_from=1409611855' -a 'output=extend'
zabbixctl -H zabbix.yourdomain.com get itemprototype
zabbixctl -H zabbix.yourdomain.com get event -a 'time_from=1409611855' -a 'output=extend' -a 'selectRelatedObject=["description"]'
zabbixctl -H zabbix.yourdomain.com get alert -a 'time_from=1409611855' -a 'output=extend' -a 'selectHosts=["host"]'
zabbixctl -H zabbix.yourdomain.com get template -a 'output=["host"]' -a 'selectItems=extend' -a 'templateids=[10167]'
```
## Known Issues
Error: ImportError: cannot import name to_native_string
Solution: Upgrade to at least version 2.0 of requests
| zabbixctl | /zabbixctl-1.1.6.tar.gz/zabbixctl-1.1.6/README.md | README.md |
# zabbixsim
Zabbix Agent Simulator (active)
The Zabbix Agent Simulator is intended to be used instead of the regular Zabbix agent in scenarios, where the use of the real Zabbix Agnet is not partical or desirable. This includes testing scenarios that are not possible on a live system.
This is works well with the snmpsim to provide simulation data for Zabbix.
<https://github.com/etingof/snmpsim>
<https://pypi.org/project/snmpsim/>
The simulator is based on the protocol documented in the Zabbbix manual.
<https://www.zabbix.com/documentation/current/manual/appendix/items/activepassive>
## Installation
```bash
pip install zabbixsim
```
## Usage
### Record simulation file
Record a simulation file from a Zabbix Server
```bash
zabbixrec
```
### Run simulation file
Run a simulation file with a Zabbix Server
```bash
zabbixsim
```
Copyright (c) 2021, [Adam Leggo](mailto:[email protected]). All rights reserved.
| zabbixsim | /zabbixsim-0.1.0.tar.gz/zabbixsim-0.1.0/README.md | README.md |
from typing import Any, Dict, Union
import json
from .exceptions import ApiError
from .servers import entrypoint, DEFAULT_HEADERS
########################################################################
## Constants
KEY = r'[a-z0-9A-Z-_.]+'
VALUE = r'[a-z0-9A-Z-_.]+'
EQUAL_EXPR = rf'^({KEY})\s*([=!]?=)\s*({VALUE})$'
SET_EXPR = rf'^({KEY})\s+(in|notin)\s+\(({VALUE}(\s*,\s*{VALUE})*)\)$'
EXISTS_EXPR = rf'^{KEY}$'
########################################################################
## Interfaces
## Fabric
class ApiService:
"""Abstract Api Service Wrapper."""
class Controller:
"""Abstract Controller Wrapper."""
class Image:
"""Abstract Image Wrapper.
Provides a minimal set of features an image must provide:
- constructor (`__init__`)
- a `run()` method
Implementing classes must have a constructor with the following
signature:
```python
def __init__(self):
...
```
The `run()` method takes any number of parameters. It represents
the image activity.
"""
## Core
class Manager:
"""Abstract Manager Wrapper.
A simple marker for manager classes.
# Properties
| Property name | Description | Default implementation? |
| ------------- | -------------------- | ----------------------- |
| `platform` | The platform the
manager is part of. | Yes (read/write) |
"""
_platform: Any
@property
def platform(self) -> Any:
"""Return the Platform the manager is attached to."""
return self._platform
@platform.setter
def platform(self, value: Any) -> None:
"""Set the Platform the manager is attached to."""
# pylint: disable=attribute-defined-outside-init
self._platform = value
class ManagedProjectDefinition(Dict[str, Any]):
"""Managed Project Definition.
Provides a simple wrapper for _managed projects definitions_.
Managed projects definitions are JSON files (handled as dictionaries
in Python).
The _ManagedProjectDefinition_ helper class inherits from `dict`,
and provides a single class method, `from_dict()`.
"""
@classmethod
def from_dict(cls, source: Dict[str, Any]) -> 'ManagedProjectDefinition':
"""Convert a dictionary to a _ManagedProjectDefinition_ object.
# Required parameters
- source: a dictionary
Should a platform implementation provide its own wrapper, it
will most likely have to override this class method.
"""
definition = cls()
for key in source:
definition[key] = source[key]
return definition
class ManagedAccount(Dict[str, Any]):
"""Managed Account.
Provides a simple wrapper for _managed accounts_.
Managed accounts are object describing realm accounts (users,
technical users, readers, admins, ...).
Realm implementations may decide to provide their own wrapper, to
help manage managed accounts.
A managed account is attached to a realm.
The _ManagedAccount_ helper class inherits from dict, and provides a
single class method, `from_dict()`.
"""
@classmethod
def from_dict(cls, source: Dict[str, Any]) -> 'ManagedAccount':
"""Convert a dictionary to a _ManagedAccount_ object.
# Required parameters
- source: a dictionary
Should a platform implementation provide its own wrapper, it
will most likely have to override this class method.
"""
definition = cls()
for key in source:
definition[key] = source[key]
return definition
def _read_server_params(args, host, port):
host = args[args.index('--host') + 1] if '--host' in args else host
port = int(args[args.index('--port') + 1]) if '--port' in args else port
return host, port
class BaseService(Image):
"""Abstract Service Wrapper.
Provides a minimal set of features a service must provide.
- accessors for name and platform
_BaseService_ instances are expected to expose some entrypoints and
make them available through a web server.
This class provides a default implementation of such a server and
exposes the defined entrypoints.
Its `run()` that takes any number of string arguments. It starts a
web server on the host and port provided via `--host` and `--port`
arguments, or, if not specified, via the `host` and `port` instance
attributes, or `localhost` on port 8080 if none of the above are
available:
```python
# Explicit host and port
foo.run('--host', '0.0.0.0', '--port', '80')
# Explicit host, default port (8080)
foo.run('--host', '192.168.12.34')
# Host specified for the object, default port (8080)
foo.host = '10.0.0.1'
foo.run()
# Default host and port (localhost:8080)
foo.run()
```
The exposed entrypoints are those defined on all instance members.
The entrypoint definitions are inherited (i.e., you don't have to
redefine them if they are already defined).
```python
class Foo(BasicService):
@entrypoint('/foo/bar')
def get_bar():
...
class FooBar(Foo):
def get_bar():
return 'foobar.get_bar'
FooBar().run() # curl localhost:8080/foo/bar -> foobar.get_bar
```
**Note**: You can redefine the entrypoint attached to a method.
Simply add a new `@entrypoint` decorator to the method. And, if
you want to disable the entrypoint, use `[]` as the path.
**Note**: The web server is implemented using Bottle. If you prefer
or need to use another wsgi server, simple override the `run()`
method in your class. Your class will then have no dependency on
Bottle.
# Properties
| Property name | Description | Default implementation? |
| ------------- | -------------------- | ----------------------- |
| `metadata` | The service metadata
(a dictionary). | Yes (read/write) |
| `name` | The service name. | Yes (read/write) |
| `platform` | The platform the
service is part of. | Yes (read/write) |
# Declared Methods
| Method name | Default implementation? |
| ------------------------- | ----------------------- |
| #ensure_authn() | No |
| #ensure_authz() | No |
| #run() | Yes |
Unimplemented features will raise a _NotImplementedError_ exception.
Some features provide default implementation, but those default
implementations may not be very efficient.
"""
_metadata: Dict[str, Any]
@property
def metadata(self) -> Any:
"""Return the service metadata."""
return self._metadata
@metadata.setter
def metadata(self, value: Dict[str, Any]) -> None:
"""Set the service metadata."""
self._metadata = value
@property
def name(self) -> str:
"""Return the service name.
This value is defined in the platform definition.
On a platform, all services have a unique name, so this may be
used to disambiguate services of the same type in logging
functions.
# Returned value
A string.
# Raised exceptions
An _ApiError_ exception is raised if the service does not have
a name.
"""
result = self.metadata.get('name')
if result is None:
raise ApiError('No service_name defined.')
return result # type: ignore
@name.setter
def name(self, value: str) -> None:
"""Set the service name."""
# pylint: disable=attribute-defined-outside-init
self.metadata['name'] = value
_platform: Any
@property
def platform(self) -> Any:
"""Return the platform the service is attached to."""
return self._platform
@platform.setter
def platform(self, value: Any) -> None:
"""Set the platform the service is attached to."""
# pylint: disable=attribute-defined-outside-init
self._platform = value
def ensure_authn(self) -> str:
"""Ensure the incoming request is authenticated.
This method is abstract and should be implemented by the
concrete service class.
# Returned value
A string, the subject identity.
# Raised exceptions
Raises a _ValueError_ exception if the incoming request is not
authenticated. The ValueError argument is expected to be a
_status_ object with an `Unauthorized` reason.
"""
raise NotImplementedError
def ensure_authz(self, sub) -> None:
"""Ensure the incoming request is authorized.
This method is abstract and should be implemented by the
concrete service class.
# Required parameters
- sub: a string, the subject identity
# Returned value
None.
# Raised exception
Raises a _ValueError_ exception if the subject is not allowed
to perform the operation. The ValueError argument is expected
to be a _status_ object with a `Forbidden` reason.
"""
raise NotImplementedError
def run(self, *args):
"""Start a bottle app for instance.
Routes that requires RBAC will call #ensure_authn()
and #ensure_authz().
# Optional parameters
- *args: strings. See class definition for more details.
# Returned value
If the server thread dies, returns the exception. Does not
return otherwise.
"""
# pylint: disable=import-outside-toplevel
from bottle import Bottle, request, response
def wrap(handler, rbac: bool):
def inner(*args, **kwargs):
for header, value in DEFAULT_HEADERS.items():
response.headers[header] = value
if rbac:
try:
self.ensure_authz(self.ensure_authn())
except ValueError as err:
resp = err.args[0]
response.status = resp['code']
return resp
if request.json:
kwargs['body'] = request.json
try:
result = json.dumps(handler(*args, **kwargs))
return result
except ValueError as err:
resp = err.args[0]
response.status = resp['code']
return resp
return inner
if not hasattr(self, 'port'):
# pylint: disable=attribute-defined-outside-init
self.port = 8080
if not hasattr(self, 'localhost'):
# pylint: disable=attribute-defined-outside-init
self.host = 'localhost'
# pylint: disable=attribute-defined-outside-init
self.app = Bottle()
for name in dir(self):
method = getattr(self, name, None)
if method:
# The 'entrypoint routes' attr may be on a super method
sms = [getattr(c, name, None) for c in self.__class__.mro()]
eps = [getattr(m, 'entrypoint routes', None) for m in sms]
for route in next((routes for routes in eps if routes), []):
self.app.route(
path=route['path'].replace('{', '<').replace('}', '>'),
method=route['methods'],
callback=wrap(method, route['rbac']),
)
host, port = _read_server_params(args, host=self.host, port=self.port)
try:
self.app.run(host=host, port=port)
except Exception as err:
return err
class Utility(BaseService):
"""Abstract Shared Service Wrapper.
This class extends #BaseService and is abstract. It declares a
minimal set of features a utility (a shared service) must provide,
in addition to the #BaseService ones.
"""
class ManagedService(BaseService):
"""Abstract Managed Service Wrapper.
This class extends #BaseService and is abstract. It declares a
minimal set of features a managed service must provide, in addition
to the #BaseService ones:
- canonical user names management
- members getters
- project push and pull
# Added Methods
| Method name | Default implementation? | Exposed? |
| -------------------------- | ------------------------| -------- |
| #get_canonical_member_id() | No | No |
| #get_internal_member_id() | No | No |
| #list_members() | No | Yes |
| #get_member() | No | Yes |
| #push_project() | No | Yes |
| #push_users() | No | Yes |
| #pull_project() | No | Yes |
| #pull_users() | No | Yes |
Unimplemented features will raise a _NotImplementedError_
exception.
"""
def get_canonical_member_id(self, user: Any) -> str:
"""Return the canonical member ID.
# Required parameters
- user: a service-specific user representation
`user` is the service internal user representation. It may be
a service-specific object or class.
# Returned value
A string.
"""
raise NotImplementedError
def get_internal_member_id(self, member_id: str) -> Union[str, int]:
"""Return the internal name.
# Required parameters
- member_id: a string
`member_id` is the canonical member ID.
# Returned value
A string or an integer, depending on the service internals.
"""
raise NotImplementedError
@entrypoint('/v1/members')
def list_members(self) -> Dict[str, Any]:
"""Return the members on the service.
# Returned values
A dictionary. The keys are the canonical IDs and the values are
the representations of a user for the service.
"""
raise NotImplementedError
@entrypoint('/v1/members/{member_id}')
def get_member(self, member_id: str) -> Any:
"""Return details on user.
# Required parameters
- member_id: a string
`member_id` is the canonical member ID.
# Returned value
The representation of the user for the service, which is
service-specific.
"""
raise NotImplementedError
@entrypoint('/v1/managedprojects/{project}', methods=['PUT'])
def push_project(self, project: str) -> None:
"""Push (aka publish) managed project on service.
Members defined for the project are not pushed on service. Use
#push_users() for that purpose.
# Required parameters
- project: a managed project definition name
# Raised exceptions
Raises an exception if the managed project is not successfully
pushed.
"""
raise NotImplementedError
@entrypoint('/v1/managedprojects/{project}/members', methods=['PUT'])
def push_users(self, project: str) -> None:
"""Push (aka publish) managed project users on service.
It assumes the project has been previously successfully pushed.
It may fail otherwise.
# Required parameters
- project: a managed project definition name
It assumes the project has been previously successfully pushed
on the service.
# Raised exception
Raises an exception if the managed project users are not
successfully pushed.
"""
raise NotImplementedError
@entrypoint('/v1/managedprojects/{project}', methods=['GET'])
def pull_project(self, project: str) -> Any:
"""Pull (aka extract) managed project users on service.
# Required parameters
- project: a managed project definition name
"""
raise NotImplementedError
@entrypoint('/v1/managedprojects/{project}/members', methods=['GET'])
def pull_users(self, project: str) -> Any:
"""Pull (aka extract) managed project definition on service.
# Required parameters
- project: a managed project definition name
"""
raise NotImplementedError | zabel-commons | /zabel_commons-1.7.0-py3-none-any.whl/zabel/commons/interfaces.py | interfaces.py |
from typing import Any, Dict, Optional
import requests
from requests.utils import cookiejar_from_dict
class PersistentSession:
"""Persistent Sessions
Persistent sessions are not opened at initialization time.
They must be called at least once to open.
"""
def __init__(
self,
auth: Any,
cookies: Optional[Dict[str, str]] = None,
verify: bool = True,
) -> None:
"""Initialize a new requests.Session object.
# Required parameters
- auth: an object
# Optional parameters
- cookies: a dictionary or None (None by default)
- verify: a boolean (True by default)
"""
self.auth = auth
self.cookies = cookies
self.verify = verify
self.session: Optional[requests.Session] = None
def __call__(self) -> requests.Session:
if self.session is None:
self.session = requests.Session()
self.session.auth = self.auth
if self.cookies:
self.session.cookies = cookiejar_from_dict(
self.cookies, self.session.cookies
)
self.session.verify = self.verify
return self.session
def prepare_session(
auth: Any, cookies: Optional[Dict[str, str]] = None, verify: bool = True
) -> PersistentSession:
"""Return a new Persistent Session object.
# Required parameters
- auth: an object
# Optional parameters
- cookies: a dictionary or None (None by default)
- verify: a boolean (True by default)
# Returned value
A new #PersistentSession instance.
# Sample usage
```python
>>> from commons.sessions import prepare_session
>>>
>>> session = prepare_session('token')
>>> # The HTTP session doesn't exist yet.
>>> session().get('http://example.com/foo')
>>> # The HTTP session is now open
>>> session().put('http://example.com/foo', data='bar')
>>> # The same HTTP session was used
```
"""
return PersistentSession(auth, cookies, verify) | zabel-commons | /zabel_commons-1.7.0-py3-none-any.whl/zabel/commons/sessions.py | sessions.py |
from typing import (
Any,
Callable,
Dict,
Iterable,
Mapping,
MutableMapping,
Optional,
Tuple,
TypeVar,
Union,
)
from functools import wraps
import inspect
import requests
from .exceptions import ApiError
# Public API of this module.  NOTE: 'api_client' is defined below with a
# docstring and no leading underscore, i.e. it is public, but it was
# missing from this list, so `from ... import *` did not export it.
# Adding it is backward-compatible (it only adds an export).
__all__ = [
    'api_call',
    'api_client',
    'xml_to_dict',
    'dict_to_xml',
    'add_if_specified',
    'join_url',
    'patch',
    'ensure_instance',
    'ensure_noneorinstance',
    'ensure_nonemptystring',
    'ensure_noneornonemptystring',
    'ensure_onlyone',
    'ensure_in',
    'BearerAuth',
]


########################################################################
########################################################################
# decorators

# Generic type variable so that the api_call decorator preserves the
# decorated function's signature for type checkers.
FuncT = TypeVar('FuncT', bound=Callable[..., Any])
def api_call(function: FuncT) -> FuncT:
    """Decorate function so that failed API calls raise _ApiError_.

    If `function` returns a _Response_ object, its JSON content
    (possibly None if the body is empty) is returned when the status
    code is in the 2xx range; an _ApiError_ is raised otherwise.

    Non-_Response_ return values are passed through unchanged.

    _requests.RequestException_ exceptions are wrapped in an
    _ApiError_.  _ApiError_ and _ValueError_ propagate unchanged; any
    other exception raised by `function` is wrapped in an _ApiError_.

    # Sample use

    ```python
    @api_call
    def foo(self, a, b, c):
        return 42
    ```
    """

    @wraps(function)
    def _wrapper(*args: Any, **kwargs: Any) -> Any:
        # NOTE: the except-clause order below is significant.  Some
        # requests exceptions (e.g. InvalidURL) subclass both
        # RequestException and ValueError; they must be wrapped, not
        # re-raised bare, so RequestException is tested first.
        try:
            result = function(*args, **kwargs)
            if not isinstance(result, requests.Response):
                return result
            if result.status_code // 100 != 2:
                raise ApiError(result.text)
            return result.json() if result.text != '' else None
        except requests.exceptions.RequestException as err:
            raise ApiError(f'{err.__class__.__name__}: {err}') from err
        except ValueError:
            raise
        except ApiError:
            raise
        except Exception as err:
            raise ApiError(f'{err.__class__.__name__}: {err}') from err

    return _wrapper  # type: ignore
def api_client(cls):
    """Decorate class methods with api_call.

    Wraps every callable entry of the class dictionary with
    #api_call().  Class methods and static methods are not wrapped
    (their raw descriptor objects are not callable when read from the
    class dict — NOTE(review): on Python 3.10+, staticmethod objects
    became callable; confirm if targeting newer runtimes).
    """
    # Snapshot the items: setattr mutates the class dict we read from.
    for attr, member in list(vars(cls).items()):
        if callable(member):
            setattr(cls, attr, api_call(member))
    return cls
########################################################################
########################################################################
# XML helpers
def xml_to_dict(xml: Any) -> Dict[str, Any]:
    """Convert an XML document to a corresponding dictionary.

    The element tag maps to the list of converted children, each
    attribute is stored under an `'@'`-prefixed key, and the element
    text is stored under the `'element text'` key.

    !!! important
        There should be no `'element text'` tag in the XML document.
    """
    result: Dict[str, Any] = {xml.tag: [xml_to_dict(child) for child in xml]}
    for key, val in xml.attrib.items():
        result[f'@{key}'] = val
    result['element text'] = xml.text
    return result
def dict_to_xml(dct: Mapping[str, Any]) -> str:
    """Convert a dictionary to a corresponding XML string.

    `dct` should follow the conventions of #xml_to_dict(): attributes
    are prefixed with an `'@'` symbol, which will be removed in the
    generated string.

    KLUDGE: recreate an XML tree, and convert it to a string.  Right
    now, it does not support CDATA.
    """
    # The tag is the one key that is neither an attribute nor the text.
    tag = [key for key in dct if key != 'element text' and key[0] != '@'][0]
    attributes = ''.join(
        f' {key[1:]}="{val}"' for key, val in dct.items() if key[0] == '@'
    )
    children = ''.join(dict_to_xml(child) for child in dct[tag])
    text = dct.get('element text') or ''
    return f'\n<{tag}{attributes}>{children}{text}</{tag}>'
########################################################################
########################################################################
# misc. helpers
def patch(
    destination: Dict[str, Any], changes: Dict[str, Any]
) -> Dict[str, Any]:
    """Deep-merge two dictionaries.

    # Required parameters

    - `destination`: a dictionary
    - `changes`: a dictionary

    `changes` is a sparse dictionary: its entries overwrite matching
    entries in `destination`, recursing into nested dictionaries.
    Entries of `destination` absent from `changes` are kept as-is.
    `destination` is modified in place.

    # Returned value

    The patched `destination` dictionary.

    ```python
    >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}}
    >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}}
    >>> patch(a, b) == {'first': {'all_rows': {'pass': 'dog',
    >>>                                        'fail': 'cat',
    >>>                                        'number': '5'}}}
    ```
    """
    for key, value in changes.items():
        if isinstance(value, dict):
            # Recurse into the existing node, creating it if missing.
            patch(destination.setdefault(key, {}), value)
        else:
            destination[key] = value
    return destination
def add_if_specified(
    dct: MutableMapping[str, Any], key: str, val: Any
) -> None:
    """Add a key:value pair to dictionary if value is not None.

    # Required parameters

    - dct: a dictionary
    - key: a string
    - val: anything

    # Returned value

    None.
    """
    if val is None:
        return
    dct[key] = val
def join_url(lhs: str, rhs: str) -> str:
    """Join two parts to make an URL.

    It does not try to interpret the URL.  In particular, it differs
    from `urllib.path.urljoin` in that:

    ```python
    >>> join_url('https://example.com/foo', 'bar')
    'https://example.com/foo/bar'
    >>> urljoin('https://example.com/foo', 'bar')
    'https://example.com/bar'
    ```
    """
    left = lhs.rstrip('/')
    right = rhs.lstrip('/')
    return f'{left}/{right}'
########################################################################
########################################################################
# parameters checks
def _isnoneorinstance(
    val: Any, typ: Union[type, Tuple[type, ...]]
) -> bool:
    """Return True if val is either None or an instance of class typ.

    `val` is an arbitrary value (the previous `Optional[type]`
    annotation was incorrect: the tested value need not be a type).

    `typ` can be a type or a tuple of types.
    """
    return val is None or isinstance(val, typ)
def _isnonemptystring(val: Any) -> bool:
    """Return True if val is a string of length at least one."""
    return isinstance(val, str) and val != ''
def _isnoneornonemptystring(val: Any) -> bool:
    """Return True if val is either None or a non-empty string."""
    if val is None:
        return True
    return _isnonemptystring(val)
def _getlocal(val: Optional[Any], name: str) -> Any:
    """Return local variable `name` from the caller of frame `val`.

    `val` is expected to be a frame object (typically the result of
    `inspect.currentframe()` in a validation helper); the lookup is
    performed in the locals of the frame that called it (`f_back`).

    Raises _SystemError_ if `val` is None (no frame available).
    """
    if val is not None:
        return val.f_back.f_locals[name]
    raise SystemError('No previous frame, should not happen, aborting.')
# assertions
def _describe(typ: Union[type, Tuple[type, ...]]) -> str:
    """Return a human-friendly description of typ.

    `typ` may be a type or a tuple of types.

    For a tuple, the type names are listed in the given order with an
    'or' before the last one (the previous implementation listed the
    first type last, producing e.g. 'str, or int' for (int, str), and
    a spurious leading ', or ' for one-element tuples).  Only the text
    of validation error messages is affected.
    """
    if isinstance(typ, tuple):
        names = [t.__name__ for t in typ]
        if len(names) == 1:
            return names[0]
        return ', '.join(names[:-1]) + ', or %s' % names[-1]
    return typ.__name__
def ensure_instance(name: str, typ: Union[type, Tuple[type, ...]]) -> None:
    """Ensure name is an instance of typ.

    # Required parameters

    - name: a string, the name of the local variable to check
    - typ: a type or a tuple of types

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    ensure_nonemptystring('name')

    # NOTE: `_getlocal` must be called directly from this function's
    # body, as it reads the locals of this frame's caller.
    val = _getlocal(inspect.currentframe(), name)
    if isinstance(val, typ):
        return
    raise ValueError(f'{name} must be of type {_describe(typ)}.')
def ensure_noneorinstance(
    name: str, typ: Union[type, Tuple[type, ...]]
) -> None:
    """Ensure name is either None or an instance of typ.

    # Required parameters

    - name: a string, the name of the local variable to check
    - typ: a type or a tuple of types

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    ensure_nonemptystring('name')

    # NOTE: `_getlocal` must be called directly from this function's
    # body, as it reads the locals of this frame's caller.
    val = _getlocal(inspect.currentframe(), name)
    if _isnoneorinstance(val, typ):
        return
    raise ValueError(
        f'{name} must be either None or of type {_describe(typ)}.'
    )
def ensure_nonemptystring(name: str) -> None:
    """Ensure name is a non-empty string.

    # Required parameters

    - name: a string, the name of the local variable to check

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    # Validate the parameter itself directly: a recursive call would
    # not terminate.
    if not _isnonemptystring(name):
        raise ValueError("Parameter 'name' must be a string.")

    # NOTE: `_getlocal` must be called directly from this function's
    # body, as it reads the locals of this frame's caller.
    val = _getlocal(inspect.currentframe(), name)
    if not _isnonemptystring(val):
        raise ValueError('%s must be a non-empty string.' % name)
def ensure_noneornonemptystring(name: str) -> None:
    """Ensure name is either None or a non-empty string.

    # Required parameters

    - name: a string, the name of the local variable to check

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    ensure_nonemptystring('name')

    # NOTE: `_getlocal` must be called directly from this function's
    # body, as it reads the locals of this frame's caller.
    val = _getlocal(inspect.currentframe(), name)
    if _isnoneornonemptystring(val):
        return
    raise ValueError('%s must be a non-empty string if specified.' % name)
def ensure_onlyone(*names: str) -> None:
    """Ensure one and only one name is not None.

    # Required parameters

    - names: a non-empty tuple of non-empty string, the names of the
      local variables to check

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    ensure_instance('names', tuple)

    # Capture this frame once; `_getlocal` then resolves each name in
    # the caller's locals (this frame's `f_back`).
    current_frame = inspect.currentframe()
    values = []
    for name in names:
        values.append(_getlocal(current_frame, name))
    provided = [
        name for name, value in zip(names, values) if value is not None
    ]
    if len(provided) != 1:
        raise ValueError(
            'Was expecting only one of %s, got %s.'
            % (list(names), list(provided))
        )
def ensure_in(name: str, values: Iterable[str]) -> None:
    """Ensure name value is in values.

    # Required parameters

    - name: a string, the name of a local variable to check
    - values: a list of strings

    # Raised exceptions

    Raise _ValueError_ if the condition is not satisfied.
    """
    ensure_nonemptystring('name')

    # NOTE: `_getlocal` must be called directly from this function's
    # body, as it reads the locals of this frame's caller.
    val = _getlocal(inspect.currentframe(), name)
    if val in values:
        return
    allowed = ', '.join(values)
    raise ValueError(
        '%s not an allowed value, expecting one of %s.' % (val, allowed)
    )
class BearerAuth(requests.auth.AuthBase):
    """A Bearer token authentication handler for **requests**.

    Adds an `Authorization: Bearer <token>` header to outgoing
    requests.
    """

    def __init__(self, pat: str):
        # pat: the token presented in the Authorization header.
        self.pat = pat

    def __call__(self, r):
        r.headers['Authorization'] = 'Bearer {}'.format(self.pat)
        return r

    def __eq__(self, other):
        return getattr(other, 'pat', None) == self.pat

    def __ne__(self, other):
        return not self == other
from typing import Any, Dict, List, Optional, Union
########################################################################
########################################################################
# Security Headers
# Security-oriented default HTTP response headers for API servers.
DEFAULT_HEADERS = {
    'Content-Type': 'application/json',
    # Force HTTPS for one year, including subdomains.
    'Strict-Transport-Security': 'max-age=31536000; includeSubDomains',
    # Only allow same-origin framing (clickjacking protection).
    'X-Frame-Options': 'SAMEORIGIN',
    # Disable MIME-type sniffing.
    'X-Content-Type-Options': 'nosniff',
    # Never send a Referer header.
    'Referrer-Policy': 'no-referrer',
    # Forbid loading of any external resource by default.
    'Content-Security-Policy': 'default-src \'none\'',
}
# API Server Helpers
# Maps status reasons to their HTTP status codes.
REASON_STATUS = {
    'OK': 200,
    'Created': 201,
    'NoContent': 204,
    'BadRequest': 400,
    'Unauthorized': 401,
    'PaymentRequired': 402,
    'Forbidden': 403,
    'NotFound': 404,
    'AlreadyExists': 409,
    'Conflict': 409,
    'Invalid': 422,
}


def make_status(
    reason: str, message: str, details: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    """Return a new status object.

    # Required parameters

    - reason: a non-empty string (must exist in `REASON_STATUS`)
    - message: a string

    # Optional parameters:

    - details: a dictionary or None (None by default)

    # Returned value

    A _status_.  A status is a dictionary with the following entries:

    - kind: a string (`'Status'`)
    - apiVersion: a string (`'v1'`)
    - metadata: an empty dictionary
    - status: a string (either `'Success'` or `'Failure'`)
    - message: a string (`message`)
    - reason: a string (`reason`)
    - details: a dictionary or None (`details`)
    - code: an integer (derived from `reason`)
    """
    code = REASON_STATUS[reason]
    status = {
        'kind': 'Status',
        'apiVersion': 'v1',
        'metadata': {},
    }
    # 2xx codes are successes, everything else is a failure.
    status['status'] = 'Success' if 200 <= code < 300 else 'Failure'
    status['message'] = message
    status['reason'] = reason
    status['details'] = details
    status['code'] = code
    return status
def make_items(kind: str, what: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Return a list object wrapping `what`.

    # Required parameters

    - kind: a non-empty string
    - what: a list of dictionaries

    # Returned value

    A _list_.  A list is a dictionary with the following entries:

    - kind: a string (`kind` with a `'List'` suffix)
    - apiVersion: a string (`'v1'`)
    - items: a list of dictionaries (`what`)
    """
    result = {'apiVersion': 'v1'}
    result['kind'] = kind + 'List'
    result['items'] = what
    return result
# Decorators
# Maps 'standard' entrypoint names (and name prefixes) to their
# default HTTP methods.
DEFAULT_METHODS = {
    'list': ['GET'],
    'get': ['GET'],
    'create': ['POST'],
    'update': ['PUT'],
    'delete': ['DELETE'],
    'patch': ['PATCH'],
}

# Name of the attribute added to decorated functions.
ATTR_NAME = 'entrypoint routes'

# HTTP verbs allowed in the `methods` parameter.
_ALLOWED_HTTP_METHODS = frozenset(
    {'GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'}
)


def entrypoint(
    path: Union[str, List[str]],
    methods: Optional[List[str]] = None,
    rbac: bool = True,
):
    """Decorate a function so that it is exposed as an entrypoint.

    If the function it decorates does not have a 'standard' name,
    or if its name does not start with a 'standard' prefix, `methods`
    must be specified.

    `path` may contain _placeholders_, that will be mapped to function
    parameters at call time:

    ```python
    @entrypoint('/foo/{bar}/baz/{foobar}')
    def get(self, bar, foobar):
        pass

    @entrypoint('/foo1')
    @entrypoint('/foo2')
    def list():
        pass

    @entrypoint(['/bar', '/baz'])
    def list():
        pass
    ```

    Possible values for strings in `methods` are: `'GET'`, `'POST'`,
    `'PUT'`, `'DELETE'`, `'PATCH'`, and `'OPTIONS'`.

    The corresponding 'standard' names are `'list'` and `'get'`,
    `'create'`, `'update'`, `'delete'`, and `'patch'`.  There is no
    'standard' name for the `'OPTIONS'` method.

    'Standard' prefixes are standard names followed by `'_'`, such
    as `'list_foo'`.

    Decorated functions will have an `entrypoint routes` attribute
    added, which will contain a list of a dictionary with the following
    entries:

    - path: a non-empty string or a list of non-empty strings
    - methods: a list of strings
    - rbac: a boolean

    The decorated functions are otherwise unmodified.

    There can be as many entrypoint decorators as required for a
    function.

    # Required parameters

    - path: a non-empty string or a list of non-empty strings

    # Optional parameters

    - methods: a list of strings or None (None by default).
    - rbac: a boolean (True by default).

    # Raised exceptions

    A _ValueError_ exception is raised if the wrapped function does not
    have a standard entrypoint name and `methods` is not specified.

    A _ValueError_ exception is raised if `methods` is specified and
    contains unexpected values (must be a standard HTTP verb).
    """

    def inner(f):
        # Deduce the default methods from the function name, if it is
        # a standard name or starts with a standard prefix.
        for prefix, words in DEFAULT_METHODS.items():
            if f.__name__ == prefix or f.__name__.startswith(f'{prefix}_'):
                _methods = words
                break
        else:
            _methods = None
        if _methods is None and methods is None:
            raise ValueError(
                f"Nonstandard entrypoint '{f.__name__}', 'methods' parameter required."
            )
        # Accumulate routes: the decorator may be applied repeatedly.
        setattr(
            f,
            ATTR_NAME,
            getattr(f, ATTR_NAME, [])
            + [
                {'path': p, 'methods': methods or _methods, 'rbac': rbac}
                for p in paths
            ],
        )
        return f

    # Validate `methods` eagerly, as documented (this check was
    # previously documented but not enforced).
    if methods is not None:
        unexpected = [m for m in methods if m not in _ALLOWED_HTTP_METHODS]
        if unexpected:
            raise ValueError(
                f"Unexpected value(s) {unexpected} in 'methods', must be "
                f"one of {sorted(_ALLOWED_HTTP_METHODS)}."
            )
    paths = [path] if isinstance(path, str) else path
    return inner
from typing import Any, Dict, Iterable, List
from time import sleep, time
import re
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import (
api_call,
ensure_instance,
ensure_nonemptystring,
join_url,
)
from .base.jenkins import CloudBeesJenkins as Base
# XML payload template for a username/password credential.
# Format parameters: id, description, username, password.
CREDENTIAL_UP_CONFIG_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl
plugin="credentials">
<id>{id}</id>
<description>{description}</description>
<username>{username}</username>
<password>{password}</password>
</com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl>
'''
# XML payload template for an AWS access key / secret key credential.
# Format parameters: id, description, accesskey, secretkey.
CREDENTIAL_AC_CONFIG_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<com.cloudbees.jenkins.plugins.awscredentials.AWSCredentialsImpl
plugin="aws-credentials">
<scope>GLOBAL</scope>
<id>{id}</id>
<description>{description}</description>
<accessKey>{accesskey}</accessKey>
<secretKey>{secretkey}</secretKey>
<iamRoleArn/>
<iamMfaSerialNumber/>
<!-- EXTRA -->
</com.cloudbees.jenkins.plugins.awscredentials.AWSCredentialsImpl>
'''
# XML payload template for a secret text credential.
# Format parameters: id, description, text.
CREDENTIAL_ST_CONFIG_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl
plugin="plain-credentials">
<id>{id}</id>
<description>{description}</description>
<secret>{text}</secret>
</org.jenkinsci.plugins.plaincredentials.impl.StringCredentialsImpl>
'''
# curl -X POST \
# https://jenkins.local/job/TEAM-FOLDER/credentials/store/folder/domain/
# _/createCredentials \
# -F secret=@/Users/maksym/secret \
# -F 'json={"": "4",
# "credentials": {
# "file": "secret",
# "id": "test",
# "description": "HELLO-curl",
# "stapler-class":
# "org.jenkinsci.plugins.plaincredentials.impl.FileCredentialsImpl",
# "$class":
# "org.jenkinsci.plugins.plaincredentials.impl.FileCredentialsImpl"
# }
# }'
# >>> files = {'upload_file': open('file.txt','rb')}
# >>> values = {'DB': 'photcat', 'OUT': 'csv', 'SHORT': 'short'}
# >>>
# >>> r = requests.post(url, files=files, data=values)
# XML payload template for a (redacted) file credential.
# Format parameters: id, description.
CREDENTIAL_FC_CONFIG_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<org.jenkinsci.plugins.plaincredentials.impl.FileCredentialsImpl
plugin="[email protected]">
<id>{id}</id>
<description>{description}</description>
<fileName>(unknown)</fileName>
<secretBytes>
<secret-redacted/>
</secretBytes>
</org.jenkinsci.plugins.plaincredentials.impl.FileCredentialsImpl>
'''
# Fully-qualified name of the Jenkins pipeline-triggers job property,
# used when navigating item configurations.
PIPELINETRIGGERSJOBPROPERTY = (
    'org.jenkinsci.plugins.workflow.job.properties.PipelineTriggersJobProperty'
)
class CloudBeesJenkins(Base):
    """CloudBeesJenkins Low-Level Wrapper.

    There can be as many CloudBeesJenkins instances as needed.

    This class depends on the public **requests** library.  It also
    depends on three **zabel-commons** modules, #::commons.exceptions,
    #::commons.sessions, and #::commons.utils.

    # Description

    Mostly a Jenkins low-level API wrapper, but taking into account
    the presence of an Operations Center (i.e., when there are more than
    one Jenkins master).

    This class uses an Operations Center as its entry point.

    There are three levels of APIs:

    - the Operations Center level
    - the managed master level
    - anything below (project/subproject/...)

    It is up to the caller to ensure inter-level consistency while
    handling groups and roles.

    Item creations and handling functions make use of two functions
    provided by the #::commons.utils module,
    #::commons.utils#dict_to_xml() and #::commons.utils#xml_to_dict().

    Things to check: <https://github.com/cloudbees/jenkins-scripts>

    # Implemented features

    - buildinfos
    - credentials
    - domains
    - folders
    - groups
    - items
    - jobs
    - managedmasters
    - metrics
    - plugins
    - projects
    - roles
    - scripts
    - users
    - misc. operations (status, ping, version, ...)

    # Sample use

    ```python
    >>> from zabel.elements.clients import Jenkins
    >>>
    >>> url = 'https://pse.example.com'
    >>> jenkins = Jenkins(url, user, token)
    >>> jenkins.list_oc_managedmasters()
    ```

    # Attributes

    This class exposes templates that can be used while creating
    domains and credentials.  The credentials attributes all have
    an `id` and `description` parameters.

    | Attribute                       | Description                    |
    | ------------------------------- | ------------------------------ |
    | `DOMAIN_CONFIG_TEMPLATE`        | A template for credentials
                                        domains, with one parameter,
                                        `domain`, that can be used when
                                        calling `create_project_domain`
                                        method.                        |
    | `CREDENTIAL_CONFIG_TEMPLATES`   | A dictionary of templates for
                                        Jenkins credentials that can
                                        be used when calling the
                                        `create_domain_credential`
                                        method.  They have two common
                                        parameters, `id` and
                                        `description`.<br/>
                                        The following templates are
                                        available:<br/>
                                        - `'AC'`: AWS access key /
                                        secret key credentials.
                                        It has two parameters in
                                        addition to `id` and
                                        `description`, `accesskey` and
                                        `secretkey`.<br/>
                                        - `'FC'`: file credentials.
                                        It has one parameter in
                                        addition to `id` and
                                        `description`, `filename`.<br/>
                                        - `'ST'`: secret text
                                        credentials.
                                        It has one parameter in
                                        addition to `id` and
                                        `description`, `text`.<br/>
                                        - `'UP'`: user / password
                                        credentials.
                                        It has two parameters in
                                        addition to `id` and
                                        `description`, `user` and
                                        `password`.                    |
    """

    @api_call
    def update_item_crontab(self, url: str, crontab: str) -> None:
        """Update item crontab.

        # Required parameters

        - url: a non-empty string, the item url
        - crontab: a possibly empty string, the new item crontab

        # Raised exceptions

        Raises an _IndexError_ if the item configuration has no
        'properties' entry or no timer trigger defined.
        """
        ensure_nonemptystring('url')
        ensure_instance('crontab', str)

        configuration = self.get_item_configuration(url)
        # Navigate the XML-as-dict configuration down to the timer
        # trigger text element.  Assumes the item already has a
        # 'properties' section and a timer trigger.
        properties = [
            x['properties']
            for x in configuration['flow-definition']
            if 'properties' in x
        ][0]
        triggers = [
            x[PIPELINETRIGGERSJOBPROPERTY]
            for x in properties
            if PIPELINETRIGGERSJOBPROPERTY in x
        ][0]
        triggers[0]['triggers'][0]['hudson.triggers.TimerTrigger'][0][
            'element text'
        ] = crontab
        self.update_item_configuration(url, configuration)

    @api_call
    def install_managedmaster_plugins(
        self, managedmaster_url: str, plugins: Iterable[str]
    ) -> None:
        """Install the requested plugins.

        # Required parameters

        - managedmaster_url: a non-empty string
        - plugins: a list of non-empty strings

        Items in `plugins` are plugin short names.

        # Returned value

        None.
        """
        ensure_nonemptystring('managedmaster_url')

        for plugin in plugins:
            self.install_managedmaster_plugin(managedmaster_url, plugin)

    @api_call
    def install_managedmaster_plugin_fromfile(
        self, managedmaster_url: str, plugin_url: str
    ) -> bool:
        """Install a specific plugin.

        # Required parameters

        - managedmaster_url: a non-empty string
        - plugin_url: a non-empty string, the path of a local .hpi
          file

        # Returned value

        A boolean.  True if successful.
        """
        ensure_nonemptystring('managedmaster_url')
        ensure_nonemptystring('plugin_url')

        with open(plugin_url, 'rb') as plugin:
            files = {'plugin.hpi': plugin.read()}
        return (
            self._post(
                join_url(
                    self.get_managedmaster_endpoint(managedmaster_url),
                    'pluginManager/uploadPlugin',
                ),
                files=files,
            ).status_code
            == 200
        )

    @api_call
    def restart_managedmaster(
        self, managedmaster_url: str, force: bool = False
    ) -> str:
        """Restart managed master.

        Perform a safe restart unless the `force` parameter is set to
        True.

        # Required parameters

        - managedmaster_url: a non-empty string

        # Optional parameters

        - force: a boolean (False by default)

        # Returned value

        The string returned by `run_managedmaster_script`.
        """
        ensure_nonemptystring('managedmaster_url')
        ensure_instance('force', bool)

        call = 'restart' if force else 'doSafeRestart'
        return self.run_managedmaster_script(
            managedmaster_url, f'Jenkins.getInstance().{call}()'
        )

    @api_call
    def await_managedmaster(
        self, managedmaster_url: str, time_out: int = 120, path: str = ''
    ) -> None:
        """Awaits for managed master readiness.

        # Required parameters

        - managedmaster_url: a non-empty string

        # Optional parameters

        - time_out: an integer (120 by default)
        - path: a string (empty by default)

        `time_out` is in seconds.

        It will check managed master readiness at least once, even if
        `time_out` is set to 0.

        If the managed master is not ready after `time_out` seconds,
        an _ApiError_ ('Timeout exhausted, managed master not ready')
        exception is raised.
        """
        ensure_nonemptystring('managedmaster_url')
        ensure_instance('time_out', int)
        ensure_instance('path', str)

        start = time()
        while True:
            try:
                if self.ping_managedmaster(managedmaster_url, path):
                    return
            except ApiError:
                # The master is not reachable yet; keep polling until
                # the timeout is exhausted.
                pass
            if (time() - start) > time_out:
                raise ApiError('Timeout exhausted, managed master not ready')
            sleep(2)

    ####################################################################
    # cbj managedmasters
    #
    # list_managedmaster_groups

    @api_call
    def list_managedmaster_groups(
        self, managedmaster_url: str
    ) -> List[Dict[str, Any]]:
        """Return the list of groups for the specified managed master.

        !!! important
            Not using the managed master endpoint here, as it's not
            what is used, at least with CloudBees Jenkins.

        # Required parameters

        - managedmaster_url: a non-empty string

        # Returned value

        A list of _groups_.  Each group is a dictionary with the
        following entries:

        - description: a string
        - name: a string
        - url: a string
        - members: a list of strings
        - roles: a list of strings
        - roleAssignments: a list of dictionaries

        The returned groups are expanded.
        """
        ensure_nonemptystring('managedmaster_url')

        return self._get_json(
            join_url(managedmaster_url, 'groups'), params={'depth': 1}
        )['groups']

    ####################################################################
    # users
    #
    # delete_user

    @api_call
    def delete_oc_user(self, user_id: str) -> None:
        """Delete an user in the cjoc.

        # Required parameters

        - user_id: a non-empty string

        # Raised exceptions

        A _ValueError_ exception is raised if the required
        Jenkins-Crumb cannot be extracted from the deletion page.
        """
        ensure_nonemptystring('user_id')

        # Extract the Jenkins-Crumb value from the deletion page, as
        # the delete endpoint requires it.
        response = self._get(join_url(self.url, f'cjoc/user/{user_id}/delete'))
        re_match = re.search(
            r'data-crumb-value="(\w+)"',
            response.text,
        )
        if not re_match:
            raise ValueError(
                f"Couldn't get required Jenkins-Crumb for user {user_id}"
            )
        jenkins_crumb = re_match.group(1)
        self.delete_item(
            join_url(self.url, f'cjoc/user/{user_id}'),
            params={'Jenkins-Crumb': jenkins_crumb},
        )

    ####################################################################
    # credentials templates

    DOMAIN_CONFIG_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
<com.cloudbees.plugins.credentials.domains.Domain plugin="credentials">
<name>{domain}</name>
<specifications/>
</com.cloudbees.plugins.credentials.domains.Domain>'''

    CREDENTIAL_CONFIG_TEMPLATES = {
        'UP': CREDENTIAL_UP_CONFIG_TEMPLATE,
        'AC': CREDENTIAL_AC_CONFIG_TEMPLATE,
        'ST': CREDENTIAL_ST_CONFIG_TEMPLATE,
        # 'FC' is the key documented in the class docstring for file
        # credentials; 'CF' is kept as an alias for backward
        # compatibility with existing callers.
        'FC': CREDENTIAL_FC_CONFIG_TEMPLATE,
        'CF': CREDENTIAL_FC_CONFIG_TEMPLATE,
    }
from typing import Any, Dict, List, Union, Optional
from zabel.commons.utils import (
api_call,
ensure_instance,
ensure_noneornonemptystring,
)
from .base.confluence import Confluence as Base
class Confluence(Base):
    """Confluence Low-Level Wrapper.

    There can be as many Confluence instances as needed.

    This class depends on the public **requests** library.  It also
    depends on three **zabel-commons** modules,
    #::zabel.commons.exceptions, #::zabel.commons.sessions,
    and #::zabel.commons.utils.

    # Reference URL

    - <https://developer.atlassian.com/confdev/confluence-server-rest-api>
    - <https://docs.atlassian.com/atlassian-confluence/REST/latest-server/>
    - <https://developer.atlassian.com/server/confluence/remote-confluence-methods>

    A non-admin interface (no API for user&group admin features) to
    Confluence.

    Groups and users are defined on Jira or Crowd.  Changes can take up
    to one hour to propagate.

    # Implemented features

    - groups&users
    - pages
    - search
    - spaces

    What is accessible through the API depends on account rights.

    Whenever applicable, the provided features handle pagination (i.e.,
    they return all relevant elements, not only the first n).

    # Sample use

    ```python
    >>> from zabel.elements.clients import Confluence
    >>>
    >>> url = 'https://confluence.example.com'
    >>> confluence = Confluence(url, user, token)
    >>> confluence.list_users()
    ```
    """

    @api_call
    def list_users(self) -> List[str]:
        """Return a list of confluence users.

        # Returned value

        A list of _users_.  Each user is a string (the user
        'username').

        Users are not properly speaking managed by Confluence.  The
        returned list is the aggregation of group members, with no
        duplication.

        The 'jira-*' groups are ignored.

        Handles pagination (i.e., it returns all group users, not only
        the first n users).
        """
        usernames = set()
        for group in self.list_groups():
            # Skip Jira-managed groups.
            if group['name'].startswith('jira-'):
                continue
            for member in self.list_group_members(group['name']):
                usernames.add(member['username'])
        return list(usernames)

    @api_call
    def update_page_content(
        self,
        page_id: Union[str, int],
        content: str,
        title: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Change page content, creating a new version.

        The new version number is 1 plus the current version number.

        # Required parameters

        - page_id: an integer or a string
        - content: a string

        # Optional parameters

        - title: a non-empty string or None (None by default)

        # Returned value

        A dictionary.  Refer to #create_page() for more information.
        """
        ensure_instance('page_id', (str, int))
        ensure_instance('content', str)
        ensure_noneornonemptystring('title')

        page = self.get_page(page_id)
        if title is not None:
            page['title'] = title
        page['body']['storage']['value'] = content
        # The new version number must be provided explicitly.
        page['version'] = {'number': page['version']['number'] + 1}
        return self.update_page(page_id, page)
from typing import Any, Dict, List, Optional
from .base.artifactory import Artifactory as Base
class Artifactory(Base):
    """Artifactory Low-Level Wrapper.

    There can be as many Artifactory instances as needed.

    This class depends on the public **requests** library.  It also
    depends on three **zabel.commons** modules,
    #::zabel.commons.exceptions, #::zabel.commons.sessions,
    and #::zabel.commons.utils.

    # Reference URL

    - <https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API>

    # Implemented features

    - users
    - groups
    - repositories
    - permission
    - storageinfo
    - token
    - ping

    # Sample use

    ```python
    >>> from zabel.elements.clients import Artifactory
    >>>
    >>> url = 'https://artifactory.example.com/artifactory/api/'
    >>> af = Artifactory(url, user, token)
    >>> af.get_users()
    ```
    """

    def list_users_details(
        self, users: Optional[List[str]] = None
    ) -> List[Dict[str, Any]]:
        """Return the users list, including details.

        If `users` is not provided, will return the details for all
        users.

        # Optional parameters

        - users: a list of strings (users names) or None (None by
          default)

        # Returned value

        A list of _users_.  Each user is a dictionary with the
        following entries:

        - admin: a boolean
        - disableUIAccess: a boolean
        - email: a string
        - groups: a list of strings
        - lastLoggedInMillis: an integer
        - ?lastLoggedIn: a string representing a date
        - name: a string
        - offlineMode: a boolean
        - profileUpdatable: a boolean
        - realm: a string

        # Raised exceptions

        An _ApiError_ exception is raised if a user does not exist.
        """
        if users:
            names = list(users)
        else:
            # Fall back to all known users.
            names = [user['name'] for user in self.list_users()]
        return self._get_batch(f'security/users/{name}' for name in names)

    def list_permissions_details(
        self, permissions: Optional[List[str]] = None
    ) -> List[Dict[str, Any]]:
        """Return the permission targets list, including details.

        If `permissions` is not provided, will return the details for
        all permission targets.

        # Optional parameters

        - permissions: a list of strings (permissions names) or None
          (None by default)

        # Returned value

        A list of _permission targets_.  A permission target is a
        dictionary with the following entries:

        - name: a string
        - repositories: a list of strings
        - includesPattern: a string
        - excludesPattern: a string
        - principals: a dictionary

        # Raised exceptions

        An _ApiError_ exception is raised if a permission does not
        exist.
        """
        if permissions:
            names = list(permissions)
        else:
            # Fall back to all known permission targets.
            names = [perm['name'] for perm in self.list_permissions()]
        return self._get_batch(
            f'security/permissions/{name}' for name in names
        )
from typing import Any, Dict, List, Optional
import base64
import csv
import time
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import (
api_call,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_noneornonemptystring,
join_url,
)
from .base.github import GitHub as Base
class GitHub(Base):
    """GitHub Low-Level Wrapper.

    There can be as many GitHub instances as needed.

    This class depends on the public **requests** library.  It also
    depends on three **zabel-commons** modules,
    #::zabel.commons.exceptions, #::zabel.commons.sessions,
    and #::zabel.commons.utils.

    # Reference URLs

    - <https://developer.github.com/v3/>
    - <https://developer.github.com/enterprise/2.20/v3>
    - <https://stackoverflow.com/questions/10625190>

    # Implemented features

    - hooks
    - organizations
    - repositories
    - users
    - misc. operations (version, staff reports & stats)

    # Sample use

    ```python
    >>> from zabel.elements.clients import GitHub
    >>>
    >>> # standard use
    >>> url = 'https://github.example.com/api/v3/'
    >>> gh = GitHub(url, user, token)
    >>> gh.get_users()

    >>> # enabling management features
    >>> mngt = 'https://github.example.com/'
    >>> gh = GitHub(url, user, token, mngt)
    >>> gh.create_organization('my_organization', 'admin')
    ```
    """

    ####################################################################
    # GitHub misc. operations
    #
    # get_staff_report

    @api_call
    def get_staff_report(self, report: str) -> List[List[str]]:
        """Return staff report.

        # Required parameters

        - report: a non-empty string

        # Returned value

        A list of lists, one entry per line in the report.  All items
        in the sublists are strings.

        # Raised exceptions

        An _ApiError_ exception is raised if the management URL is not
        defined.
        """
        ensure_nonemptystring('report')

        if self.management_url is None:
            raise ApiError('Management URL is not defined')
        # A 202 response means the report is not ready yet; poll until
        # it is.
        retry = True
        while retry:
            rep = self.session().get(join_url(self.management_url, report))
            retry = rep.status_code == 202
            if retry:
                print('Sleeping...')
                time.sleep(5)
        what = list(csv.reader(rep.text.split('\n')[1:], delimiter=','))
        # Drop the trailing empty row produced by a final newline.
        # The emptiness guard avoids an IndexError on empty reports.
        if what and not what[-1]:
            what = what[:-1]
        return what

    ####################################################################
    # GitHub branch
    #
    # create_branch_from_default
    # delete_branch

    @api_call
    def create_branch_from_default(
        self, organization_name: str, repository_name: str, branch: str
    ) -> Dict[str, Any]:
        """Create a branch from the head of the default branch.

        # Required parameters

        - organization_name: a non-empty string
        - repository_name: a non-empty string
        - branch: a non-empty string

        # Returned value

        A _reference_.  A reference is a dictionary with the following
        entries:

        - ref: a string
        - node_id: a string
        - url: a string
        - object: a dictionary

        The `object` dictionary has the following entries:

        - type: a string
        - sha: a string
        - url: a string
        """
        ensure_nonemptystring('organization_name')
        ensure_nonemptystring('repository_name')
        ensure_nonemptystring('branch')

        # Resolve the head of the default branch, then branch off it.
        repo = self.get_repository(organization_name, repository_name)
        default = self.get_repository_reference(
            organization_name,
            repository_name,
            f'heads/{repo["default_branch"]}',
        )
        result = self.create_repository_reference(
            organization_name,
            repository_name,
            f'refs/heads/{branch}',
            default['object']['sha'],
        )
        return result

    @api_call
    def delete_branch(
        self, organization_name: str, repository_name: str, branch: str
    ) -> None:
        """Delete a branch.

        # Required parameters

        - organization_name: a non-empty string
        - repository_name: a non-empty string
        - branch: a non-empty string

        # Returned value

        No content.
        """
        ensure_nonemptystring('organization_name')
        ensure_nonemptystring('repository_name')
        ensure_nonemptystring('branch')

        return self.delete_repository_reference(
            organization_name, repository_name, f'refs/heads/{branch}'
        )

    ####################################################################
    # GitHub repository contents
    #
    # get_repository_textfile
    # create_repository_textfile
    # update_repository_textfile

    def get_repository_textfile(
        self,
        organization_name: str,
        repository_name: str,
        path: str,
        ref: Optional[str] = None,
    ) -> Any:
        """Return the text file content.

        # Required parameters

        - organization_name: a non-empty string
        - repository_name: a non-empty string
        - path: a string

        # Optional parameters

        - ref: a non-empty string or None (None by default)

        # Returned value

        A dictionary with the following entries:

        - name: a string
        - path: a string
        - sha: a string
        - size: an integer
        - content: a string
        - url, html_url, git_url, download_url: strings
        - _links: a dictionary

        # Raised exceptions

        An _ApiError_ exception is raised if the content is not a
        base64-encoded file.
        """
        ensure_nonemptystring('organization_name')
        ensure_nonemptystring('repository_name')
        ensure_instance('path', str)
        ensure_noneornonemptystring('ref')

        result = self.get_repository_content(
            organization_name, repository_name, path, ref
        )
        if result.get('encoding') != 'base64':
            raise ApiError('Content not in base64')
        if result.get('type') != 'file':
            raise ApiError('Content is not a file')
        # Expose the decoded text directly; the encoding entry becomes
        # meaningless once decoded.
        result['content'] = str(base64.b64decode(result['content']), 'utf-8')
        del result['encoding']
        return result

    def create_repository_textfile(
        self,
        organization_name: str,
        repository_name: str,
        path: str,
        message: str,
        content: str,
        branch: Optional[str] = None,
        committer: Optional[Dict[str, str]] = None,
        author: Optional[Dict[str, str]] = None,
    ) -> Dict[str, Any]:
        """Create a new repository text file.

        The created text file must not already exist.  `content` is
        expected to be an utf-8-encoded string.

        # Required parameters

        - organization_name: a non-empty string
        - repository_name: a non-empty string
        - path: a string
        - message: a string
        - content: a string

        # Optional parameters

        - branch: a string or None (None by default)
        - committer: a dictionary or None (None by default)
        - author: a dictionary or None (None by default)

        # Returned value

        A dictionary.
        """
        ensure_nonemptystring('organization_name')
        ensure_nonemptystring('repository_name')
        ensure_instance('path', str)
        ensure_instance('message', str)
        ensure_instance('content', str)
        ensure_noneornonemptystring('branch')
        ensure_noneorinstance('committer', dict)
        ensure_noneorinstance('author', dict)

        return self.create_repository_file(
            organization_name,
            repository_name,
            path,
            message,
            # The API requires base64-encoded content.
            str(base64.b64encode(bytes(content, encoding='utf-8')), 'utf-8'),
            branch,
            committer,
            author,
        )

    def update_repository_textfile(
        self,
        organization_name: str,
        repository_name: str,
        path: str,
        message: str,
        content: str,
        sha: Optional[str] = None,
        branch: Optional[str] = None,
        committer: Optional[Dict[str, str]] = None,
        author: Optional[Dict[str, str]] = None,
    ) -> Dict[str, Any]:
        """Update a repository text file.

        The file must already exist on the repository.  `content` is
        expected to be an utf-8-encoded string.

        You must specify at least `sha` or `branch` (you can specify
        both).

        # Required parameters

        - organization_name: a non-empty string
        - repository_name: a non-empty string
        - path: a string
        - message: a string
        - content: a string

        # Optional parameters

        - sha: a non-empty string or None (None by default)
        - branch: a string or None (None by default)
        - committer: a dictionary or None (None by default)
        - author: a dictionary or None (None by default)

        # Returned value

        A dictionary.

        # Raised exceptions

        A _ValueError_ exception is raised if neither `sha` nor
        `branch` is specified.
        """
        ensure_nonemptystring('organization_name')
        ensure_nonemptystring('repository_name')
        ensure_instance('path', str)
        ensure_instance('message', str)
        ensure_instance('content', str)
        ensure_noneornonemptystring('sha')
        ensure_noneornonemptystring('branch')
        ensure_noneorinstance('committer', dict)
        ensure_noneorinstance('author', dict)

        if sha is None and branch is None:
            raise ValueError('You must specify at least one of: sha, branch.')
        if sha is None:
            # Resolve the current blob sha from the branch head.
            file: Dict[str, str] = self.get_repository_content(
                organization_name,
                repository_name,
                path,
                ref=f'refs/heads/{branch}',
            )
            sha = file['sha']
        return self.update_repository_file(
            organization_name,
            repository_name,
            path,
            message,
            str(base64.b64encode(bytes(content, encoding='utf-8')), 'utf-8'),
            sha,
            branch,
            committer,
            author,
        )
from typing import Any, IO, Union
from .base.kubernetes import Kubernetes as Base
class Kubernetes(Base):
    """Kubernetes Low-Level Wrapper.

    !!! warning
        Preliminary work. Not stable. May change at any time.

    There can be as many _Kubernetes_ instances as needed.

    This module depends on the public **kubernetes** library. It also
    depends on one **zabel-commons** module, #::zabel.commons.utils.

    # Reference URL

    - <https://github.com/kubernetes-client/python>

    # Implemented features

    - namespaces
    - resource quota
    - create and patch from YAML manifests

    # Sample use

    Using the default context as defined in the `~/.kube/config`
    configuration file:

    ```python
    >>> from zabel.elements.clients import Kubernetes
    >>>
    >>> k8s = Kubernetes()
    >>> namespaces = k8s.list_namespaces()
    ```

    Using explicit configuration:

    ```python
    >>> from zabel.elements.clients import Kubernetes
    >>>
    >>> K8S_URL = 'https://kubernetes.example.com'
    >>> k8s = Kubernetes(
    >>>     config={
    >>>         'url': K8S_URL,
    >>>         'api_key': '...',
    >>>         'verify': False,
    >>>     }
    >>> )
    >>> namespaces = k8s.list_namespaces()
    ```
    """

    def _dispatch_from_yaml(
        self,
        handler: Any,
        source: Union[str, IO[str]],
        namespace: str,
        **kwargs: Any,
    ) -> None:
        # Common plumbing for create_from_yaml/apply_from_yaml: feed
        # every YAML document in `source` to `handler` via kubehelpers.
        from .kubehelpers import _map

        _map(
            handler,
            source,
            self._get_apiclient(),
            namespace=namespace,
            **kwargs,
        )

    def create_from_yaml(
        self,
        source: Union[str, IO[str]],
        namespace: str = 'default',
        **kwargs: Any,
    ) -> None:
        """Create Kubernetes object(s) as defined in source stream.

        If more than one object is specified in `source`, it will
        attempt to create all objects.  If one creation fails, the
        remaining operations will be attempted nonetheless.

        # Required parameters

        - source: a non-empty string or an IO object

        # Optional parameters

        - namespace: a non-empty string (`'default'` by default)
        - ...

        Other keywords parameters can be specified.  They will be passed
        as-is to the Kubernetes API.

        # Returned value

        None.

        # Raised exceptions

        If at least one Kubernetes operation fails, a _KubernetesError_
        exception is raised.  Its `api_exceptions` attribute contain the
        list of exceptions raised by the failed Kubernetes operations.
        """
        from .kubehelpers import _create_from_dict

        self._dispatch_from_yaml(_create_from_dict, source, namespace, **kwargs)

    def apply_from_yaml(
        self,
        source: Union[str, IO[str]],
        namespace: str = 'default',
        **kwargs: Any,
    ) -> None:
        """Apply Kubernetes object(s) as defined in source stream.

        If more than one object is specified in `source`, it will
        attempt to apply all objects.  If one apply fails, the
        remaining operations will be attempted nonetheless.

        # Required parameters

        - source: a non-empty string or an IO object

        # Optional parameters

        - namespace: a non-empty string (`'default'` by default)
        - ...

        Other keywords parameters can be specified.  They will be passed
        as-is to the Kubernetes API.

        # Returned value

        None.

        # Raised exceptions

        If at least one Kubernetes operation fails, a _KubernetesError_
        exception is raised.  Its `api_exceptions` attribute contain the
        list of exceptions raised by the failed Kubernetes operations.
        """
        from .kubehelpers import _apply_from_dict

        self._dispatch_from_yaml(_apply_from_dict, source, namespace, **kwargs)
from typing import Iterable, List, Dict, Any
import asyncio
from zabel.commons.exceptions import ApiError
from .base.okta import Okta as Base, OktaException
class Okta(Base):
    """Okta Low-Level Wrapper.

    # Reference url

    <https://developer.okta.com/docs/reference/api/groups/>

    # Implemented features

    - #add_users_to_group
    - #remove_users_from_group
    - #get_user_info
    - #get_group_by_name
    - #add_user_to_group
    - #remove_user_from_group

    # Sample use

    (assuming an `okta` entry in your credentials that contains the
    token api `token`)

    ```
    >>> from zabel.elements.clients import Okta
    >>> url = 'https://okta.example.com'
    >>> okta = Okta(
    >>>     url,
    >>>     token
    >>> )
    >>> user = okta.get_user_info('JP5300')
    ```
    """

    def add_users_to_group(self, group: str, users: Iterable[str]):
        """Add users to Okta group.

        This method retrieve Okta groupId and userIds and after this
        these users are added to group.

        # Required parameters

        - group: a non-empty string
        - users: an list of strings
        """
        okta_group = self.get_group_by_name(group)
        okta_group_id = okta_group['id']
        # Resolve every user login concurrently on the event loop used
        # by the underlying async Okta client.
        loop = asyncio.get_event_loop()
        okta_users = loop.run_until_complete(
            asyncio.gather(*[self._client().get_user(u) for u in users])
        )
        # Each gathered result is subscriptable: u[0] is the resolved
        # user object, falsy when the login could not be resolved
        # (such entries are skipped below).
        loop.run_until_complete(
            asyncio.gather(
                *[
                    self._client().add_user_to_group(
                        userId=u[0].id, groupId=okta_group_id
                    )
                    for u in okta_users
                    if u[0]
                ]
            )
        )

    def remove_users_from_group(self, group: str, users: Iterable[str]):
        """Remove users from Okta group.

        This method retrieve Okta groupId and userIds and after this
        these users are removed from group.

        Best-effort: failures for individual users are printed and the
        remaining users are still processed.

        # Required parameters

        - group: a non-empty string
        - users: an list of strings
        """
        okta_group = self.get_group_by_name(group)
        okta_group_id = okta_group['id']
        for user in users:
            try:
                okta_user = self.get_user_info(user)
            except OktaException as ex:
                print(
                    f'Could not remove user {user} from group {group}, because : {str(ex)}'
                )
                continue
            okta_user_id = okta_user['id']
            try:
                self.remove_user_from_group(okta_group_id, okta_user_id)
            except ApiError:
                print(f'Could not remove user {user} from group {group}')

    def list_group_users(self, group_name: str) -> List[Dict[str, Any]]:
        """List users in Okta group.

        Retrieve the Okta groupId and collecting users in group.

        # Required parameters

        - group_name: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if error is throw by Okta.

        # Returned value

        Return a list of users.  Refer to #get_user_info() for more information.
        """
        okta_group = self.get_group_by_name(group_name)
        return self.list_users_by_group_id(okta_group['id'])

    def list_user_groups(self, user_login: str):
        """List user groups by login

        # Required parameters

        - user_login: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if error is throw by Okta.

        # Returned value

        Return a list of groups.  Refer to #get_group_by_name() for more information.
        """
        try:
            user = self.get_user_info(user_login)
            # NOTE(review): this passes a *user* id to
            # list_users_by_group_id which, per list_group_users above,
            # expects a *group* id and returns users -- yet the
            # docstring promises the user's groups.  Looks like a
            # copy-paste slip; confirm against the base client API
            # before relying on this method.
            return self.list_users_by_group_id(user['id'])
        except OktaException as ex:
            # just wrap the exception as the contract method
            # says we can expect this.
            raise ApiError(ex)
from typing import Any, Dict, Iterable, List, Optional, Union
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import (
api_call,
ensure_instance,
ensure_nonemptystring,
)
from .base.jira import Jira as Base
class Jira(Base):
    """JIRA Low-Level Wrapper.

    There can be as many Jira instances as needed.

    This class depends on the public **requests** and **jira.JIRA**
    libraries.  It also depends on two **zabel-commons** modules,
    #::zabel.commons.exceptions and #::zabel.commons.utils.

    # Reference URLs

    - <https://docs.atlassian.com/jira/REST/server/>
    - <https://docs.atlassian.com/software/jira/docs/api/REST/8.7.1/>

    # Agile references

    - <https://docs.atlassian.com/jira-software/REST/8.7.1/>

    # Using the jira.JIRA python library

    <http://jira.readthedocs.io/en/latest/>

    # Other interesting links

    The various WADLS, such as:

    - <https://jira.tools.digital.engie.com/rest/greenhopper/1.0/application.wadl>
    - <https://jira.tools.digital.engie.com/rest/bitbucket/1.0/application.wadl>

    # Implemented features

    - boards
    - groups
    - issues
    - issuetypeschemes
    - issuetypescreenschemes
    - notificationschemes
    - permissionschemes
    - projects
    - screenschemes
    - search
    - sprints
    - users
    - workflowschemes
    - misc. features (reindexing, plugins & server info)

    Works with basic authentication as well as OAuth authentication.

    It is the responsibility of the user to be sure the provided
    authentication has enough rights to perform the requested operation.

    # Expansion

    The Jira REST API uses resource expansion.  This means the API will
    only return parts of the resource when explicitly requested.

    Many query methods have an `expand` parameter, a comma-separated
    list of entities that are to be expanded, identifying each of them
    by name.

    Here are the default values for the main Jira entities:

    | Entity                    | Default value                                                  |
    | ------------------------- | -------------------------------------------------------------- |
    | PERMISSIONSCHEME_EXPAND   | permissions, user, group, projectRole, field, all              |
    | NOTIFICATIONSCHEME_EXPAND | notificationSchemeEvents, user, group, projectRole, field, all |
    | PROJECT_EXPAND            | description, lead, url, projectKeys                            |
    | USER_EXPAND               | groups, applicationRoles                                       |

    To discover the identifiers for each entity, look at the `expand`
    properties in the parent object.  In the example below, the
    resource declares _widgets_ as being expandable:

    ```json
    {
      "expand": "widgets",
      "self": "http://www.example.com/jira/rest/api/resource/KEY-1",
      "widgets": {
        "widgets": [],
        "size": 5
      }
    }
    ```

    The dot notation allows to specify expansion of entities within
    another entity.  For example, `expand='widgets.fringels'` would
    expand the widgets collection and also the _fringel_ property of
    each widget.

    # Sample use

    ```python
    >>> from zabel.elements.clients import Jira
    >>>
    >>> url = 'https://jira.example.com'
    >>> jc = Jira(url, basic_auth=(user, token))
    >>> jc.get_users()
    ```

    !!! note
        Reuse the JIRA library whenever possible, but always returns
        'raw' values (dictionaries, ..., not classes).
    """

    @api_call
    def get_ids_for_users(self, users_names: List[str]) -> Dict[str, Any]:
        """Return a dictionary of ID for users.

        # Required parameters

        - users_names: a list of strings

        # Returned value

        A dictionary.  In the returned dictionary, keys are the items in
        `users_names`, and the values are one of (1) the ID of the
        corresponding user if it exists, (2) None if no users with that
        name exist, or (3) ... (Ellipsis) if more than one user matches.
        """

        def _user_id(users: List[Dict[str, Any]], cnt: int) -> Any:
            # Tri-state result: a user name, None (no match), or the
            # Ellipsis sentinel (ambiguous).  `Ellipsis` is not a type,
            # so it must not appear in a Union annotation -- the
            # previous `Union[str, None, Ellipsis]` annotation raised
            # TypeError every time this method was called.
            return users[0]['name'] if cnt == 1 else None if cnt == 0 else ...

        return {
            n: _user_id(us, len(us))
            for n, us in zip(
                users_names, [self.search_users(name) for name in users_names]
            )
        }

    @api_call
    def get_ids_for_projectkeys(
        self, keys: Iterable[str]
    ) -> Dict[str, Optional[str]]:
        """Return a dictionary of project names for keys.

        # Required parameters

        - keys: a list of strings

        # Returned value

        A dictionary.  In the returned dictionary, keys are items in
        `keys`, and the value is either the project name or None, if no
        project with key exist.
        """

        def _project_name(key: str) -> Optional[str]:
            try:
                return self.get_project(key)['name']  # type: ignore
            except ApiError:
                return None

        return {key: _project_name(key) for key in keys}

    @api_call
    def get_ids_for_sprints(
        self, sprints: List[str], project_key: str
    ) -> Dict[str, Optional[int]]:
        """Return a dictionary of sprint ids for projects.

        # Required parameters

        - sprints: a list of strings
        - project_key: a string

        # Returned value

        A dictionary.  In the returned dictionary, keys are items in
        `sprints`, and the value is either the sprint ID or None.
        """

        def _get_sprint_id(
            name: str, sprints: List[Dict[str, Any]]
        ) -> Optional[int]:
            for sprint in sprints:
                if sprint['name'] == name:
                    return sprint['id']  # type: ignore
            return None

        # Collect the sprints of every board attached to the project.
        all_sprints: List[Dict[str, Any]] = []
        for board in self.list_project_boards(project_key):
            all_sprints += self.list_board_sprints(board['id'])
        return {s: _get_sprint_id(s, all_sprints) for s in sprints}

    @api_call
    def list_picker_users(
        self,
        servicedesk_id: str,
        query: str,
        field_name: str,
        project_id: str,
        fieldconfig_id: int,
        _: int,
    ) -> List[Dict[str, Any]]:
        """Return list of users matching query hint.

        Simulates /rest/servicedesk/{n}/customer/user-search.

        # Required parameters

        - servicedesk_id: a non-empty string
        - query: a string
        - field_name: a string
        - project_id: a string
        - fieldconfig_id: an integer
        - _: an integer

        # Returned value

        A possibly empty list of _user infos_.  Each user info is a
        dictionary with the following fields:

        - id: a string
        - emailAddress: a string
        - displayName: a string
        - avatar: a string (an URL)
        """

        def _email(html: str) -> str:
            # The picker returns '<strong>...</strong> - email (login)'.
            text = html.replace('<strong>', '').replace('</strong>', '')
            return text.split(' - ')[1].split(' ')[0]

        def _avatar(url: str) -> str:
            if 'avatarId=' in url:
                return (
                    f'/rest/servicedesk/{servicedesk_id}/servicedesk/customer/avatar/'
                    + url.split('avatarId=')[1]
                    + '?size=xsmall'
                )
            return url

        ensure_nonemptystring('servicedesk_id')
        ensure_instance('query', str)
        ensure_instance('field_name', str)
        ensure_instance('project_id', str)

        picked = self._get(
            f'/rest/api/2/user/picker?query={query}',
            params={'maxResults': 10, 'showAvatar': True},
        ).json()['users']
        return [
            {
                'id': user['name'],
                'displayName': user['displayName'],
                'emailAddress': _email(user['html']),
                'avatar': _avatar(user['avatarUrl']),
            }
            for user in picked
        ]
from typing import Any, Callable, Dict, IO, Iterable, List, Mapping, Union
import io
import re
import yaml
from kubernetes import client as klient
from kubernetes.client.rest import ApiException
from zabel.commons.exceptions import ApiError
########################################################################
# Inspired from:
# <https://github.com/kubernetes-client/python/blob/master/kubernetes/utils/create_from_yaml.py>
def _map(
    func: Callable[..., Any],
    source: Union[str, IO[str]],
    client: klient.ApiClient,
    **kwargs: Any,
) -> None:
    """Apply `func` to every YAML document found in `source`.

    Failures are collected; if any occurred, a single _KubernetesError_
    aggregating them is raised once all documents have been processed.
    """
    if isinstance(source, str):
        # Materialize the documents before the StringIO is closed.
        with io.StringIO(source) as stream:
            documents: Iterable[Dict[str, Any]] = list(
                yaml.safe_load_all(stream)
            )
    else:
        documents = yaml.safe_load_all(source)
    collected: List[ApiException] = []
    for document in documents:
        try:
            func(client, document, **kwargs)
        except KubernetesError as err:
            collected += err.api_exceptions
    if collected:
        raise KubernetesError(collected)
def _create_from_dict(
    client: klient.ApiClient, data: Mapping[str, Any], **kwargs: Any
) -> None:
    """
    Perform a create action from a dictionary containing valid
    kubernetes API object (i.e. List, Service, etc).

    # Required parameters

    - client: an ApiClient object, initialized with the client args.
    - data: a dictionary holding valid kubernetes objects

    # Raised exceptions

    _KubernetesError_ which holds the list of `ApiException` instances
    for each object that failed to create.
    """
    api_exceptions: List[ApiException] = []

    def _attempt(body: Mapping[str, Any]) -> None:
        # Try one object; keep going on failure, remember the error.
        try:
            _do_for_yaml_single_item('create', client, body, **kwargs)
        except ApiException as api_exception:
            api_exceptions.append(api_exception)

    if 'List' in data['kind']:
        # Could be "List" or "Pod/Service/...List": iterate its items.
        kind = data['kind'].replace('List', '')
        for item in data['items']:
            # Mitigate cases when server returns a xxxList object
            # (see kubernetes-client/python#586).
            if kind != '':
                item['apiVersion'] = data['apiVersion']
                item['kind'] = kind
            _attempt(item)
    else:
        # A single object.
        _attempt(data)

    if api_exceptions:
        raise KubernetesError(api_exceptions)
def _apply_from_dict(
    client: klient.ApiClient, data: Mapping[str, Any], **kwargs: Any
) -> None:
    """
    Perform an apply action from a dictionary containing valid
    kubernetes API object (i.e. List, Service, etc).

    # Required parameters

    - client: an ApiClient object, initialized with the client args.
    - data: a dictionary holding valid kubernetes objects

    # Raised exceptions

    _KubernetesError_ which holds the list of `client.rest.ApiException`
    instances for each object that failed to apply.
    """
    failures: List[ApiException] = []
    if 'List' in data['kind']:
        # Could be "List" or "Pod/Service/...List": iterate its items.
        item_kind = data['kind'].replace('List', '')
        for item in data['items']:
            # Mitigate cases when server returns a xxxList object
            # (see kubernetes-client/python#586).
            if item_kind != '':
                item['apiVersion'] = data['apiVersion']
                item['kind'] = item_kind
            _safe_apply_from_yaml_single_item(client, item, failures, **kwargs)
    else:
        # A single object.
        _safe_apply_from_yaml_single_item(client, data, failures, **kwargs)

    if failures:
        raise KubernetesError(failures)
def _safe_apply_from_yaml_single_item(
    client: klient.ApiClient,
    body: Mapping[str, Any],
    api_exceptions: List[Any],
    **kwargs: Any,
) -> None:
    """
    Perform an "apply" action (create if it does not exist, patch if it
    already does).

    Failures are appended to `api_exceptions` instead of being raised,
    so the caller can keep processing remaining objects.
    """
    # Tracks whether the initial 'read' succeeded (object exists).
    # NOTE(review): 'read' is invoked below with body=... like the
    # mutating verbs; the generated read_* client methods take
    # name/namespace instead -- confirm this works with the pinned
    # kubernetes client version.
    exists = False
    try:
        _do_for_yaml_single_item('read', client, body, **kwargs)
        exists = True
        _do_for_yaml_single_item('patch', client, body, **kwargs)
    except ApiException as api_exception1:
        if not exists:
            # The read failed: assume the object does not exist yet and
            # fall back to creating it.
            try:
                _do_for_yaml_single_item('create', client, body, **kwargs)
            except ApiException as api_exception2:
                api_exceptions.append(api_exception2)
        else:
            # The read succeeded but the patch failed: report the patch
            # error.
            api_exceptions.append(api_exception1)
def _do_for_yaml_single_item(
    command: str,
    client: klient.ApiClient,
    body: Mapping[str, Any],
    **kwargs: Any,
) -> None:
    """Invoke `command` ('create', 'read', 'patch', ...) on one manifest.

    The kubernetes client API class and method names are derived from
    the manifest's `apiVersion` and `kind` fields.
    """
    # 'apiVersion' is 'group/version' (e.g. 'apps/v1'); a bare 'v1'
    # means the core group.
    group, _, version = body['apiVersion'].partition('/')
    if version == '':
        version = group
        group = 'core'
    # Take care for the case e.g. api_type is "apiextensions.k8s.io"
    # Only replace the last instance
    group = ''.join(group.rsplit('.k8s.io', 1))
    # convert group name from DNS subdomain format to
    # python class name convention
    group = ''.join(word.capitalize() for word in group.split('.'))
    # E.g. 'AppsV1Api', looked up on the kubernetes client package.
    fcn_to_call = '{0}{1}Api'.format(group, version.capitalize())
    k8s_api = getattr(klient, fcn_to_call)(client)
    # Replace CamelCased action_type into snake_case
    kind = body['kind']
    kind = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', kind)
    kind = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', kind).lower()
    # Expect the user to create namespaced objects more often
    if hasattr(k8s_api, f'{command}_namespaced_{kind}'):
        # Decide which namespace we are going to put the object in,
        # if any
        if 'namespace' in body['metadata']:
            namespace = body['metadata']['namespace']
            kwargs['namespace'] = namespace
        getattr(k8s_api, f'{command}_namespaced_{kind}')(body=body, **kwargs)
    else:
        # Cluster-scoped resource: drop any leftover 'namespace' kwarg
        # before calling the non-namespaced variant.
        kwargs.pop('namespace', None)
        getattr(k8s_api, f'{command}_{kind}')(body=body, **kwargs)
class KubernetesError(ApiError):
    """Aggregate error raised while handling a YAML manifest.

    Wraps the list of `ApiException` instances raised by the
    individual Kubernetes operations (exposed as `api_exceptions`).
    """

    def __init__(self, api_exceptions: List[ApiException]) -> None:
        self.api_exceptions = api_exceptions

    def __str__(self) -> str:
        return ''.join(
            'Error from server ({0}): {1}'.format(exc.reason, exc.body)
            for exc in self.api_exceptions
        )
from typing import Any, Dict, Iterable, List, Mapping, Optional, Union
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
api_call,
add_if_specified,
ensure_in,
ensure_instance,
ensure_noneorinstance,
ensure_nonemptystring,
ensure_noneornonemptystring,
join_url,
)
########################################################################
########################################################################
# SquashTM low-level api
# Permission profiles accepted by Squash-TM when granting project
# permissions (see SquashTM.add_project_permission).
PROJECT_PERMISSIONS = [
    'validator',
    'project_viewer',
    'advanced_tester',
    'project_manager',
    'test_designer',
    'test_runner',
    'test_editor',
]
class SquashTM:
"""Squash-TM Base-Level Wrapper.
# Reference URL
<https://www.squashtest.org/fr/actualites/faq-squash-tm/fonctionnalites/api-squash-tm-documentation>
<https://squash-tm.tools.digital.engie.com/squash/api/rest/latest/docs/api-documentation.html>
# Implemented features
- projects
- teams
- users
# Sample use
```python
>>> from zabel.elements.clients import SquashTM
>>>
>>> url = 'https://squash-tm.example.com/squash/api/rest/latest/'
>>> tm = SquashTM(url, user, token)
>>> tm.list_projects()
```
"""
    def __init__(
        self, url: str, user: str, token: str, verify: bool = True
    ) -> None:
        """Create a SquashTM instance object.

        # Required parameters

        - url: a string
        - user: a string
        - token: a string

        The `url` parameter is the top-level API point.  E.g.,
        `https://squash-tm.example.com/squash/api/rest/latest`

        # Optional parameters

        - verify: a boolean (True by default)

        `verify` can be set to False if disabling certificate checks for
        SquashTM communication is required.  Tons of warnings will occur
        if this is set to False.
        """
        # Top-level API endpoint.
        self.url = url
        # Credentials pair used for basic authentication.
        self.auth = (user, token)
        self.verify = verify
        # Reusable HTTP session built from the credentials.
        self.session = prepare_session(self.auth, verify=verify)
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
return f'<{self.__class__.__name__}: {self.url!r}, {self.auth[0]!r}>'
####################################################################
# squash-tm projects
#
# list_projects
# get_project
# create_project
# get_project_permissions
# add_project_permission
    @api_call
    def list_projects(self) -> List[Dict[str, Any]]:
        """Return the projects list.

        # Returned value

        A list of _projects_.  Each project is a dictionary with the
        following two entries:

        - name: a string
        - id: an integer

        It may contain additional entries.
        """
        # Aggregate the 'projects' collection exposed by /projects.
        return self._collect_data('projects', 'projects')
@api_call
def get_project(self, project_id: int) -> Dict[str, Any]:
"""Return project details.
# Required parameters
project_id: an integer
# Returned value
A dictionary with the following entries:
- _type: a string ('project')
- id: an integer
- name: a string
- label: a string
- description: a string
- active: a boolean
- attachments: a list
- _links: a dictionary
"""
ensure_instance('project_id', int)
return self._get(f'projects/{project_id}') # type: ignore
@api_call
def create_project(
self, name: str, label: str, description: str
) -> Dict[str, Any]:
"""Create project.
# Required parameters
- name: a non-empty string
- label: a string
- description: a non-empty string
# Returned value
A dictionary. Please refer to #get_project() for more
informations.
"""
ensure_nonemptystring('name')
ensure_instance('label', str)
ensure_nonemptystring('description')
data = {
'_type': 'project',
'name': name,
'label': label,
'description': description,
}
result = self._post('projects', json=data)
return result # type: ignore
@api_call
def get_project_permissions(self, project_id: int) -> Dict[str, List[int]]:
"""Return project permissions.
# Required parameters
- project_id: an integer
# Returned value
A dictionary with one entry per defined permission. Keys are
permission names and values are lists of items.
Items in the lists are either _teams_ or _users_.
"""
ensure_instance('project_id', int)
result = self._get(f'projects/{project_id}/permissions').json()
return result['content'] # type: ignore
@api_call
def add_project_permission(
self, project_id: int, permission: str, ids: Iterable[int]
) -> Dict[str, Any]:
"""Add users and teams to project permission.
# Required parameters
- project_id: an integer
- permission: a non-empty string
- ids: a list of integers
# Returned value
A dictionary.
"""
ensure_instance('project_id', int)
ensure_in('permission', PROJECT_PERMISSIONS)
ensure_instance('ids', list)
result = self._post(
f'projects/{project_id}/permissions/{permission}',
params={'ids': ','.join(str(i) for i in ids)},
)
return result # type: ignore
####################################################################
# squash-tm teams
#
# list_teams
# get_team
# create_team
# delete_team
# list_team_members
# add_team_members
# remove_team_members
    @api_call
    def list_teams(self) -> List[Dict[str, Any]]:
        """Return the teams list.

        # Returned value

        A list of _teams_.  Each team is a dictionary with at least the
        two following entries:

        - id: an integer
        - name: a string
        """
        # Aggregate the 'teams' collection exposed by /teams.
        return self._collect_data('teams', 'teams')
@api_call
def get_team(self, team_id: int) -> Dict[str, Any]:
"""Return team details.
# Required parameters
- team_id: an integer
# Returned value
A dictionary with the following entries:
- _type: a string
- id: an integer
- name: a string
- description: a string
- members: a list
- created_by: a string
- created_on: a string
- last_modified_by: a string
- last_modified_on: a string
- _links: a dictionary
"""
ensure_instance('team_id', int)
return self._get(f'teams/{team_id}') # type: ignore
@api_call
def create_team(self, name: str, description: str) -> Dict[str, Any]:
"""Create a new team.
# Required parameters
- name: a non-empt string
- description: a non-empty string
# Returned value
A dictionary. Please refer to #get_team() for more
informations.
"""
ensure_nonemptystring('name')
ensure_nonemptystring('description')
data = {'_type': 'team', 'name': name, 'description': description}
result = self._post('teams', json=data)
return result # type: ignore
@api_call
def delete_team(self, team_id: int) -> None:
"""Delete team.
# Required parameters
- team_id: an integer
# Returned value
None.
"""
ensure_instance('team_id', int)
return self._delete(f'teams/{team_id}') # type: ignore
    @api_call
    def list_team_members(self, team_id: int) -> List[Dict[str, Any]]:
        """Return list of team members.

        # Required parameters

        - team_id: an integer

        # Returned value

        A list of _users_.  Please refer to #get_user() for more
        informations.
        """
        ensure_instance('team_id', int)
        # Aggregate the 'members' collection of the team.
        return self._collect_data(f'teams/{team_id}/members', 'members')
@api_call
def add_team_members(
self, team_id: int, members_ids: Iterable[int]
) -> None:
"""Add members to team.
Unknown or invalid IDs, as well as IDs already in team, are
silently ignored.
# Required parameters
- team_id: an integer
- members_ids: a list of integers
# Returned value
None.
"""
ensure_instance('team_id', int)
ensure_instance('members_ids', list)
result = self._post(
f'teams/{team_id}/members',
params={'userIds': ','.join(str(m) for m in members_ids)},
)
return result # type: ignore
@api_call
def remove_team_members(
self, team_id: int, members_ids: Iterable[int]
) -> None:
"""Remove members from team.
Member IDs not part of the team are silently ignored.
# Required parameters
- team_id: an integer
- members_ids: a list of integers
# Returned value
None.
"""
ensure_instance('team_id', int)
ensure_instance('members_ids', list)
result = self._delete(
f'teams/{team_id}/members',
params={'userIds': ','.join(str(m) for m in members_ids)},
)
return result # type: ignore
####################################################################
# squash-tm users
#
# list_users
# get_user
# create_user
# delete_user
# update_user
    @api_call
    def list_users(self) -> List[Dict[str, Any]]:
        """Return the users list.

        # Returned value

        A list of _users_.  Each user is a dictionary with at least the
        following entries:

        - id: an integer
        - login: a string
        - active: a boolean
        - group: a string (`'user'` or `'admin'`)
        """
        # Aggregate the 'users' collection exposed by /users.
        return self._collect_data('users', 'users')
@api_call
def get_user(self, user_id: int) -> Dict[str, Any]:
"""Return user details.
# Required parameters
- user_id: an integer
# Returned value
A dictionary with the following entries:
- _type: a string (`'user'`)
- id: an integer
- first_name: a string
- last_name: a string
- login: a string
- email: a string
- active: a boolean
- group: a string
- teams: a list
- last_connected_on: a string
- created_by: a string
- created_on: a string
- last_modified_by: a string
- last_modified_on: a string
- _links: a dictionary
"""
ensure_instance('user_id', int)
return self._get(f'users/{user_id}') # type: ignore
@api_call
def create_user(
self,
login: str,
password: str,
first_name: Optional[str] = None,
last_name: Optional[str] = None,
email: Optional[str] = None,
group: str = 'user',
) -> Dict[str, Any]:
"""Create a new user.
# Required parameters
- login: a non-empty string
- password: a non-empty string
# Optional parameters
- first_name: a non-empty string or None (None by default)
- last_name: a non-empty string or None (None by default)
- email: a non-empty or None (None by default)
- group: a non-empty string, either `'user'` or `'admin'`
(`'user'` by default)
If `last_name` is not provided (or is None), it will default to
the value of `login`.
# Returned value
A dictionary. Please refer to #get_user() for more
informations.
"""
ensure_nonemptystring('login')
ensure_nonemptystring('password')
ensure_noneornonemptystring('first_name')
ensure_noneornonemptystring('last_name')
ensure_noneornonemptystring('email')
ensure_in('group', ['user', 'admin'])
data = {
'_type': 'user',
'login': login,
'password': password,
'last_name': last_name or login,
'first_name': first_name or '',
'email': email or '',
'group': group,
}
result = self._post('users', json=data)
return result # type: ignore
@api_call
def delete_user(self, user_id: int) -> None:
"""Delete user.
# Required parameters
- user_id: an integer
# Returned value
None.
"""
ensure_instance('user_id', int)
return self._delete(f'users/{user_id}') # type: ignore
    @api_call
    def update_user(
        self,
        user_id: int,
        first_name: Optional[str] = None,
        last_name: Optional[str] = None,
        email: Optional[str] = None,
        active: Optional[bool] = None,
        login: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Update user.

        Only the specified fields are modified.

        # Required parameters

        - user_id: an integer

        # Optional parameters

        - first_name: a non-empty string or None (None by default)
        - last_name: a non-empty string or None (None by default)
        - email: a non-empty string or None (None by default)
        - active: a boolean or None (None by default)
        - login: a non-empty string or None (None by default)

        # Returned value

        A dictionary.  Please refer to #get_user() for more
        informations.
        """
        ensure_instance('user_id', int)
        ensure_noneornonemptystring('first_name')
        ensure_noneornonemptystring('last_name')
        ensure_noneornonemptystring('email')
        ensure_noneorinstance('active', bool)
        ensure_noneornonemptystring('login')
        data = {'_type': 'user'}
        # Unspecified (None) fields are left out of the PATCH payload.
        add_if_specified(data, 'first_name', first_name)
        add_if_specified(data, 'last_name', last_name)
        add_if_specified(data, 'email', email)
        add_if_specified(data, 'active', active)
        add_if_specified(data, 'login', login)
        result = self._patch(f'users/{user_id}', json=data)
        return result  # type: ignore
####################################################################
# squash-tm campaigns
#
# list_campaigns
# get_campaign
# list_campaign_iterations
    # list_campaign_testplan
    @api_call
    def list_campaigns(self) -> List[Dict[str, Any]]:
        """Return the campaigns list.

        # Returned value

        A list of _campaigns_.  Each campaign is a dictionary with at
        least the two following entries:

        - id: an integer
        - name: a string
        """
        # Aggregate the 'campaigns' collection exposed by /campaigns.
        return self._collect_data('campaigns', 'campaigns')
    @api_call
    def get_campaign(self, campaign_id: int) -> Dict[str, Any]:
        """Return campaign details.

        # Required parameters

        - campaign_id: an integer

        # Returned value

        A dictionary with the following entries:

        - _type: a string
        - id: an integer
        - name: a string
        - reference: a string
        - description: a string
        - status: a string
        - project: a dictionary
        - path: a string
        - parent: a dictionary
        - created_by: a string
        - created_on: a string
        - last_modified_by: a string
        - last_modified_on: a string
        - actual_start_date: a string
        - actual_end_date: a string
        - actual_start_auto: a boolean
        - actual_end_auto: a boolean
        - custom_fields: a list
        - iterations: a list of dictionaries
        - attachments: a list
        - _links: a dictionary
        """
        ensure_instance('campaign_id', int)
        return self._get(f'campaigns/{campaign_id}')  # type: ignore
@api_call
def list_campaign_iterations(
self, campaign_id: int
) -> List[Dict[str, Any]]:
"""Return iterations of a campaign.
# Returned value
A list of campaign _iterations_. Each iteration is a
dictionary with at least the two following entries:
- id: an integer
- name: a string
"""
ensure_instance('campaign_id', int)
return self._collect_data(
f'campaigns/{campaign_id}/iterations', 'iterations'
)
@api_call
def list_campaign_testplan(self, campaign_id: int) -> List[Dict[str, Any]]:
"""Return the test-plan of a campaign.
# Returned value
A list of _testplan items_. Each testplan item is a dictionary
with at least the two following entries:
- id: an integer
- name: a string
"""
ensure_instance('campaign_id', int)
return self._collect_data(
f'campaigns/{campaign_id}/test-plan', 'campaign-test-plan-items'
)
####################################################################
# squash-tm requirements
#
# list_requirements
# get_requirement
@api_call
def list_requirements(self) -> List[Dict[str, Any]]:
    """Return all known requirements.

    # Returned value

    A list of _requirements_.  Each requirement is a dictionary
    with at least an `id` (an integer) and a `name` (a string)
    entry.
    """
    return self._collect_data('requirements', 'requirements')
@api_call
def get_requirement(self, requirement_id: int) -> Dict[str, Any]:
    """Return the details of a requirement.

    # Required parameters

    - requirement_id: an integer

    # Returned value

    A dictionary describing the requirement, with entries
    including `_type`, `id`, `name`, `project`, `path`, `parent`,
    `mode`, `current_version`, `versions`, and `_links`.
    """
    ensure_instance('requirement_id', int)
    endpoint = f'requirements/{requirement_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm test cases
#
# list_testcases
# get_testcase
@api_call
def list_testcases(self) -> List[Dict[str, Any]]:
    """Return all known test cases.

    # Returned value

    A list of _test-cases_.  Each test-case is a dictionary with
    at least an `id` (an integer) and a `name` (a string) entry.
    """
    return self._collect_data('test-cases', 'test-cases')
@api_call
def get_testcase(self, testcase_id: int) -> Dict[str, Any]:
    """Return the details of a test case.

    # Required parameters

    - testcase_id: an integer

    # Returned value

    A dictionary describing the test case, with entries including
    `_type`, `id`, `name`, `reference`, `kind`, `project`, `path`,
    `parent`, `created_by`, `created_on`, `last_modified_by`,
    `last_modified_on`, `importance`, `status`, `nature`, `type`,
    `prerequisite`, `description`, `custom_fields`, `steps`,
    `parameters`, `datasets`, `language`, `script`,
    `verified_requirements`, `attachments`, and `_links`.
    """
    ensure_instance('testcase_id', int)
    endpoint = f'test-cases/{testcase_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm Test suites
#
# get_testsuite_testplan
# get_testsuite
@api_call
def get_testsuite_testplan(
    self, testsuite_id: int
) -> List[Dict[str, Any]]:
    """Return the test-plan of a test suite.

    # Required parameters

    - testsuite_id: an integer

    # Returned value

    A list of test-plan items (dictionaries; exact structure to be
    documented).
    """
    ensure_instance('testsuite_id', int)
    endpoint = f'test-suites/{testsuite_id}/test-plan'
    return self._collect_data(endpoint, 'test-plan')
@api_call
def get_testsuite(self, testsuite_id: int) -> Dict[str, Any]:
    """Return the details of a test suite.

    # Required parameters

    - testsuite_id: an integer

    # Returned value

    A dictionary describing the test suite, with entries including
    `_type`, `id`, `name`, `description`, `parent`, `created_by`,
    `created_on`, `last_modified_by`, `last_modified_on`,
    `project`, `path`, `custom_fields`, `test_plan`,
    `attachments`, and `_links`.
    """
    ensure_instance('testsuite_id', int)
    endpoint = f'test-suites/{testsuite_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm test steps
#
# get_teststep
@api_call
def get_teststep(self, teststep_id: int) -> Dict[str, Any]:
    """Return the details of a test step.

    # Required parameters

    - teststep_id: an integer

    # Returned value

    A dictionary describing the test step, with entries including
    `_type`, `id`, `test_case`, `expected_result`, `index`,
    `custom_fields`, `attachments`, and `_links`.
    """
    ensure_instance('teststep_id', int)
    endpoint = f'test-steps/{teststep_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm test case folders
#
# list_testcasefolders
# get_testcasefolder
# get_testcasefolder_content
@api_call
def list_testcasefolders(self) -> List[Dict[str, Any]]:
    """Return all known test-case folders.

    # Returned value

    A list of _test-case folders_.  Each folder is a dictionary
    with at least an `id` (an integer) and a `name` (a string)
    entry.
    """
    return self._collect_data('test-case-folders', 'test-case-folders')
@api_call
def get_testcasefolder(self, testcasefolder_id: int) -> Dict[str, Any]:
    """Return the details of a test-case folder.

    # Required parameters

    - testcasefolder_id: an integer

    # Returned value

    A dictionary describing the folder, with entries including
    `_type`, `id`, `name`, `project`, `path`, `parent`,
    `created_by`, `created_on`, `last_modified_by`,
    `last_modified_on`, `description`, `attachments`, and
    `_links`.
    """
    ensure_instance('testcasefolder_id', int)
    endpoint = f'test-case-folders/{testcasefolder_id}'
    return self._get(endpoint)  # type: ignore
@api_call
def get_testcasefolder_content(
    self, testcasefolder_id: int
) -> List[Dict[str, Any]]:
    """Return the content of a test-case folder.

    # Required parameters

    - testcasefolder_id: an integer

    # Returned value

    A list of dictionaries, each with `_type`, `id`, `name`,
    `reference`, and `_links` entries.
    """
    ensure_instance('testcasefolder_id', int)
    endpoint = f'test-case-folders/{testcasefolder_id}/content'
    return self._collect_data(endpoint, 'content')
####################################################################
# squash-tm executions
#
# get_execution
@api_call
def get_execution(self, execution_id: int) -> Dict[str, Any]:
    """Return the details of an execution.

    # Required parameters

    - execution_id: an integer

    # Returned value

    A dictionary describing the execution, with entries including
    `_type`, `id`, `execution_order`, `execution_status`,
    `last_executed_by`, `last_executed_on`, `execution_mode`,
    `reference`, `dataset_label`, `execution_steps`, `comment`,
    `prerequisite`, `description`, `importance`, `nature`, `type`,
    `test_case_status`, `test_plan_item`, `custom_fields`,
    `test_case_custom_fields`, `language`, `script_name`,
    `attachments`, and `_links`.
    """
    ensure_instance('execution_id', int)
    endpoint = f'executions/{execution_id}'
    return self._get(endpoint)  # type: ignore
@api_call
def list_execution_executionsteps(
    self, execution_id: int
) -> List[Dict[str, Any]]:
    """Return the execution steps of an execution.

    # Required parameters

    - execution_id: an integer

    # Returned value

    A list of _execution steps_ (dictionaries; exact structure to
    be documented).
    """
    ensure_instance('execution_id', int)
    endpoint = f'executions/{execution_id}/execution-steps'
    return self._collect_data(endpoint, 'execution-steps')
####################################################################
# squash-tm executions-steps
#
# get_executionstep
@api_call
def get_executionstep(self, executionstep_id: int) -> Dict[str, Any]:
    """Return the details of an execution step.

    # Required parameters

    - executionstep_id: an integer

    # Returned value

    A dictionary describing the execution step, with entries
    including `_type`, `id`, `execution_status`, `action`,
    `expected_result`, `comment`, `last_executed_by`,
    `last_executed_on`, `execution_step_order`,
    `referenced_test_step`, `execution`, `custom_fields`,
    `test_step_custom_fields`, `attachments`, and `_links`.
    """
    ensure_instance('executionstep_id', int)
    endpoint = f'execution-steps/{executionstep_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm iterations
#
# get_iteration_testplan
# list_iteration_testsuites
# get_iteration
@api_call
def get_iteration_testplan(
    self, iteration_id: int
) -> List[Dict[str, Any]]:
    """Return the test-plan of an iteration.

    # Required parameters

    - iteration_id: an integer

    # Returned value

    A list of test-plan items (dictionaries; exact structure to be
    documented).
    """
    ensure_instance('iteration_id', int)
    endpoint = f'iterations/{iteration_id}/test-plan'
    return self._collect_data(endpoint, 'test-plan')
@api_call
def list_iteration_testsuites(
    self, iteration_id: int
) -> List[Dict[str, Any]]:
    """Return the test suites of an iteration.

    # Required parameters

    - iteration_id: an integer

    # Returned value

    A list of _test-suites_ (dictionaries; exact structure to be
    documented).
    """
    ensure_instance('iteration_id', int)
    endpoint = f'iterations/{iteration_id}/test-suites'
    return self._collect_data(endpoint, 'test-suites')
@api_call
def get_iteration(self, iteration_id: int) -> Dict[str, Any]:
    """Return the details of an iteration.

    # Required parameters

    - iteration_id: an integer

    # Returned value

    A dictionary describing the iteration, with entries including
    `_type`, `id`, `name`, `reference`, `description`, `parent`,
    `created_by`, `created_on`, `last_modified_by`,
    `last_modified_on`, `actual_start_date`, `actual_end_date`,
    `actual_start_auto`, `actual_end_auto`, `custom_fields`,
    `test_suites`, `attachments`, and `_links`.
    """
    ensure_instance('iteration_id', int)
    endpoint = f'iterations/{iteration_id}'
    return self._get(endpoint)  # type: ignore
####################################################################
# squash-tm private helpers
def _get(self, api: str) -> requests.Response:
    """Perform a GET request against the SquashTM API."""
    return self.session().get(join_url(self.url, api))
def _post(
    self,
    api: str,
    json: Optional[Mapping[str, Any]] = None,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> requests.Response:
    """Perform a POST request against the SquashTM API."""
    return self.session().post(
        join_url(self.url, api), json=json, params=params
    )
def _patch(self, api: str, json: Mapping[str, Any]) -> requests.Response:
    """Perform a PATCH request against the SquashTM API."""
    return self.session().patch(join_url(self.url, api), json=json)
def _delete(
    self,
    api: str,
    data: Optional[Union[Mapping[str, str], bytes]] = None,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> requests.Response:
    """Perform a DELETE request against the SquashTM API."""
    return self.session().delete(
        join_url(self.url, api), data=data, params=params
    )
def _collect_data(self, api: str, key: str) -> List[Any]:
    """Return SquashTM API call results, collecting key values.

    The API call is expected to return a paginated JSON structure.
    Pages are fetched until `totalElements` entries have been
    collected.

    # Required parameters

    - api: a string, the endpoint (relative to the instance URL)
    - key: a string, the entry under `'_embedded'` holding the
      collected values

    # Returned value

    A list of the collected values (one entry per element).

    # Raised exceptions

    An _ApiError_ exception is raised if the response is not JSON,
    if a subsequent page is empty, or if pagination stops making
    progress.
    """
    page_size = 1000
    page = 0
    try:
        req = self._get(f'{api}?size={page_size}').json()
    except ValueError:
        raise ApiError(
            'Unexpected response, was expecting JSON (%s)'
            % join_url(self.url, api)
        ) from None
    # No '_embedded' part if 'totalElements' is zero.
    values: List[Any] = (
        req['_embedded'][key] if req['page']['totalElements'] else []
    )
    while 'page' in req and len(values) < req['page']['totalElements']:
        page += 1
        req = self._get(f'{api}?size={page_size}&page={page}').json()
        if not req:
            raise ApiError('Empty response (%s)' % join_url(self.url, api))
        items = req['_embedded'][key]
        if not items:
            # Without this guard, a non-empty response carrying no
            # new items would loop forever ('values' never grows).
            raise ApiError(
                'Pagination stalled (%s)' % join_url(self.url, api)
            )
        values += items
    return values
from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Union
from xml.etree import cElementTree as ET
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
api_call,
dict_to_xml,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
join_url,
xml_to_dict,
)
########################################################################
########################################################################
# CloudBees Jenkins low-level api
class CloudBeesJenkins:
"""Mostly a Jenkins low-level API wrapper, but taking into account
the presence of an Operations Center (i.e., when there are more than
one Jenkins master).
This library uses an Operations Center as its entry point.
There are three levels of APIs:
- the Operations Center level
- the managed master level
- anything below (project/subproject/...)
It is up to the caller to ensure inter-level consistency while
handling groups and roles.
Item creations and handling functions make use of two functions
provided by the _zabel.commons.utils_ module: `xml_to_dict` and
`dict_to_xml`.
Things to check: <https://github.com/cloudbees/jenkins-scripts>
```python
>>> from zabel.elements.clients import Jenkins
>>>
>>> url = 'https://pse.example.com'
>>> jenkins = Jenkins(url, user, token)
>>> jenkins.list_oc_managedmasters()
```
"""
def __init__(
    self,
    url: str,
    user: str,
    token: str,
    cookies: Optional[Dict[str, str]] = None,
    verify: bool = True,
) -> None:
    """Create a CloudBeesJenkins instance object.

    # Required parameters

    - url: a non-empty string, the URL of the top-level Jenkins
      Operations Center (for example,
      `'https://pse.example.com'`)
    - user: a string, the account used to access the API
    - token: a string, the token used to access the API

    # Optional parameters

    - cookies: a dictionary or None (None by default)
    - verify: a boolean (True by default).  Set it to False to
      disable certificate checks for Jenkins communication (tons
      of warnings will then occur).
    """
    ensure_nonemptystring('url')
    ensure_instance('user', str)
    ensure_instance('token', str)
    ensure_noneorinstance('cookies', dict)
    self.url = url
    self.auth = (user, token)
    self.cookies = cookies
    self.verify = verify
    # CSRF crumb state, lazily filled when first needed.
    self.crumb: Optional[Dict[str, str]] = None
    self.crumb_master: Optional[str] = None
    # Cache of managed-master endpoints (they never change).
    self._endpoints: Dict[str, str] = {}
    self.session = prepare_session(self.auth, self.cookies, verify=verify)
def __str__(self) -> str:
    """Return a short human-readable description."""
    return '{}: {}'.format(self.__class__.__name__, self.url)
def __repr__(self) -> str:
    """Return an unambiguous representation (URL and user)."""
    cls = self.__class__.__name__
    return '<%s: %r, %r>' % (cls, self.url, self.auth[0])
####################################################################
# cbj operations center global features
#
# list_oc_groups
# list_oc_roles
# list_oc_users
# list_oc_managedmasters
# list_oc_folders
# get_oc_metrics
# run_oc_script
# get_version
# Jenkins item '_class' values used to recognize managed masters
# and CloudBees folders in API responses.
MM_CLASS = 'com.cloudbees.opscenter.server.model.ManagedMaster'
CB_FOLDER = 'com.cloudbees.hudson.plugins.folder.Folder'
@api_call
def list_oc_groups(self) -> List[Dict[str, Any]]:
    """Return the groups defined on the Operations Center.

    # Returned value

    A list of _groups_.  Each group is a dictionary with
    `description`, `name`, `url`, `members`, `roles`,
    `roleAssignments`, `users`, and `groups` entries.
    """
    groups_url = join_url(self.url, 'cjoc/groups')
    response = self._get_json(groups_url, params={'depth': '1'})
    return response['groups']  # type: ignore
@api_call
def list_oc_roles(self) -> List[Dict[str, Any]]:
    """Return the roles defined on the Operations Center.

    # Returned value

    A list of _roles_.  Each role is a dictionary with
    `description`, `id`, `filterable`, `shortUrl`, and
    `grantedPermissions` entries.
    """
    roles_url = join_url(self.url, 'cjoc/roles')
    response = self._get_json(roles_url, params={'depth': '1'})
    return response['roles']  # type: ignore
@api_call
def list_oc_users(self) -> List[Dict[str, Any]]:
    """Return the users known to the Operations Center.

    # Returned value

    A list of _users_.  Each user is a dictionary with `user`,
    `project`, and `lastChange` entries.  The `user` entry is
    itself a dictionary with `fullName` and `absoluteUrl` entries.
    """
    people_url = join_url(self.url, 'cjoc/asynchPeople')
    return self._get_json(people_url)['users']  # type: ignore
@api_call
def list_oc_managedmasters(self, depth: int = 0) -> List[Dict[str, Any]]:
    """Return all managed masters known to the Operations Center.

    This method retrieves managed masters irrespective of their
    location on the Operations Center.

    # Optional parameters

    - depth: an integer (`0` by default)

    # Returned value

    A list of _managed masters_.  With the default depth, each is
    a dictionary with `_class`, `name`, and `url` entries.
    """
    ensure_instance('depth', int)
    controllers_url = join_url(self.url, 'cjoc/view/Controllers')
    response = self._get_json(controllers_url, params={'depth': str(depth)})
    return response['jobs']  # type: ignore
@api_call
def list_oc_folders(self) -> List[Dict[str, Any]]:
    """Return the folders defined on the Operations Center.

    # Returned value

    A list of _folders_.  Each folder is a dictionary with
    `_class`, `url`, and `name` entries; the `_class` value is
    `'com.cloudbees.hudson.plugins.folder.Folder'`.
    """
    jobs = self._get_json(join_url(self.url, 'cjoc'))['jobs']
    return [job for job in jobs if job['_class'] == self.CB_FOLDER]
@api_call
def get_oc_metrics(self) -> Dict[str, Any]:
    """Return the Operations Center metrics.

    More info on metrics is available here:

    - <https://wiki.jenkins.io/display/JENKINS/Metrics+Plugin>
    - <https://go.cloudbees.com/docs/cloudbees-documentation/cje-user-guide/index.html#monitoring>

    # Returned value

    A dictionary with `version`, `gauges`, `counters`,
    `histograms`, `meters`, and `timers` entries.  Each gauge is a
    dictionary with a single `value` entry, and each counter is a
    dictionary with a single `count` entry.
    """
    metrics_url = join_url(self.url, 'cjoc/metrics/currentUser/metrics')
    return self._get_json(metrics_url)
@api_call
def run_oc_script(self, script: str) -> str:
    """Execute a groovy script on the Operations Center.

    # Required parameters

    - script: a non-empty string

    # Returned value

    A string, the output produced by the script.  It is up to the
    caller to process this returned string.
    """
    ensure_nonemptystring('script')
    script_url = join_url(join_url(self.url, 'cjoc'), 'scriptText')
    return self._post(script_url, data={'script': script}).text
@api_call
def get_version(self, path: str = 'cjoc') -> str:
    """Return the Operations Center or managed master version.

    By default, looks for the Operations Center version.

    # Optional parameters

    - path: a non-empty string, where to look (`'cjoc'` by
      default)

    # Returned value

    A string.  For example, `'2.60.3.1'`.
    """
    ensure_nonemptystring('path')
    response = self.session().get(join_url(self.url, path))
    # Jenkins advertises its version in the 'X-Jenkins' header.
    return response.headers['X-Jenkins']
####################################################################
# cbj managedmasters
#
# list_managedmasters
# get_managedmaster_status
# get_managedmaster_endpoint
# list_managedmaster_projects
# list_managedmaster_roles
# list_managedmaster_users
# list_managedmaster_plugins
# install_managedmaster_plugin
# provision_and_start_managedmaster
# stop_managedmaster
# acknowledge_managedmaster_error
# get_managedmaster_metrics
# ping_managedmaster
# build_managedmaster_job
# get_managedmaster_queueitem
# get_managedmaster_buildinfo
@api_call
def list_managedmasters(self, path: str = 'cjoc') -> List[Dict[str, Any]]:
    """Return the managed masters directly under `path`.

    Only managed masters at the highest level of the specified
    path are returned.

    # Optional parameters

    - path: a non-empty string, where to look (`'cjoc'` by
      default)

    # Returned value

    A list of _managed masters_.  Each managed master is a
    dictionary with at least `_class`, `url`, and `name` entries;
    the `_class` value is
    `'com.cloudbees.opscenter.server.model.ManagedMaster'`.
    """
    ensure_nonemptystring('path')
    jobs = self._get_json(join_url(self.url, path))['jobs']
    return [job for job in jobs if job['_class'] == self.MM_CLASS]
@api_call
def get_managedmaster_status(
    self, managedmaster_url: str
) -> Dict[str, Any]:
    """Return the managed master status.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A dictionary describing the managed master, with entries
    including `_class`, `actions`, `approved`, `description`,
    `displayName`, `displayNameOrNull`, `endpoint`,
    `fullDisplayName`, `fullName`, `healthReport`, `name`,
    `online`, `state`, `url`, and `validActions`.
    """
    ensure_nonemptystring('managedmaster_url')
    return self._get_json(managedmaster_url)
def get_managedmaster_endpoint(self, managedmaster_url: str) -> str:
    """Return the managed master endpoint.

    Endpoints are cached, as they do not change.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A string, the endpoint (an URL).

    # Raised exceptions

    May raise an _ApiError_ exception if the master is
    initializing (no endpoint available yet).
    """
    ensure_nonemptystring('managedmaster_url')
    cached = self._endpoints.get(managedmaster_url)
    if cached is None:
        cached = self._get_json(managedmaster_url)['endpoint']
        if not cached:
            raise ApiError(
                'Endpoint not available for managed master %s'
                % managedmaster_url
            )
        self._endpoints[managedmaster_url] = cached
    return cached
@api_call
def list_managedmaster_projects(
    self, managedmaster_url: str
) -> List[Dict[str, Any]]:
    """Return the projects of the specified managed master.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A list of _projects_.  Each project is a dictionary with at
    least `_class`, `name`, and `url` entries (a `color` entry may
    also be present).
    """
    ensure_nonemptystring('managedmaster_url')
    endpoint = self.get_managedmaster_endpoint(managedmaster_url)
    return self._get_json(endpoint)['jobs']  # type: ignore
@api_call
def list_managedmaster_roles(
    self, managedmaster_url: str
) -> List[Dict[str, Any]]:
    """Return the roles of the specified managed master.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A list of _roles_ (expanded).  Each role is a dictionary with
    `description`, `id`, `filterable`, `shortUrl`, and
    `grantedPermissions` entries.
    """
    ensure_nonemptystring('managedmaster_url')
    roles_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url), 'roles'
    )
    response = self._get_json(roles_url, params={'depth': '1'})
    return response['roles']  # type: ignore
@api_call
def list_managedmaster_users(
    self, managedmaster_url: str
) -> List[Dict[str, Any]]:
    """Return the users of the specified managed master.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A list of _users_.  Each user is a dictionary with `project`,
    `user`, and `lastChange` entries.  If `project` (or
    `lastChange`) is None, no activity from this user has been
    recorded.  The `user` entry is itself a dictionary with
    `absoluteUrl` and `fullName` entries.
    """
    ensure_nonemptystring('managedmaster_url')
    people_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url),
        'asynchPeople',
    )
    response = self._get_json(people_url, params={'depth': '1'})
    return response['users']  # type: ignore
@api_call
def list_managedmaster_plugins(
    self, managedmaster_url: str
) -> List[Dict[str, Any]]:
    """Return the plugins installed on the managed master.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A list of _plugins_.  Each plugin is a dictionary with entries
    including `enabled`, `supportsDynamicLoad`,
    `requiredCoreVersion`, `deleted`, `bundled`, `backupVersion`,
    `longName`, `active`, `hasUpdate`, `dependencies`, `version`,
    `pinned`, `url`, `downgradable`, and `shortName`.  Each entry
    in `dependencies` is a dictionary with `optional`,
    `shortName`, and `version` entries.
    """
    ensure_nonemptystring('managedmaster_url')
    plugins_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url),
        'pluginManager',
    )
    response = self._get_json(plugins_url, params={'depth': '2'})
    return response['plugins']  # type: ignore
@api_call
def install_managedmaster_plugin(
    self, managedmaster_url: str, plugin: str
) -> bool:
    """Install the requested plugin on the managed master.

    You may have to safe-restart the managed master after the
    plugin installation.

    # Required parameters

    - managedmaster_url: a non-empty string
    - plugin: a non-empty string, the plugin short name

    # Returned value

    A boolean.  True if the installation was successful.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_nonemptystring('plugin')
    # ???? add @latest or @current ?
    payload = f'<jenkins><install plugin="{plugin}" /></jenkins>'
    response = self._post(
        join_url(
            self.get_managedmaster_endpoint(managedmaster_url),
            'pluginManager/installNecessaryPlugins',
        ),
        data=bytes(payload, encoding='utf-8'),
        headers={'Content-Type': 'application/xml'},
    )
    return response.status_code == 200
@api_call
def provision_and_start_managedmaster(
    self, managedmaster_url: str
) -> None:
    """Provision and start the managed master.

    # Required parameters

    - managedmaster_url: a non-empty string
    """
    ensure_nonemptystring('managedmaster_url')
    action_url = join_url(managedmaster_url, 'provisionAndStartAction')
    self._post(action_url, data={'Submit': 'Yes'})
@api_call
def stop_managedmaster(self, managedmaster_url: str) -> bool:
    """Stop the managed master.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A boolean.  True if the command was successful.
    """
    ensure_nonemptystring('managedmaster_url')
    response = self._post(
        join_url(managedmaster_url, 'stopAction'),
        data={'Submit': 'Wait'},
    )
    return response.status_code == 200
@api_call
def acknowledge_managedmaster_error(self, managedmaster_url: str) -> None:
    """Acknowledge an error on the managed master.

    # Required parameters

    - managedmaster_url: a non-empty string
    """
    ensure_nonemptystring('managedmaster_url')
    action_url = join_url(managedmaster_url, 'acknowledgeErrorAction')
    self._post(action_url, data={'Submit': 'Yes'})
@api_call
def get_managedmaster_metrics(
    self, managedmaster_url: str
) -> Dict[str, Any]:
    """Return the managed master metrics.

    More info on metrics is available here:

    - <https://wiki.jenkins.io/display/JENKINS/Metrics+Plugin>
    - <https://go.cloudbees.com/docs/cloudbees-documentation/cje-user-guide/index.html#monitoring>

    # Required parameters

    - managedmaster_url: a non-empty string

    # Returned value

    A dictionary with `version`, `gauges`, `counters`,
    `histograms`, `meters`, and `timers` entries.  Each gauge is a
    dictionary with a single `value` entry, and each counter is a
    dictionary with a single `count` entry.
    """
    ensure_nonemptystring('managedmaster_url')
    metrics_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url),
        'metrics/currentUser/metrics',
    )
    return self._get_json(metrics_url)
@api_call
def run_managedmaster_script(
    self, managedmaster_url: str, script: str
) -> str:
    """Execute a groovy script on the managed master.

    # Required parameters

    - managedmaster_url: a non-empty string
    - script: a non-empty string

    # Returned value

    A string, the output produced by the script.  It is up to the
    caller to process this returned string.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_nonemptystring('script')
    script_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url),
        'scriptText',
    )
    return self._post(script_url, data={'script': script}).text
@api_call
def ping_managedmaster(
    self, managedmaster_url: str, path: str = ''
) -> bool:
    """Check whether the managed master is fully up and running.

    # Required parameters

    - managedmaster_url: a non-empty string

    # Optional parameters

    - path: a string (empty by default)

    # Returned value

    A boolean.  True if the managed master is fully up and
    running, False otherwise.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_instance('path', str)
    target = join_url(
        self.get_managedmaster_endpoint(managedmaster_url), path
    )
    return self.session().get(target).status_code == 200
@api_call
def build_managedmaster_job(
    self,
    managedmaster_url: str,
    path: str,
    params: Optional[Mapping[str, str]] = None,
) -> int:
    """Trigger a job build.

    # Required parameters

    - managedmaster_url: a non-empty string
    - path: a non-empty string

    # Optional parameters

    - params: a dictionary or None (None by default)

    # Returned value

    An integer, the _queueitem_ number.  This number is only valid
    for about five minutes after the job completes.  Use
    #get_managedmaster_queueitem() for more details on the job
    execution.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_nonemptystring('path')
    ensure_noneorinstance('params', dict)
    build_url = join_url(
        join_url(
            self.get_managedmaster_endpoint(managedmaster_url), path
        ),
        'build',
    )
    response = self._post(build_url, params=params)
    # The queue item number is the last segment of the returned
    # 'location' header.
    return int(response.headers['location'].strip('/').split('/')[-1])
@api_call
def get_managedmaster_queueitem(
    self, managedmaster_url: str, queueitem: int
) -> Dict[str, Any]:
    """Return queueitem details.

    # Required parameters

    - managedmaster_url: a non-empty string
    - queueitem: an integer

    # Returned value

    A dictionary with `_class`, `actions`, `blocked`, `buildable`,
    `cancelled`, `executable`, `id`, `inQueueSince`, `params`,
    `stuck`, `task`, `url`, and `why` entries.

    If the queueitem is still waiting for an executor, the `why`
    entry will not be None.  Once the queueitem is running (or has
    completed), `executable` holds information on the
    corresponding build: a dictionary with `_class`, `number`, and
    `url` entries.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_instance('queueitem', int)
    item_url = join_url(
        self.get_managedmaster_endpoint(managedmaster_url),
        f'queue/item/{queueitem}',
    )
    return self._get_json(item_url)
@api_call
def get_managedmaster_buildinfo(
    self, managedmaster_url: str, path: str, number: int
) -> Dict[str, Any]:
    """Return build details.

    # Required parameters

    - managedmaster_url: a non-empty string
    - path: a non-empty string
    - number: an integer, the build number

    # Returned value

    A dictionary with entries including `_class`, `actions`,
    `artifacts`, `building`, `changeSets`, `culprits`,
    `description`, `displayName`, `duration`,
    `estimatedDuration`, `executor`, `fullDisplayName`, `id`,
    `keepLog`, `nextBuild`, `number`, `previousBuild`, `queueId`,
    `result`, `timestamp`, and `url`.
    """
    ensure_nonemptystring('managedmaster_url')
    ensure_nonemptystring('path')
    ensure_instance('number', int)
    build_url = join_url(
        join_url(
            self.get_managedmaster_endpoint(managedmaster_url), path
        ),
        str(number),
    )
    return self._get_json(build_url)
####################################################################
# cbj projects
#
# list_project_jobs
# create_project_job
# list_project_roles
# add_project_role_filter
# list_project_groups
# create_project_group
# grant_project_group_role
# get_project_domains
# create_project_domain
# list_domain_credentials
# create_domain_credential
# delete_domain_credential
# add_group_user
# delete_group_user
@api_call
def list_project_jobs(self, project_url: str) -> List[Dict[str, Any]]:
    """Return the jobs that are direct children of a project.

    # Required parameters

    - project_url: a non-empty string

    # Returned value

    A list of _jobs_.  Each job is a dictionary with at least
    `_class`, `url`, and `name` entries.  A job whose `_class` is
    `'com.cloudbees.hudson.plugins.folder.Folder'` is a
    sub-project and may contain other jobs.

    # Raised exceptions

    An _ApiError_ is raised if the specified project is not a
    folder.
    """
    ensure_nonemptystring('project_url')
    return self._get_json(project_url)['jobs']  # type: ignore
@api_call
def create_project_job(
self,
project_url: str,
job_name: str,
config: Union[str, Dict[str, Any]],
) -> None:
"""Create a job in project.
The project must already exists.
# Required parameters
- project_url: a non-empty string
- job_name: a non-empty string
- config: an XML dict or an XML string
The `config` dictionary must follow the 'XML' dictionary
conventions.
# Returned value
None.
# Raised exceptions
An _ApiError_ exception is raised if a job or folder with the
specified name already exists.
"""
ensure_nonemptystring('project_url')
ensure_nonemptystring('job_name')
ensure_instance('config', (str, dict))
if isinstance(config, str):
data = config.strip()
else:
data = '<?xml version="1.0" encoding="UTF-8"?>' + dict_to_xml(
config
)
result = self._post(
join_url(project_url, f'createItem?name={job_name}'),
data=bytes(data, encoding='utf-8'),
headers={'Content-Type': 'application/xml'},
)
return result # type: ignore
@api_call
def list_project_roles(self, project_url: str) -> List[Dict[str, Any]]:
"""Return the list of roles for the specified project.
# Required parameters
- project_url: a string
# Returned value
A list of _roles_. Each role is a dictionary with the following
entries:
- description: a string
- id: a string
- filterable: a boolean
- shortUrl: a string
- grantedPermissions: a list of strings
The returned roles are expanded.
"""
ensure_nonemptystring('project_url')
result = self._get_json(
join_url(project_url, 'roles'), params={'depth': '1'}
)
return result['roles'] # type: ignore
@api_call
def create_project_group(self, project_url: str, group: str) -> None:
"""Create a group in project.
The project must already exist.
# Required parameters
- project_url: a non-empty string
- group: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('project_url')
ensure_nonemptystring('group')
result = self._post(
join_url(project_url, f'groups/createGroup?name={group}')
)
return result # type: ignore
@api_call
def grant_project_group_role(
self,
project_url: str,
group: str,
role: str,
offset: int = 0,
inherited: Optional[bool] = False,
) -> None:
"""Grant a role to group in project.
The project and group must already exist.
# Required parameters
- project_url: a non-empty string
- group: a non-empty string
- role: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('project_url')
ensure_nonemptystring('group')
ensure_nonemptystring('role')
ensure_instance('offset', int)
ensure_noneorinstance('inherited', bool)
result = self._post(
join_url(
project_url,
f'groups/{group}/grantRole?role={role}&offset={offset}&inherited={inherited is True}',
)
)
return result # type: ignore
@api_call
def add_project_role_filter(self, project_url: str, role: str) -> None:
"""Add role filter to project.
# Required parameters
- project_url: a non-empty string
- role: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('project_url')
ensure_nonemptystring('role')
result = self._post(
join_url(project_url, f'groups/addRoleFilter?name={role}')
)
return result # type: ignore
@api_call
def list_project_groups(self, project_url: str) -> List[Dict[str, Any]]:
"""Return the list of groups for the specified project.
# Required parameters
- project_url: a non-empty string
# Returned value
A list of _groups_. Each group is a dictionary with the
following entries:
- description: a string
- name: a string
- url: a string
- members: a list of strings
- roles: a list of strings
- roleAssignments: a list of dictionaries
- users: a list of strings
- groups: a list of strings
The returned groups are expanded.
"""
ensure_nonemptystring('project_url')
result = self._get_json(
join_url(project_url, 'groups'), params={'depth': '1'}
)
return result['groups'] # type: ignore
@api_call
def get_project_domains(
self, project_url: str
) -> Dict[str, Dict[str, Any]]:
"""Return the domains for the specified project.
# Required parameters
- project_url: a non-empty string
# Returned value
A dictionary with the following possible entries:
- system: a dictionary
- folder: a dictionary
Some entries may be missing.
The `system` and `folder` dictionaries have the same format.
They contain the following entries:
- _class: a string
- domains: a dictionary
Each entry in the `domains` dictionary is a domain. The key is
the domain name, and the value is a dictionary with a `_class`
entry.
The `'_'` domain is the default domain.
"""
ensure_nonemptystring('project_url')
result = self._get_json(
join_url(project_url, 'credentials'), params={'depth': '1'}
)
return result['stores'] # type: ignore
@api_call
def create_project_domain(
self, project_url: str, config: str, system: bool = False
) -> None:
"""Create new domain for project.
# Required parameters
- project_url: a non-empty string
- config: a non-empty 'XML' string
# Optional parameters
- system: a boolean (False by default)
Non-system domains are located in the 'folder' store. System
domains are located in the 'system' store.
# Returned value
None.
# Raised exceptions
An _ApiError_ exception is raised if the domain already exists.
"""
ensure_nonemptystring('project_url')
ensure_instance('config', str)
ensure_instance('system', bool)
store = 'system' if system else 'folder'
data = config.strip()
result = self._post(
join_url(project_url, f'/credentials/store/{store}/createDomain'),
data=bytes(data, encoding='utf-8'),
headers={'Content-Type': 'application/xml'},
)
return result # type: ignore
@api_call
def list_domain_credentials(self, domain_url: str) -> List[Dict[str, Any]]:
"""Return the list of credentials for domain.
# Required parameters
- domain_url: a non-empty string
# Returned value
A list of _credentials_. A credential is a dictionary with the
following entries:
- id: a string, the credential id
- displayName: a string
- typeName: a string
- description: a string
- fingerprint: ?
- fullname: a string
Credentials as returned by this function contain no sensible
items (passwords, secret files, private keys, ...)
"""
ensure_nonemptystring('domain_url')
result = self._get_json(domain_url, params={'depth': '2'})
return result['credentials'] # type: ignore
@api_call
def get_domain_credential(
self, domain_url: str, credential_id: str
) -> str:
"""Get credential configuration.
# Required parameters
- domain_url: a non-empty string
- credential_id: a non-empty string
# Returned value
A string, the XML configuration element. Refer to
#create_domain_credential() for more details on this value.
Secrets in the returned value, if any, will be redacted.
"""
ensure_nonemptystring('domain_url')
ensure_nonemptystring('credential_id')
return (
self.session()
.get(
join_url(domain_url, f'credential/{credential_id}/config.xml')
)
.text
)
@api_call
def create_domain_credential(self, domain_url: str, config: str) -> None:
"""Create a credential in a domain.
The domain must already exists.
It may be used to add credentials to a credentials folder.
# Required parameters
- domain_url: a non-empty string
- config: an 'XML' string
`domain_url` is something like:
```
'https://jenkins.example.com/FOO/job/Bar/credentials/store/folder/domain/Test'
```
`config` is something like:
```xml
<?xml version="1.0"?>
<com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl
plugin="[email protected]">
<id>ML</id>
<description>my creds</description>
<username>me</username>
<password>
foobar
</password>
</com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl>
```
TODO: check password validity (spaces striped? ...)
# Returned value
None.
# Raised exceptions
An _ApiError_ exception is raised if the credentials already
exists.
"""
ensure_nonemptystring('domain_url')
ensure_instance('config', str)
result = self._post(
join_url(domain_url, 'createCredentials'),
data=bytes(config.strip(), encoding='utf-8'),
headers={'Content-Type': 'application/xml'},
)
return result # type: ignore
@api_call
def delete_domain_credential(
self, domain_url: str, credential_id: str
) -> bool:
"""Delete a credential in a domain.
# Required parameters
- domain_url: a non-empty string
- credential_id: a non-empty string
# Returned value
A boolean. True if successful.
"""
ensure_nonemptystring('domain_url')
ensure_nonemptystring('credential_id')
return self.delete_item(
join_url(domain_url, f'credential/{credential_id}')
)
@api_call
def add_group_user(self, group_url: str, user: str) -> None:
"""Add user to group.
# Required parameters
- group_url: a non-empty string
- user: a non-empty string
May not be the cleanest way to do it (using a form action).
"""
ensure_nonemptystring('group_url')
ensure_nonemptystring('user')
self._post(join_url(group_url, 'submitNewUser'), data={'user': user})
@api_call
def delete_group_user(self, group_url: str, user: str) -> None:
"""Delete user from group.
# Requirement parameters
- group_url: a non-empty string
- user: a non-empty string
May not be the cleanest way to do it (using a form action).
"""
ensure_nonemptystring('group_url')
ensure_nonemptystring('user')
self._post(
join_url(group_url, 'submitRemoveMember'),
params={'member': user, 'type': 'USER'},
)
@api_call
def migrate_group_member_as_user(self, group_url: str, user: str) -> None:
"""Migrate member of group as user
# Requirement parameters
- group_url: a non-empty string
- user: a non-empty string
"""
ensure_nonemptystring('group_url')
ensure_nonemptystring('user')
self._post(
join_url(group_url, 'submitMigrateMember'),
params={'member': user, 'as': 'user'},
)
####################################################################
# cbj items
#
# get_item_configuration (/config.xml)
# update_item_configuration (post config.xml)
# delete_item (/doDelete)
# TODO rename_item (/doRename)
# TODO disable_item (/disable)
# TODO enable_item (/enable)
@api_call
def get_item_configuration(self, url: str) -> Dict[str, Any]:
"""Return an 'XML' dictionary containing the item configuration.
# Required parameters
- url: a non-empty string, the item url
# Returned value
A dictionary that follows the 'XML' dictionary conventions.
"""
ensure_nonemptystring('url')
return xml_to_dict(
ET.XML(self.session().get(join_url(url, 'config.xml')).text)
)
@api_call
def update_item_configuration(
self, url: str, config: Dict[str, Any]
) -> None:
"""Update item configuration.
# Required parameters
- url: a non-empty string, the item url
- config: an 'XML' dictionary
"""
ensure_nonemptystring('url')
ensure_instance('config', dict)
result = self._post(
join_url(url, 'config.xml'),
data=bytes(dict_to_xml(config), encoding='utf-8'),
)
return result # type: ignore
@api_call
def delete_item(
self,
url: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> bool:
"""Delete item.
# Required parameters
- url: a non-empty string
# Returned value
A boolean. True if successful.
# Raised exceptions
Raises an _ApiError_ exception otherwise.
"""
ensure_nonemptystring('url')
result = self._post(join_url(url, 'doDelete'), params=params)
return result.status_code == 200
####################################################################
# cbj helpers
def _get(
self,
api_url: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> Dict[str, Any]:
"""Returns cloudbeesjenkins api call results.
# Required parameters
- api_url: a non-empty string (an URL)
# Optional parameters
- params: a dictionary
"""
return self.session().get(api_url, params=params)
def _get_json(
self,
api_url: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> Dict[str, Any]:
"""Returns cloudbeesjenkins api call results.
# Required parameters
- api_url: a non-empty string (an URL)
# Optional parameters
- params: a dictionary
"""
result = (
self.session()
.get(join_url(api_url, 'api/json'), params=params)
.json()
)
return result # type: ignore
def _post(
self,
api_url: str,
data: Optional[Union[MutableMapping[str, str], bytes]] = None,
headers: Optional[Mapping[str, str]] = None,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
files: Optional[Mapping[str, Any]] = None,
) -> requests.Response:
master = self._extract_master(api_url)
if self.crumb is None or self.crumb_master != master:
self.crumb_master = master
self._initialize_crumb_data()
_headers = dict(headers or {})
if self.crumb is not None:
_headers[self.crumb['crumbRequestField']] = self.crumb['crumb']
result = self.session().post(
api_url, data=data, headers=_headers, params=params, files=files
)
if result.status_code == 403:
print('DEBUG: 403 403 403 403 403')
print('DEBUG: crumb:', self.crumb, '.')
print('DEBUG: crumb_master:', self.crumb_master, '.')
print('DEBUG: master:', master, '.')
print('DEBUG: api_url:', api_url, '.')
print('DEBUG: headers:', headers, '.')
return result
# crumbs helpers
def _extract_master(self, url: str) -> str:
# the operations center does not support crumbs. we have to
# get a crumb from the "right" managed instance
# ?? url = re.match('https://[^/]+/(\w+).*', url).group(1)
master = url[len(self.url) + 1 :]
master = master[: master.find('/')]
return master
def _initialize_crumb_data(self) -> None:
try:
if self.crumb_master is None:
raise ApiError('crumb_master is None.')
self.crumb = (
self.session()
.get(
join_url(
join_url(self.url, self.crumb_master),
'crumbIssuer/api/json',
),
)
.json()
)
except Exception:
self.crumb = None | zabel-elements | /zabel_elements-1.15.0-py3-none-any.whl/zabel/elements/clients/base/jenkins.py | jenkins.py |
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
add_if_specified,
api_call,
ensure_in,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_noneornonemptystring,
ensure_onlyone,
join_url,
)
########################################################################
########################################################################
# Helpers
def _get_atl_token(html: str) -> str:
atl_token = html[html.find('"atl_token"') :]
atl_token = atl_token[atl_token.find('value="') + 7 :]
return atl_token[: atl_token.find('"')]
# Confluence low-level api

# Content types and statuses recognized by the Confluence REST API
# (used for parameter validation in the content-related methods).
CONTENT_TYPES = ['page', 'blogpost', 'comment', 'attachment']
CONTENT_STATUSES = ['current', 'trashed', 'historical', 'draft']
class BearerAuth(requests.auth.AuthBase):
    """A Bearer handler class for requests."""

    def __init__(self, pat: str):
        self.pat = pat

    def __eq__(self, other):
        # two handlers are equal when they carry the same token
        return getattr(other, 'pat', None) == self.pat

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # inject the bearer token into the outgoing request
        r.headers['Authorization'] = 'Bearer ' + self.pat
        return r
class Confluence:
"""Confluence Low-Level Wrapper.
# Reference URL
<https://docs.atlassian.com/ConfluenceServer/rest/latest>
<https://developer.atlassian.com/confdev/confluence-server-rest-api>
<https://docs.atlassian.com/atlassian-confluence/REST/latest-server/>
<https://developer.atlassian.com/server/confluence/remote-confluence
-methods>
A non-admin interface (no API for user&group admin features) to
Confluence.
Groups and users are defined on Jira or Crowd. Changes can take up
to one hour to propagate.
# Implemented features
- search
- groups&users
- pages
- spaces
What is accessible through the API depends on account rights.
Whenever applicable, the provided features handle pagination (i.e.,
they return all relevant elements, not only the first n).
# Sample use
```python
>>> from zabel.elements.clients import Confluence
>>>
>>> url = 'https://confluence.example.com'
>>> confluence = Confluence(url, basic_auth=(user, token))
>>> confluence.get_users()
```
"""
    def __init__(
        self,
        url: str,
        basic_auth: Optional[Tuple[str, str]] = None,
        oauth: Optional[Dict[str, str]] = None,
        bearer_auth: Optional[str] = None,
        verify: bool = True,
    ) -> None:
        """Create a Confluence instance object.

        You can only specify either `basic_auth`, `bearer_auth`, or
        `oauth`.

        The `oauth` dictionary is expected to have the following
        entries:

        - access_token: a string
        - access_token_secret: a string
        - consumer_key: a string
        - key_cert: a string

        Please note that the `bearer_auth` support does not give access
        to JSON-RPC methods.

        # Required parameters

        - url: a non-empty string
        - basic_auth: a string tuple (user, token)
        - bearer_auth: a string
        - oauth: a dictionary

        # Optional parameters

        - verify: a boolean (True by default)

        `verify` can be set to False if disabling certificate checks for
        Confluence communication is required.  Tons of warnings will
        occur if this is set to False.
        """
        ensure_nonemptystring('url')
        # exactly one of the three authentication methods is accepted
        ensure_onlyone('basic_auth', 'oauth', 'bearer_auth')
        ensure_noneorinstance('basic_auth', tuple)
        ensure_noneorinstance('oauth', dict)
        ensure_noneorinstance('bearer_auth', str)
        ensure_instance('verify', bool)
        self.url = url
        self.basic_auth = basic_auth
        self.oauth = oauth
        self.bearer_auth = bearer_auth
        self.verify = verify
        if basic_auth is not None:
            self.auth = basic_auth
        if oauth is not None:
            # imported lazily so requests_oauthlib/oauthlib are only
            # required when OAuth is actually used
            from requests_oauthlib import OAuth1
            from oauthlib.oauth1 import SIGNATURE_RSA

            self.auth = OAuth1(
                oauth['consumer_key'],
                'dont_care',
                oauth['access_token'],
                oauth['access_token_secret'],
                signature_method=SIGNATURE_RSA,
                rsa_key=oauth['key_cert'],
                signature_type='auth_header',
            )
        if bearer_auth is not None:
            self.auth = BearerAuth(bearer_auth)
        # prepare_session wires the chosen auth handler into a session
        # factory used by all request-issuing methods
        self.session = prepare_session(self.auth, verify=verify)
def __str__(self) -> str:
return '{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
if self.basic_auth:
rep = self.basic_auth[0]
elif self.bearer_auth:
rep = f'***{self.bearer_auth[-6:]}'
else:
rep = self.oauth['consumer_key'] # type: ignore
return f'<{self.__class__.__name__}: {self.url!r}, {rep!r}>'
####################################################################
# Confluence search
#
# search
@api_call
def search(
self,
cql: str,
cql_context: Optional[str] = None,
start: Optional[int] = None,
limit: int = 25,
expand: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Return the result of a query.
# Required parameters
- cql: a string
# Optional parameters
- cql_context: a string or None (None by default)
- start: an integer or None (None by default)
- limit: an integer (`25` by default)
- expand: a string or None (None by default)
# Returned value
A possibly empty list of items.
Items are dictionaries with the following entries (assuming the
default `expand` values):
- title: a string
- type: a string
- id: a string or an integer
- status: a string
- restrictions: a dictionary
# Raised exceptions
If the query is invalid, an _ApiError_ is raised.
"""
ensure_instance('cql', str)
ensure_noneorinstance('cql_context', str)
ensure_noneorinstance('start', int)
ensure_noneorinstance('limit', int)
ensure_noneorinstance('expand', str)
params = {'cql': cql, 'limit': str(limit)}
add_if_specified(params, 'start', start)
add_if_specified(params, 'cqlContext', cql_context)
add_if_specified(params, 'expand', expand)
return self._collect_data('content/search', params=params)
####################################################################
# Confluence groups&users
#
# list_groups
# create_group*
# delete_group*
# add_group_user*
# remove_group_user*
# list_group_members
# get_user
# create_user*
# delete_user*
# update_user*
# update_user2*
# list_user_groups
# get_user_current
# deactivate_user
#
# '*' denotes an API based on json-rpc, deprecated but not (yet?)
# available as a REST API. It is not part of the method name.
    @api_call
    def list_groups(self) -> List[Dict[str, Any]]:
        """Return a list of confluence groups.

        # Returned value

        A list of _groups_.  Each group is a dictionary with the
        following entries:

        - name: a string
        - type: a string ('group')
        - _links: a transient dictionary

        `_links` is a dictionary with the following entries:

        - self: a string

        Handles pagination (i.e., it returns all groups, not only the
        first n groups).
        """
        # _collect_data follows pagination links transparently
        return self._collect_data('group')
@api_call
def create_group(self, group_name: str) -> bool:
"""Create a new group.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
`group_name` must be in lower case.
# Required parameters
- group_name: a non-empty string
# Returned value
A boolean. True if successful, False otherwise.
"""
ensure_nonemptystring('group_name')
return (
self.session()
.post(
join_url(
self.url, '/rpc/json-rpc/confluenceservice-v2/addGroup'
),
json=[group_name],
)
.text
== 'true'
)
@api_call
def delete_group(self, group_name: str) -> bool:
"""Delete group.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
# Required parameters
- group_name: a non-empty string
# Returned value
A boolean. True if successful, False otherwise.
"""
ensure_nonemptystring('group_name')
return (
self.session()
.post(
join_url(
self.url, '/rpc/json-rpc/confluenceservice-v2/removeGroup'
),
json=[group_name, None],
)
.text
== 'true'
)
@api_call
def add_group_user(self, group_name: str, user_name: str) -> bool:
"""Add user to group.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
# Required parameters
- group_name: a non-empty string
- user_name: a non-empty string
# Returned value
A boolean. True if successful, False if the operation failed.
"""
ensure_nonemptystring('group_name')
ensure_nonemptystring('user_name')
return (
self.session()
.post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/addUserToGroup',
),
json=[user_name, group_name],
)
.text
== 'true'
)
    @api_call
    def remove_group_user(self, group_name: str, user_name: str) -> bool:
        """Remove user from group.

        !!! warning
            This uses the json-rpc interface that is deprecated (but
            there is no substitute as of this writing).

        # Required parameters

        - group_name: a non-empty string
        - user_name: a non-empty string

        # Returned value

        A boolean.  True if successful, False if the operation failed.
        """
        ensure_nonemptystring('group_name')
        ensure_nonemptystring('user_name')
        # the json-rpc endpoint answers with a literal 'true' on success
        return (
            self.session()
            .post(
                join_url(
                    self.url,
                    '/rpc/json-rpc/confluenceservice-v2/removeUserFromGroup',
                ),
                json=[user_name, group_name],
            )
            .text
            == 'true'
        )
@api_call
def list_group_members(
self, group_name: str, expand: Optional[str] = None
) -> List[Dict[str, Any]]:
"""Return a list of users in the group.
# Required parameters
- group_name: a string
# Returned value
A list of _users_. Each user is a dictionary with the following
entries:
- username: a string
- displayName: a string
- userKey: a string
- profilePicture: a dictionary
- type: a string (`'known'`)
Handles pagination (i.e., it returns all group members, not only
the first n users).
"""
ensure_nonemptystring('group_name')
params = {}
add_if_specified(params, 'expand', expand)
ensure_noneorinstance('expand', str)
return self._collect_data(f'group/{group_name}/member', params=params)
@api_call
def get_user(
self,
user_name: Optional[str] = None,
key: Optional[str] = None,
expand: Optional[str] = None,
) -> Dict[str, Any]:
"""Return confluence user details.
# Required parameters
- `user_name` or `key`: a non-empty string
You can only specify one of them.
# Optional parameters
- expand: a string or None (None by default)
# Returned value
A dictionary with the following entries (assuming the default
for 'expand'):
type: a string
username: a string
userKey: a string
profilePicture: a dictionary
displayName: a string
It may also contains 'transient' entries (i.e., entries starting
with '_').
"""
ensure_onlyone('user_name', 'key')
ensure_noneornonemptystring('user_name')
ensure_noneornonemptystring('key')
ensure_noneorinstance('expand', str)
if user_name is not None:
params = {'username': user_name}
else:
params = {'key': key} # type: ignore
add_if_specified(params, 'expand', expand)
result = self._get('user', params=params)
return result # type: ignore
@api_call
def create_user(
self,
name: str,
password: Optional[str],
email_address: str,
display_name: str,
) -> bool:
"""Create a new user.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
`name` must be in lower case.
# Required parameters
- name: a non-empty string
- password: a non-empty string or None
- email_address: a non-empty string
- display_name: a string
# Returned value
True if the creation was successful, False otherwise.
"""
ensure_nonemptystring('name')
ensure_noneornonemptystring('password')
ensure_nonemptystring('email_address')
ensure_instance('display_name', str)
user = {'email': email_address, "fullname": display_name, "name": name}
return (
self.session()
.post(
join_url(
self.url, '/rpc/json-rpc/confluenceservice-v2/addUser'
),
json=[user, password],
)
.text
== ''
)
    @api_call
    def delete_user(self, user_name: str) -> bool:
        """Delete user.

        !!! warning
            This uses the json-rpc interface that is deprecated (but
            there is no substitute as of this writing).

        # Required parameters

        - user_name: a non-empty string

        # Returned value

        True if the deletion was successful, False otherwise.
        """
        ensure_nonemptystring('user_name')
        # the json-rpc endpoint answers with a literal 'true' on success
        return (
            self.session()
            .post(
                join_url(
                    self.url, '/rpc/json-rpc/confluenceservice-v2/removeUser'
                ),
                json=[user_name],
            )
            .text
            == 'true'
        )
@api_call
def update_user(
self, user_name: str, user: Dict[str, Any]
) -> Dict[str, Any]:
"""Update user.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
`user` is a dictionary with possible replacement values for
those two entries: `fullname` and `email`. Other entries are
ignored.
# Required parameters
- user_name: a non-empty string
- user: a dictionary
# Returned value
True if the update was successful, False otherwise.
"""
ensure_nonemptystring('user_name')
ensure_instance('user', dict)
request = {'name': user_name}
if 'fullname' in user:
request['fullname'] = user['fullname']
if 'email' in user:
request['email'] = user['email']
return (
self.session()
.post(
join_url(
self.url, '/rpc/json-rpc/confluenceservice-v2/editUser'
),
json=[request],
)
.text
== 'true'
)
    @api_call
    def update_user2(self, user_name: str, new: str) -> bool:
        """Update username.

        # Required parameters

        - user_name: a non-empty string
        - new: a non-empty string

        # Returned value

        True if the update was successful, False otherwise.
        """
        ensure_nonemptystring('user_name')
        ensure_nonemptystring('new')
        user = self.get_user(user_name)
        # there is no REST API for renaming a user: scrape the admin
        # edit-user form and submit it back with the new name
        form = self.session().get(
            join_url(self.url, '/admin/users/edituser.action'),
            params={'username': user_name},
        )
        # extract the CSRF token, user key, and current email from the
        # returned HTML (fragile: depends on the page markup)
        atl_token = form.text.split('atl_token=')[1].split('&')[0]
        user_key = form.text.split(';userKey=')[1].split('"')[0]
        email = (
            form.text.split('id="email"')[1].split('value="')[1].split('"')[0]
        )
        result = self.session().post(
            self.url + '/admin/users/doedituser.action',
            params={'atl_token': atl_token, 'userKey': user_key},
            data={
                'username': new,
                'email': email,
                'fullName': user['displayName'],
                'confirm': 'Submit',
            },
            # reuse the form's session cookies so the CSRF token matches
            cookies=form.cookies,
            headers={
                'Content-Type': 'application/x-www-form-urlencoded',
                'X-Atlassian-Token': 'no-check',
            },
        )
        return result.status_code == 200
    @api_call
    def deactivate_user(self, user_name: str) -> bool:
        """Deactivate confluence user.

        !!! warning
            This uses the json-rpc interface that is deprecated (but
            there is no substitute as of this writing).

        # Required parameters

        - `user_name`: a non-empty string

        # Returned value

        True if the deactivation was successful, False otherwise.
        """
        ensure_nonemptystring('user_name')
        # the json-rpc endpoint answers with a literal 'true' on success
        return (
            self.session()
            .post(
                join_url(
                    self.url,
                    '/rpc/json-rpc/confluenceservice-v2/deactivateUser',
                ),
                json=[user_name],
            )
            .text
            == 'true'
        )
@api_call
def list_user_groups(
self,
user_name: Optional[str] = None,
key: Optional[str] = None,
expand: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Return a list of groups user is a member of.
# Required parameters
- `user_name` or `key`: a non-empty string
You can only specify one of them.
# Optional parameters
- expand: a string or None (None by default)
# Returned value
A list of _groups_. Groups are dictionaries with the following
entries (assuming the default for `expand`):
- type: a string (`'group'`)
- name: a string
Handles pagination (i.e., it returns all groups, not only the
first _n_ groups the user is a member of).
"""
ensure_onlyone('user_name', 'key')
ensure_noneornonemptystring('user_name')
ensure_noneornonemptystring('key')
ensure_noneorinstance('expand', str)
if user_name is not None:
params = {'username': user_name}
else:
params = {'key': key} # type: ignore
add_if_specified(params, 'expand', expand)
return self._collect_data('user/memberof', params=params)
@api_call
def get_user_current(self, expand: Optional[str] = None) -> Dict[str, Any]:
"""Return confluence current user details.
# Optional parameters
- expand: a string or None (None by default)
# Returned value
A dictionary with the following entries (assuming the default
for `expand`):
- type: a string
- username: a string
- userKey: a string
- profilePicture: a dictionary
- displayName: a string
It may also contains 'transient' entries (i.e., entries starting
with '_').
"""
ensure_noneorinstance('expand', str)
params: Dict[str, str] = {}
add_if_specified(params, 'expand', expand)
result = self._get('user/current', params=params)
return result # type: ignore
####################################################################
# Confluence spaces
#
# list_spaces
# get_space
# get_space_content
# list_space_pages
# list_space_blogposts
# list_space_permissions*
# list_space_permissionsets*
# create_space
# add_space_label*
# remove_space_permission*
# add_space_permissions*
#
# '*' denotes an API based on json-rpc, deprecated but not (yet?)
# available as a REST API. It is not part of the method name.
    @api_call
    def list_spaces(self) -> List[Dict[str, Any]]:
        """Return a list of spaces.

        # Returned value

        A list of _spaces_.  Each space is a dictionary with the
        following entries:

        - key: a string
        - type: a string
        - name: a string
        - id: an integer
        - _links: a dictionary
        - _expandable: a dictionary

        Handles pagination (i.e., it returns all spaces, not only the
        first _n_ spaces).
        """
        # _collect_data follows pagination links transparently
        return self._collect_data('space')
@api_call
def get_space(
self, space_key: str, expand: Optional[str] = None
) -> Dict[str, Any]:
"""Return space details.
# Required parameters
- space_key: a non-empty string
# Optional parameters
- expand: a string or None (None by default)
# Returned value
A dictionary with the following entries:
- id: an integer
- key: a string
- name: a string
- type: a string
"""
ensure_nonemptystring('space_key')
ensure_noneorinstance('expand', str)
if expand is not None:
params: Optional[Dict[str, str]] = {'expand': expand}
else:
params = None
result = self._get(f'space/{space_key}', params=params)
return result # type: ignore
@api_call
def get_space_content(
self, space_key: str, expand: Optional[str] = None, depth: str = 'all'
) -> Dict[str, Any]:
"""Return space content.
# Required parameters
- space_key: a non-empty string
# Optional parameters
- expand: a string or None (None by default)
- depth: a string (`'all'` by default)
# Returned value
A dictionary with the following entries:
- page: a dictionary
- blogpost: a dictionary
- _links: a dictionary
`page` and `blogpost` are dictionaries with the following
entries:
- results: a list of dictionaries
- size: an integer
- limit: an integer
- start: an integer
- _links: a dictionary
`_links` is a dictionary with the following entries:
- context: a string
- base: a string (an URL)
"""
ensure_nonemptystring('space_key')
ensure_noneorinstance('expand', str)
ensure_in('depth', ['all', 'root'])
params = {'depth': depth}
add_if_specified(params, 'expand', expand)
return self._get(f'space/{space_key}/content') # type: ignore
@api_call
def list_space_pages(
self, space_key: str, expand: Optional[str] = None, limit: int = 200
) -> List[Dict[str, Any]]:
"""Return a list of all space pages.
# Required parameters
- space_key: a non-empty string
# Optional parameters
- expand: a string or None (None by default)
- limit: an integer
# Returned value
A list of _pages_. Each page is a dictionary. Refer to
#get_page() for details on its content.
"""
ensure_nonemptystring('space_key')
ensure_noneornonemptystring('expand')
params = {'limit': limit}
add_if_specified(params, 'expand', expand)
return self._collect_data(
f'space/{space_key}/content/page',
params,
)
@api_call
def list_space_blogposts(
self, space_key: str, expand: Optional[str] = None
) -> List[Dict[str, Any]]:
"""Return a list of all space blog posts.
# Required parameters
- space_key: a non-empty string
# Optional parameters
- expand: a string or None (None by default)
# Returned value
A list of _blog posts_. Each blog post is a dictionary.
"""
ensure_nonemptystring('space_key')
ensure_noneornonemptystring('expand')
return self._collect_data(
f'space/{space_key}/content/blogpost',
{'expand': expand} if expand else None,
)
@api_call
def list_space_permissions(self) -> List[str]:
"""Return the list of all possible permissions for spaces.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
# Returned value
A list of strings.
"""
return list(
set(
self.session()
.post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/getSpaceLevelPermissions',
),
json=[],
)
.json()
)
)
@api_call
def list_space_permissionsets(
self, space_key: str
) -> List[Dict[str, Any]]:
"""Return a list of all permissionsets for space.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
# Required parameters
- space_key: a non-empty string
# Returned value
A list of _permissionsets_. Each permissionset is a dictionary
with the following entries:
- type: a string
- spacePermissions: a list of dictionaries
`type` is a space permission (as returned by
#list_space_permissions()).
Dictionaries in `spacePermissions` have the following entries:
- type: a string
- groupName: a string
- userName: a string
"""
ensure_nonemptystring('space_key')
result = self.session().post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/getSpacePermissionSets',
),
json=[space_key],
)
return result # type: ignore
@api_call
def create_space(
self,
space_key: str,
name: str,
description: Optional[str] = None,
public: bool = True,
) -> Dict[str, Any]:
"""Create a new public space.
# Required parameters
- space_key: a non-empty string
- name: a non-empty string
# Optional parameters
- description: a non-empty string or None (None by default)
- public: a boolean (True by default)
# Returned value
A dictionary with the following entries:
- id: an integer
- key: a string
- name: a string
- description: a dictionary
- metadata: a dictionary
- _links: a dictionary
Some entries may be missing, and there may be additional ones.
"""
ensure_nonemptystring('space_key')
ensure_nonemptystring('name')
ensure_noneornonemptystring('description')
ensure_instance('public', bool)
definition: Dict[str, Any] = {'key': space_key, 'name': name}
if description:
definition['description'] = {
'plain': {'value': description, 'representation': 'plain'}
}
result = self._post(
'space' if public else 'space/_private', definition
)
return result # type: ignore
@api_call
def add_space_label(self, space_key: str, label: str) -> bool:
"""Add label to space.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
# Required parameters
- space_key: a non-empty string
- label: a string
# Returned value
True if successful.
"""
ensure_nonemptystring('space_key')
ensure_nonemptystring('label')
result = self.session().post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/addLabelByNameToSpace',
),
json=[f'team:{label}', space_key],
)
return result # type: ignore
@api_call
def remove_space_permission(
self, space_key: str, entity: str, permission: str
) -> bool:
"""Remove permission from space.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
The permission is removed from the existing entity
permissions. (It is not an error to remove a given permission
from an entity multiple times.)
An entity is either a group or a user. It must be known and
visible to Confluence.
# Required parameters
space_key: a non-empty string
entity: a non-empty string
permission: a non-empty string
# Returned value
True if successful.
"""
ensure_nonemptystring('space_key')
ensure_nonemptystring('entity')
ensure_nonemptystring('permission')
result = self.session().post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/removePermissionFromSpace',
),
json=[permission, entity, space_key],
)
return result # type: ignore
@api_call
def add_space_permissions(
self, space_key: str, entity: str, permissions: List[str]
) -> bool:
"""Add permissions to space.
!!! warning
This uses the json-rpc interface that is deprecated (but
there is no substitute as of this writing).
An `entity` is either a group or a user. It must be known and
visible to Confluence.
The permissions are added to the existing entity permissions.
(It is not an error to add a given permission to an entity
multiple times.)
# Required parameters
- space_key: a non-empty string
- entity: a non-empty string
- permissions: a list of strings
# Returned value
True if successful.
"""
ensure_nonemptystring('space_key')
ensure_nonemptystring('entity')
ensure_instance('permissions', list)
result = self.session().post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/addPermissionsToSpace',
),
json=[permissions, entity, space_key],
)
return result # type: ignore
####################################################################
# Confluence pages
#
# search_pages
# get_page
# create_page
# update_page
# delete_page
# list_page_versions
# delete_page_version
# list_page_labels
# add_page_labels
# list_page_children
# list_page_attachments
# add_page_attachment
# list_page_restrictions
# set_page_restrictions
@api_call
def search_pages(
self,
space_key: str,
status: str = 'current',
typ: str = 'page',
title: Optional[str] = None,
expand: Optional[str] = None,
start: Optional[int] = None,
posting_day: Optional[str] = None,
limit: int = 25,
) -> List[Dict[str, Any]]:
"""Return a list of contents.
# Required parameters
- space_key: a string
# Optional parameters
- status: a string (`'current'` by default)
- typ: a string (`'page'` by default)
- title: a string or None (None by default)
- expand: a string or None (None by default)
- start: an integer or None (None by default)
- posting_day: a string or None (None by default). **Required
if `typ` = `'blogpost'`.**
- limit: an integer (`25` by default)
# Returned value
A possibly empty list of items. Items are dictionaries.
Assuming the default `expand` values, an item contains the
following entries:
- title: a string
- type: a string
- id: an integer or a string
- status: a string
- extensions: a dictionary
"""
ensure_instance('space_key', str)
ensure_in('status', ['current', 'any', 'trashed'])
ensure_in('typ', ['page', 'blogpost'])
if typ == 'page':
ensure_nonemptystring('title')
if typ == 'blogpost':
ensure_nonemptystring('posting_day')
params = {'spaceKey': space_key, 'limit': str(limit), 'status': status}
add_if_specified(params, 'type', typ)
add_if_specified(params, 'title', title)
add_if_specified(params, 'expand', expand)
add_if_specified(params, 'postingDay', posting_day)
add_if_specified(params, 'start', start)
return self._collect_data('content', params=params)
@api_call
def list_page_children(
self,
page_id: Union[str, int],
typ: str,
expand: Optional[str] = None,
start: Optional[int] = None,
limit: int = 25,
parent_version: int = 0,
) -> List[Dict[str, Any]]:
"""Return a list of contents.
Valid values for `typ` are those in `CONTENT_TYPES`.
# Required parameters
- page_id: an integer or a string
- typ: a string
# Optional parameters
- expand: a string or None (None by default)
- start: an integer or None (None by default)
- limit: an integer (`25` by default)
- parent_version: an integer (`0` by default)
# Returned value
A possibly empty list of items. Items are dictionaries.
Assuming the default `expand` values, an item contains the
following entries:
- title: a string
- type: a string
- id: an integer or a string
- status: a string
- extensions: a dictionary
"""
ensure_instance('page_id', (str, int))
ensure_in('typ', CONTENT_TYPES)
ensure_noneornonemptystring('expand')
ensure_noneorinstance('start', int)
ensure_instance('limit', int)
ensure_instance('parent_version', int)
api = f'content/{page_id}/child'
if typ is not None:
api += f'/{typ}'
params = {'limit': str(limit), 'parentVersion': str(parent_version)}
add_if_specified(params, 'expand', expand)
add_if_specified(params, 'start', start)
return self._collect_data(api, params=params)
@api_call
def get_page(
self,
page_id: Union[str, int],
expand: str = 'body.storage,version',
version: Optional[int] = None,
) -> Dict[str, Any]:
"""Return the definition of a page.
# Required parameters
- page_id: an integer or a string
# Optional parameters
- expand: a string (`'body.storage,version'` by default)
- version: an integer or None (None by default)
# Returned value
A dictionary with the following entries (assuming the default
for `expand`):
- type: a string
- title: a string
- id: a string
- version: a dictionary
- body: a dictionary
`version` is a dictionary with the following entries:
- by: a dictionary
- number: an integer
- minorEdit: a boolean
- when: a string (a timestamp)
- message: a string
- hidden: a boolean
`by` is a dictionary with the following entries:
- type: a string
- username: a string
- userkey: a string
- displayName: a string
`body` is a dictionary with the following entries:
- storage: a dictionary
`storage` is a dictionary with the following entries:
- representation: a string
- value: a string
`value` is the HTML text of the page.
"""
ensure_instance('page_id', (str, int))
ensure_instance('expand', str)
params = {'expand': expand}
add_if_specified(params, 'version', version)
result = self._get(f'content/{page_id}', params=params)
return result # type: ignore
@api_call
def create_page(
self,
space_key: str,
title: str,
body: Optional[Dict[str, Any]] = None,
ancestors: Optional[List[Dict[str, Any]]] = None,
typ: str = 'page',
status: str = 'current',
) -> Dict[str, Any]:
"""Create a new page.
The `body` dictionary, if provided, is a standard Confluence
body specification. Please refer to #get_page() for more.
The `ancestors` list of dictionaries, if provided, is a standard
Confluence ancestors specification, with just the id, such as:
```python
[
{
'id': '1234'
}
]
```
Valid values for `typ` are those in `CONTENT_TYPES`.
Valid values for `status` are those in `CONTENT_STATUSES`.
# Required parameters
- space_key: a non-empty string
- title: a non-empty string
# Optional parameters
- body: a dictionary or None (None by default)
- ancestors: a list of dictionaries or None (None by default)
- typ: a string (`'page'` by default)
- status: a string (`'current'` by default)
# Returned value
A dictionary with the following entries:
- ancestors: a list of dictionaries
- body: a dictionary
- container: a dictionary
- extensions: a dictionary
- history: a dictionary
- id: an integer or a string
- space: a dictionary
- status: a string
- title: a string
- type: a string
- version: a dictionary
It may also contain standard _meta_ entries, such as `_links`
or `_expandable`.
Refer to #get_page() for more details on common entries in this
dictionary.
"""
ensure_nonemptystring('space_key')
ensure_nonemptystring('title')
ensure_noneorinstance('body', dict)
ensure_noneorinstance('ancestors', list)
ensure_in('typ', CONTENT_TYPES)
ensure_in('status', CONTENT_STATUSES)
definition = {
'space': {'key': space_key},
'title': title,
'type': typ,
'status': status,
}
add_if_specified(definition, 'body', body)
add_if_specified(definition, 'ancestors', ancestors)
result = self._post('content', json=definition)
return result # type: ignore
@api_call
def delete_page(self, page_id: Union[str, int]) -> bool:
"""Delete a page.
# Required parameters
- page_id: an integer or a string
# Returned value
A boolean. True if deletion was successful.
"""
ensure_instance('page_id', (str, int))
result = self.session().delete(
join_url(self.url, f'rest/api/content/{page_id}')
)
return result.status_code // 100 == 2
@api_call
def list_page_versions(
self, page_id: Union[str, int]
) -> List[Dict[str, Any]]:
"""Return all versions of a page
# Required parameters
- page_id: an integer or a string
# Returned value
A possibly empty list of versions. Versions are dictionaries.
An version contains the following entries:
- by: a dictionary
- when: a datetime as a string
- message: a string
- number: an integer
- minorEdit: a boolean
- hidden: a boolean
- links: a dictionary
- expandable: a dictionary
"""
ensure_instance('page_id', (str, int))
api_url = join_url(
self.url, f'rest/experimental/content/{page_id}/version'
)
collected: List[Any] = []
more = True
while more:
response = self.session().get(api_url)
if response.status_code // 100 != 2:
raise ApiError(response.text)
try:
workload = response.json()
collected += workload['results']
except Exception as exception:
raise ApiError(exception)
more = 'next' in workload['_links']
if more:
api_url = join_url(
workload['_links']['base'], workload['_links']['next']
)
return collected
@api_call
def delete_page_version(
self, page_id: Union[str, int], version: int
) -> None:
"""Delete a page version.
# Required parameters
- page_id: an integer or a string
- version: an integer
"""
ensure_instance('page_id', (str, int))
ensure_instance('version', int)
result = self.session().delete(
join_url(
self.url,
f'rest/experimental/content/{page_id}/version/{version}',
)
)
return result # type: ignore
@api_call
def update_page(
self, page_id: Union[str, int], page: Dict[str, Any]
) -> Dict[str, Any]:
"""Update an existing page.
This method should not be used to create a page. It is only
intended to update an existing one.
The typical usage is:
```python
>>> page = confluence.get_page(n)
>>> page['body']['storage']['value'] = '....'
>>> page['version'] = {'number': page['version']['number']+1}
>>> confluence.update_page(n, page)
```
See #get_page() for a description of the `page` dictionary.
# Required parameters
- page_id: an integer or a string
- page: a dictionary
# Returned value
A dictionary. Refer to #create_page() for more information.
"""
ensure_instance('page_id', (str, int))
ensure_instance('page', dict)
result = self._put(f'content/{page_id}', json=page)
return result # type: ignore
@api_call
def add_page_labels(
self, page_id: Union[str, int], labels: List[Mapping[str, str]]
) -> List[Dict[str, Any]]:
"""Add labels to page.
!!! warning
It only returns the first 200 labels by default. Use
#list_page_labels() if you want the complete list of labels
attached to a page.
# Required parameters
- page_id: an integer or a string
- labels: a non-empty list of dictionaries
Dictionaries in `labels` have the following entries:
- name: a string
- prefix: a string (`'global'`, ...)
Labels in the list are added to the page. Existing labels are
not removed if they are not in the list.
# Returned value
A list of _labels_, one per label attached to the page. Each
label is a dictionary with the following entries:
- id: an integer or a string
- name: a string
- prefix: a string
"""
ensure_instance('page_id', (str, int))
ensure_instance('labels', list)
if not labels:
raise ValueError('labels must not be empty.')
response = self._post(f'content/{page_id}/label', json=labels).json()
return response['results'] # type: ignore
@api_call
def list_page_labels(
self, page_id: Union[str, int]
) -> List[Dict[str, Any]]:
"""Get labels attached to page.
# Required parameters
- page_id: an integer or a string
# Returned value
A list of _labels_. Each label is a dictionary with the
following entries:
- id: an integer or a string
- name: a string
- prefix: a string
"""
ensure_instance('page_id', (str, int))
return self._collect_data(f'content/{page_id}/label')
@api_call
def list_page_attachments(
self, page_id: Union[str, int]
) -> List[Dict[str, Any]]:
"""Get attachments attached to page.
# Required parameters
- page_id: an integer or a string
# Returned value
A list of _labels_. Each label is a dictionary with the
following entries:
- id: an integer or a string
- name: a string
- prefix: a string
"""
ensure_instance('page_id', (str, int))
return self._collect_data(f'content/{page_id}/child/attachment')
@api_call
def add_page_attachment(
self,
page_id: Union[str, int],
filename: str,
comment: Optional[str] = None,
) -> Dict[str, Any]:
"""Add attachment to page.
# Required parameters
- page_id: an integer or a string
- filename: a string
# Optional parameters
- comment: a non-empty string or None (None by default)
# Returned value
A dictionary. Refer to #create_page() for more information.
"""
ensure_instance('page_id', (str, int))
ensure_nonemptystring('filename')
ensure_noneornonemptystring('comment')
with open(filename, 'rb') as f:
files = {'file': (filename, f.read())}
if comment:
data = {'comment': comment}
else:
data = None
api_url = join_url(
self.url, f'rest/api/content/{page_id}/child/attachment'
)
response = self.session().post(
api_url,
files=files,
data=data,
headers={'X-Atlassian-Token': 'nocheck'},
)
return response # type: ignore
@api_call
def update_page_attachment_data(
self,
page_id: Union[str, int],
attachment_id: Union[str, int],
filename: str,
comment: Optional[str] = None,
minor_edit: bool = True,
) -> Dict[str, Any]:
"""Update attachment content.
# Required parameters
- page_id: an integer or a string
- attachment_id: an integer or a string
- filename: a string
# Optional parameters
- comment: a non-empty string or None (None by default)
- minor_edit: a boolean (True by default)
# Returned value
A dictionary. Refer to #create_page() for more information.
"""
ensure_instance('page_id', (str, int))
ensure_instance('attachment_id', (str, int))
ensure_nonemptystring('filename')
ensure_noneornonemptystring('comment')
ensure_instance('minor_edit', bool)
with open(filename, 'rb') as f:
files = {'file': (filename, f.read())}
data = {'minorEdit': minor_edit}
if comment:
data['comment'] = comment
api_url = join_url(
self.url,
f'rest/api/content/{page_id}/child/attachment/{attachment_id}/data',
)
response = self.session().post(
api_url,
files=files,
data=data,
headers={'X-Atlassian-Token': 'nocheck'},
)
return response # type: ignore
@api_call
def list_page_restrictions(
self, page_id: Union[str, int]
) -> List[Dict[str, Any]]:
"""Returns the list of access restrictions on a given page.
# Required parameters
- `page_id` : integer or string
# Returned value
A list of _restrictions_ . Restrictions are structured as follow :
- `type`: str, either "Edit" or "View"
- `contentPermissions`: a dictionary structured as follow
* `type`: str, either "Edit" or "View"
* `userName`: str, or None if groupName is set,
* `groupName`: str, or None if userName is set
```
# See
<https://developer.atlassian.com/server/confluence/remote-confluence-methods/#permissions>
"""
ensure_instance('page_id', (str, int))
response = self.session().post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/getContentPermissionSets',
),
json=[page_id],
)
return response # type: ignore
@api_call
def set_page_restrictions(
self,
page_id: Union[str, int],
permission_type: str,
restrictions: List[Dict[str, Any]],
) -> bool:
"""
Will set the restrictions on a page given its id. The permission_type is either 'View' or 'Edit'.
# Required parameters
- `page_id`: integer or string
- `permission_type`: str, either "View" or "Edit"
- `restrictions`: a list of dictionaries structured as follow :
* `type`: string, either "Edit", "View" or None.
If set, must be consistent with `permission_type`.
If None, will inherit `permission_type`.
* `userName`: str, or None if `groupName` is set
* `groupName`: str, or None if `userName` is set
# Example
These rules means that this invocation :
```python
self.set_page_restrictions('page_id', 'Edit', [{'userName': 'bob'}, {'groupName': 'ATeam'}])
```
Is equivalent to the fully formed data as expected by the json-rpc API :
```python
self.set_page_restrictions(
'page_id',
'Edit',
[{'type': 'Edit', 'userName': 'bob', 'groupName': None},
{'type': 'Edit', 'userName': None, 'groupName': 'ATeam'}]
)
```
# Behavior rules
You may have noticed that permissions 'View' and 'Edit' are managed separately, but they need to be thought of together
when designing restrictions schemes. The default behavior when no permissions are set are the following:
- when no restrictions is set for type 'View' -> anyone can view the page.
- when no restrictions is set for type 'Edit' -> anyone can edit the page.
So if you want to absolutely restrict access to a particular user or group, be user to specify both 'View' and 'Edit'
restrictions (setting restrictions on 'Edit' only won't necessarily imply that 'View' restrictions will be set as well).
As a result you will often have to call this method twice in a row.
# See
<https://developer.atlassian.com/server/confluence/remote-confluence-methods/#permissions>
:param page_id:
:param permission_type:
:param restrictions:
:return:
"""
ensure_instance('page_id', (str, int))
ensure_in('permission_type', ('Edit', 'View'))
ensure_instance('restrictions', list)
sane_restrictions = self._sanitize_restrictions(
permission_type=permission_type, restrictions=restrictions
)
return (
self.session()
.post(
join_url(
self.url,
'/rpc/json-rpc/confluenceservice-v2/setContentPermissions',
),
json=[page_id, permission_type, sane_restrictions],
)
.text
== 'true'
)
####################################################################
# confluence helpers
def _get(
self,
api: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> requests.Response:
"""Return confluence GET api call results."""
api_url = join_url(join_url(self.url, 'rest/api'), api)
return self.session().get(api_url, params=params)
def _collect_data(
self,
api: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> List[Any]:
"""Return confluence GET api call results, collected."""
api_url = join_url(join_url(self.url, 'rest/api'), api)
collected: List[Any] = []
more = True
while more:
response = self.session().get(api_url, params=params)
if response.status_code // 100 != 2:
raise ApiError(response.text)
try:
workload = response.json()
collected += workload['results']
except Exception as exception:
raise ApiError(exception)
more = 'next' in workload['_links']
if more:
api_url = join_url(
workload['_links']['base'], workload['_links']['next']
)
params = {}
return collected
def _put(
self, api: str, json: Optional[Mapping[str, Any]] = None
) -> requests.Response:
"""Return confluence PUT api call results."""
api_url = join_url(join_url(self.url, 'rest/api'), api)
return self.session().put(api_url, json=json)
def _post(
self, api: str, json: Union[Mapping[str, Any], List[Mapping[str, Any]]]
) -> requests.Response:
"""Return confluence POST api call results."""
api_url = join_url(join_url(self.url, 'rest/api'), api)
return self.session().post(api_url, json=json)
def _sanitize_restrictions(
self, permission_type: str, restrictions: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
"""
Ensure that the _restrictions_ arguments for page restrictions are sanefor usage, see self#set_page_restrictions()
"""
for restriction in restrictions:
restriction.setdefault('type', permission_type)
if restriction['type'] != permission_type:
raise ValueError(
f"field 'type' is inconsistent with 'permission_type'. Got : 'permission_type'={permission_type}, restriction={restriction} "
)
has_user = restriction.setdefault('userName', None)
has_group = restriction.setdefault('groupName', None)
if has_group == has_user:
raise ValueError(
f"Confluence page restriction must have exactly one of : 'userName', 'groupName'. Got : {restriction} "
)
return restrictions | zabel-elements | /zabel_elements-1.15.0-py3-none-any.whl/zabel/elements/clients/base/confluence.py | confluence.py |
from typing import (
Any,
Dict,
Iterable,
List,
Mapping,
MutableMapping,
Optional,
Tuple,
Union,
)
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
add_if_specified,
api_call,
ensure_in,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_noneornonemptystring,
join_url,
)
########################################################################
########################################################################
# Repository package types known to the Artifactory REST API.  Entries
# marked '# new' were added in later Artifactory releases.
# NOTE(review): not referenced in this chunk; presumably used to
# validate repository definitions elsewhere in the file — confirm.
PACKAGE_TYPES = [
    'bower',
    'chef',
    'cocoapods',  # new
    'composer',
    'conan',
    'conda',  # new, and not listed in the published API documents
    'cran',  # new
    'debian',
    'docker',
    'gems',
    'generic',
    'gitlfs',
    'go',  # new
    'gradle',
    'helm',  # new
    'ivy',
    'maven',
    'npm',
    'nuget',
    'opkg',  # new
    'puppet',
    'pypi',
    'rpm',  # new
    'sbt',
    'vagrant',
    'yum',
]
# printf-style error-message template; '%s' receives the name of the
# parameter that cannot be combined with an explicit 'json' payload.
# NOTE(review): not referenced in this chunk — presumably used by
# wrapper methods later in the file.
INCOMPATIBLE_PARAM = '%s cannot be specified when json is provided'
# Artifactory low-level api
class Artifactory:
"""Artifactory Base-Level Wrapper.
# Reference URLs
<https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API>
<https://www.jfrog.com/confluence/display/XRAY2X/Xray+REST+API>
# Implemented features
- users
- groups
- repositories
- permission
- storageinfo
- token
- ping
- xray indexing
# Sample use
```python
>>> from zabel.elements.clients import Artifactory
>>>
>>> url = 'https://artifactory.example.com/artifactory/api/'
>>> af = Artifactory(url, user, token)
>>> af.list_users()
```
"""
def __init__(
self,
url: str,
user: str,
token: str,
xray_url: Optional[str] = None,
verify: bool = True,
) -> None:
"""Create an Artifactory instance object.
# Required parameters
- url: a non-empty string
- user: a string
- token: a string
`url` is the top-level API endpoint. For example,
`'https://artifactory.example.com/artifactory/api/'`
# Optional parameters
- xray_url: a string or None (None by default)
- verify: a boolean (True by default)
`xray_url`, if specified, is the top-level jfrog-xray API
endpoint. If not specified, will be as `url` with the
'artifactory/api' ending replaced by `xray/api`
`verify` can be set to False if disabling certificate checks for
Artifactory communication is required. Tons of warnings will
occur if this is set to False.
"""
ensure_nonemptystring('url')
ensure_instance('user', str)
ensure_instance('token', str)
self.url = url
if xray_url is None:
xray_url = url.strip('/').split('/')
xray_url[-2] = 'xray'
xray_url = '/'.join(xray_url)
self.url_xray = xray_url
self.auth = (user, token)
self.verify = verify
self.session = prepare_session(self.auth, verify=verify)
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
return f'<{self.__class__.__name__}: {self.url!r}, {self.auth[0]!r}>'
####################################################################
# artifactory builds
#
# list_builds
@api_call
def list_builds(self) -> List[Dict[str, Any]]:
"""Return the builds list.
# Returned value
A list of _builds_. Each build is a dictionary with the
following entries:
- time: an integer (a timestamp)
- lastBuildTime: a string (a timestamp)
- userCanDistribute: a boolean
- buildNumber: a string
- buildName: a string
"""
return self._get('builds') # type: ignore
####################################################################
# artifactory users
#
# list_users
# get_user
# create_or_replace_user
# update_user
# delete_user
# get_encryptedpassword
# get_apikey
# create_apikey
# revoke_apikey
@api_call
def list_users(self) -> List[Dict[str, Any]]:
"""Return the users list.
# Returned value
A list of _users_. Each user is a dictionary with the following
entries:
- name: a string
- realm: a string
- uri: a string
"""
return self._get('security/users') # type: ignore
@api_call
def get_user(self, user_name: str) -> Dict[str, Any]:
"""Return user details.
# Required parameters
- user_name: a non-empty string
# Returned value
A dictionary with the following entries:
- admin: a boolean
- disableUIAccess: a boolean
- email: a string
- groups: a list of strings
- lastLoggedInMillis: an integer
- ?lastLoggedIn: a string representing a date
- name: a string
- offlineMode: a boolean
- profileUpdatable: a boolean
- realm: a string
"""
ensure_nonemptystring('user_name')
return self._get(f'security/users/{user_name}') # type: ignore
@api_call
def create_or_replace_user(
self,
name: str,
email: str,
password: str,
admin: bool = False,
profile_updatable: bool = True,
disable_ui_access: bool = True,
internal_password_disabled: bool = False,
groups: Optional[List[str]] = None,
) -> None:
"""Create or replace a user.
!!! important
If the user already exists, it will be replaced and
unspecified parameters will have their default values. Use
#update_user() if you want to change a parameter of an
existing user while keeping the other parameters values.
# Required parameters
- name: a non-empty string
- email: a non-empty string
- password: a non-empty string
# Optional parameters
- admin: a boolean (False by default)
- profile_updatable: a boolean (True by default)
- disable_ui_access: a boolean (True by default)
- internal_password_disabled: a boolean (False by default)
- groups: a list of strings or None (None by default)
# Returned value
None.
"""
ensure_nonemptystring('name')
ensure_nonemptystring('email')
ensure_nonemptystring('password')
ensure_instance('admin', bool)
ensure_instance('profile_updatable', bool)
ensure_instance('disable_ui_access', bool)
ensure_instance('internal_password_disabled', bool)
ensure_noneorinstance('groups', list)
data = {
'name': name,
'email': email,
'password': password,
'admin': admin,
'profileUpdatable': profile_updatable,
'disableUIAccess': disable_ui_access,
'internalPasswordDisabled': internal_password_disabled,
}
add_if_specified(data, 'groups', groups)
result = self._put(f'security/users/{name}', json=data)
return result # type: ignore
@api_call
def update_user(
self,
name: str,
email: Optional[str] = None,
password: Optional[str] = None,
admin: Optional[bool] = None,
profile_updatable: Optional[bool] = None,
disable_ui_access: Optional[bool] = None,
internal_password_disabled: Optional[bool] = None,
groups: Optional[Iterable[str]] = None,
) -> None:
"""Update an existing user.
# Required parameters
- name: a non-empty string
# Optional parameters
- email: a non-empty string or None (None by default)
- password: a non-empty string or None (None by default)
- admin: a boolean or None (None by default)
- profile_updatable: a boolean or None (None by default)
- disable_ui_access: a boolean or None (None by default)
- internal_password_disabled: a boolean or None (None by
default)
- groups: a list of strings or None (None by default)
If an optional parameter is not specified, or is None, its
existing value will be preserved.
# Returned value
None.
"""
ensure_nonemptystring('name')
if (
email is None
and password is None
and admin is None
and profile_updatable is None
and disable_ui_access is None
and internal_password_disabled is None
and groups is None
):
raise ValueError(
'At least one parameter must be specified in '
'addition to the user name'
)
ensure_noneornonemptystring('email')
ensure_noneornonemptystring('password')
ensure_noneorinstance('admin', bool)
ensure_noneorinstance('profile_updatable', bool)
ensure_noneorinstance('disable_ui_access', bool)
ensure_noneorinstance('internal_password_disabled', bool)
ensure_noneorinstance('groups', list)
_user = self.get_user(name)
if admin is None:
admin = _user['admin']
if profile_updatable is None:
profile_updatable = _user['profileUpdatable']
if disable_ui_access is None:
disable_ui_access = _user['disableUIAccess']
if internal_password_disabled is None:
internal_password_disabled = _user['internalPasswordDisabled']
if groups is None:
groups = _user['groups'] if 'groups' in _user else None
data = {'name': name}
add_if_specified(data, 'email', email)
add_if_specified(data, 'password', password)
add_if_specified(data, 'admin', admin)
add_if_specified(data, 'profileUpdatable', profile_updatable)
add_if_specified(data, 'disableUIAccess', disable_ui_access)
add_if_specified(
data, 'internalPasswordDisabled', internal_password_disabled
)
add_if_specified(data, 'groups', groups)
result = self._post(f'security/users/{name}', json=data)
return result # type: ignore
@api_call
def delete_user(self, user_name: str) -> bool:
"""Delete user.
# Required parameters
- user_name: a non-empty string
# Returned value
A boolean. True if successful.
"""
ensure_nonemptystring('user_name')
return self._delete(f'security/users/{user_name}').status_code == 200
@api_call
def create_apikey(self, auth: Optional[Tuple[str, str]] = None) -> str:
"""Generate the user API key.
If `auth` is not specified, generate the current user API key.
# Optional parameters
- auth: a (string, string) tuple or None (None by default)
# Returned value
A string, the new API key.
# Raised exceptions
If the API key already exists, an _ApiError_ exception is
raised.
"""
result = self._post2('security/apiKey', auth=auth or self.auth).json()
if 'apiKey' not in result:
raise ApiError('Error while creating apiKey, already exists?')
return result['apiKey'] # type: ignore
@api_call
def get_apikey(
self, auth: Optional[Tuple[str, str]] = None
) -> Optional[str]:
"""Return the user API key.
If `auth` is not specified, return the current user API key.
# Optional parameters
- auth: a (string, string) tuple or None (None by default)
# Returned value
A string, the API key, or None, if no API key has been created
yet.
"""
result = (
self._get2('security/apiKey', auth=auth or self.auth)
.json()
.get('apiKey')
)
return result # type: ignore
@api_call
def revoke_apikey(self, auth: Optional[Tuple[str, str]] = None) -> None:
"""Revoke the user API key.
If `auth` is not specified, revoke the current user API key.
If no API key has been created, does nothing.
# Optional parameters
- auth: a (string, string) tuple or None (None by default)
# Return value
None.
# Raised exceptions
If the specified credentials are invalid, raises an _ApiError_
exception.
"""
result = self._delete2('security/apiKey', auth=auth or self.auth)
if 'errors' in result.json():
raise ApiError('Errors while revoking apiKey, bad credentials?')
@api_call
def get_encryptedpassword(
self, auth: Optional[Tuple[str, str]] = None
) -> str:
"""Return the user encrypted password.
If `auth` is not specified, return the current user encrypted
password.
# Optional parameters
- auth: a (string, string) tuple or None (None by default)
# Return value
A string.
"""
return self._get2(
'security/encryptedPassword', auth=auth or self.auth
).text
####################################################################
# artifactory groups
#
# list_groups
# get_group
# create_or_replace_group
# update_group
# delete_group
@api_call
def list_groups(self) -> List[Dict[str, Any]]:
"""Return the groups list.
# Returned value
A list of _groups_. Each group is a dictionary with the
following entries:
- name: a string
- uri: a string
"""
return self._get('security/groups') # type: ignore
@api_call
def get_group(self, group_name: str) -> Dict[str, Any]:
"""Return group details.
# Required parameters
- group_name: a non-empty string
# Returned value
A dictionary with the following entries:
- name: a string
- description: a string
- autoJoin: a boolean
- adminPrivileges: a string
- realm: a string
"""
ensure_nonemptystring('group_name')
return self._get(f'security/groups/{group_name}') # type: ignore
@api_call
def create_or_replace_group(
self,
name: str,
description: Optional[str] = None,
auto_join: bool = False,
admin_priviledge: bool = False,
realm: Optional[str] = None,
realm_attributes: Optional[str] = None,
) -> None:
"""Create or replace a group.
!!! important
If the group already exists, it will be replaced and
unspecified parameters will have their default values. Use
#update_group() if you want to change a parameter of an
existing group while keeping the other parameters values.
# Required parameters
- name: a non-empty string
# Optional parameters
- description: a non-empty string or None (None by default)
- auto_join: a boolean (False by default)
- admin_priviledge: a boolean (False by default)
- realm: a non-empty string or None (None by default)
- realm_attributes: a non-empty string or None (None by default)
# Returned value
None.
"""
ensure_nonemptystring('name')
if admin_priviledge and auto_join:
raise ValueError(
'auto_join cannot be True if admin_priviledge is True'
)
ensure_noneornonemptystring('description')
ensure_instance('auto_join', bool)
ensure_instance('admin_priviledge', bool)
# ?? is '' an allowed value for realm or realm_attributes?
ensure_noneornonemptystring('realm')
ensure_noneornonemptystring('realm_attributes')
data = {
'name': name,
'description': description,
'autoJoin': auto_join,
'adminPrivileges': admin_priviledge,
'realm': realm,
'realmAttributes': realm_attributes,
}
result = self._put(f'security/groups/{name}', json=data)
return result # type: ignore
@api_call
def update_group(
self,
name: str,
description: Optional[str] = None,
auto_join: Optional[bool] = None,
admin_priviledge: Optional[bool] = None,
realm: Optional[str] = None,
realm_attributes: Optional[str] = None,
) -> None:
"""Update an existing group.
# Required parameters
- name: a non-empty string
# Optional parameters
- description: a non-empty string or None (None by default)
- auto_join: a boolean or None (None by default)
- admin_priviledge: a boolean or None (None by default)
- realm: a non-empty string or None (None by default)
- realm_attributes: a non-empty string or None (None by default)
If an optional parameter is not specified, or is None, its
existing value will be preserved.
# Returned value
None.
"""
ensure_nonemptystring('name')
if (
admin_priviledge is not None
and admin_priviledge
and auto_join is not None
and auto_join
):
raise ValueError(
'auto_join cannot be True if admin_priviledge is True'
)
ensure_noneornonemptystring('description')
ensure_noneorinstance('auto_join', bool)
ensure_noneorinstance('admin_priviledge', bool)
# ?? is '' an allowed value for realm or realm_attributes?
ensure_noneornonemptystring('realm')
ensure_noneornonemptystring('realm_attributes')
_group = self.get_group(name)
if admin_priviledge is None:
admin_priviledge = _group['adminPrivileges']
if auto_join is None:
auto_join = _group['autoJoin']
data = {'name': name}
add_if_specified(data, 'adminPrivileges', admin_priviledge)
add_if_specified(data, 'autoJoin', auto_join)
add_if_specified(data, 'description', description)
add_if_specified(data, 'realm', realm)
add_if_specified(data, 'realmAttributes', realm_attributes)
result = self._post(f'security/groups/{name}', json=data)
return result # type: ignore
@api_call
def delete_group(self, group_name: str) -> bool:
"""Delete group_name from Artifactory.
Deleting a group automatically remove the specified group for
users.
# Required parameters
- group_name: a non-empty string
# Returned value
A boolean. True if successful.
"""
ensure_nonemptystring('group_name')
return self._delete(f'security/groups/{group_name}').status_code == 200
####################################################################
# artifactory repositories
#
# list_repositories
# get_repository
# create_repository
# update_repository
# delete_repository
@api_call
def list_repositories(self) -> List[Dict[str, Any]]:
"""Return the repositories list.
# Returned value
A list of _repositories_. Each repository is a dictionary with
the following entries:
- description: a string
- key: a string
- type: a string
- url: a string
"""
return self._get('repositories') # type: ignore
@api_call
def get_repository(self, repository_name: str) -> Dict[str, Any]:
"""Return the repository details.
# Required parameters
- repository_name: a non-empty string
# Returned value
A dictionary with the following entries:
- archiveBrowsingEnabled: a boolean
- blackedOut: a boolean
- blockXrayUnscannedArtifacts: a boolean
- calculateYumMetadata: a boolean
- checksumPolicyType: a string
- debianTrivialLayout: a boolean
- description: a string
- dockerApiVersion: a string
- enableBowerSupport: a boolean
- enableCocoaPodsSupport: a boolean
- enableComposerSupport: a boolean
- enableConanSupport: a boolean
- enableDebianSupport: a boolean
- enableDistRepoSupport: a boolean
- enableDockerSupport: a boolean
- enableGemsSupport: a boolean
- enableGitLfsSupport: a boolean
- enableNpmSupportenableNuGetSupport: a boolean
- enablePuppetSupport: a boolean
- enablePypiSupport: a boolean
- enableVagrantSupport: a boolean
- enabledChefSupport: a boolean
- excludesPattern: a string
- forceNugetAuthentication: a boolean
- handleReleases: a boolean
- handleSnapshots: a boolean
- includesPattern: a string
- key: a string
- maxUniqueSnapshots: an integer
- maxUniqueTags: an integer
- notes: a string
- packageType: a string
- propertySets: a list
- rclass: a string
- repoLayoutRef: a string
- snapshotVersionBehavior: a string
- suppressPomConsistencyChecks: a boolean
- xrayIndex: a boolean
- xrayMinimumBlockedSeverity: a string
- yumRootDepth: an integer
"""
ensure_nonemptystring('repository_name')
return self._get(f'repositories/{repository_name}') # type: ignore
@api_call
def create_repository(
self,
name: str,
rclass: Optional[str] = None,
package_type: Optional[str] = None,
url: Optional[str] = None,
description: Optional[str] = None,
notes: Optional[str] = None,
includes_pattern: Optional[str] = None,
excludes_pattern: Optional[str] = None,
repositories: Optional[List[str]] = None,
json: Optional[Dict[str, Any]] = None,
pos: Optional[int] = None,
default_deployment_repo: Optional[str] = None,
) -> None:
"""Create a repository.
# Required parameters
- name: a non-empty string
- json: a dictionary (if `rclass` and `package_type` are not
specified)
- rclass: a string (if `json` is not specified)
- package_type: a string (if `json` is not specified)
# Optional parameters
- pos: an integer or None (None by default)
If `json` is not specified:
- url: a string
- description: a string
- notes: a string
- includes_pattern: a string
- excludes_pattern: a string
- repositories: a list of strings
- default_deployment_repo: a string (optional, for virtual
repositories only)
A position may be specified using the `pos` parameter. If the
map size is shorter than `pos` the repository is the last one
(default).
Provides a minimal direct interface. In order to fully qualify
a repository, use the `json` parameter.
Legend: `+` = required entry, `-` = optional entry.
JSON for a local repository:
```json
{
- "key": "local-repo1",
+ "rclass" : "local",
+ "packageType": "maven" | "gradle" | "ivy" | "sbt" | "nuget"
| "gems" | "npm" | "bower" | "debian"
| "composer" | "pypi" | "docker" | "vagrant"
| "gitlfs" | "yum" | "conan" | "chef"
| "puppet" | "generic",
- "description": "The local repository public description",
- "notes": "Some internal notes",
- "includesPattern": "**/*" (default),
- "excludesPattern": "" (default),
- "repoLayoutRef" : "maven-2-default",
- "debianTrivialLayout" : false,
- "checksumPolicyType": "client-checksums" (default)
| "server-generated-checksums",
- "handleReleases": true (default),
- "handleSnapshots": true (default),
- "maxUniqueSnapshots": 0 (default),
- "maxUniqueTags": 0 (default),
- "snapshotVersionBehavior": "unique" | "non-unique" (default)
| "deployer",
- "suppressPomConsistencyChecks": false (default),
- "blackedOut": false (default),
- "propertySets": ["ps1", "ps2"],
- "archiveBrowsingEnabled" : false,
- "calculateYumMetadata" : false,
- "yumRootDepth" : 0,
- "dockerApiVersion" : "V2" (default),
- "enableFileListsIndexing " : "false" (default)
}
```
JSON for a remote repository:
```json
{
- "key": "remote-repo1",
+ "rclass" : "remote",
+ "packageType": "maven" | "gradle" | "ivy" | "sbt" | "nuget"
| "gems" | "npm" | "bower" | "debian"
| "pypi" | "docker" | "yum" | "vcs"
| "composer" | "p2" | "chef" | "puppet"
| "generic",
+ "url" : "http://host:port/some-repo",
- "username": "remote-repo-user",
- "password": "pass",
- "proxy": "proxy1",
- "description": "The remote repository public description",
- "notes": "Some internal notes",
- "includesPattern": "**/*" (default),
- "excludesPattern": "" (default),
- "repoLayoutRef" : "maven-2-default",
- "remoteRepoChecksumPolicyType":
"generate-if-absent" (default)
| "fail"
| "ignore-and-generate"
| "pass-thru",
- "handleReleases": true (default),
- "handleSnapshots": true (default),
- "maxUniqueSnapshots": 0 (default),
- "suppressPomConsistencyChecks": false (default),
- "hardFail": false (default),
- "offline": false (default),
- "blackedOut": false (default),
- "storeArtifactsLocally": true (default),
- "socketTimeoutMillis": 15000 (default),
- "localAddress": "212.150.139.167",
- "retrievalCachePeriodSecs": 43200 (default),
- "failedRetrievalCachePeriodSecs": 30 (default),
- "missedRetrievalCachePeriodSecs": 7200 (default),
- "unusedArtifactsCleanupEnabled": false (default),
- "unusedArtifactsCleanupPeriodHours": 0 (default),
- "assumedOfflinePeriodSecs" : 300 (default),
- "fetchJarsEagerly": false (default),
- "fetchSourcesEagerly": false (default),
- "shareConfiguration": false (default),
- "synchronizeProperties": false (default),
- "blockMismatchingMimeTypes" : true (default),
- "propertySets": ["ps1", "ps2"],
- "allowAnyHostAuth": false (default),
- "enableCookieManagement": false (default),
- "bowerRegistryUrl": "https://bower.herokuapp.com" (default),
- "vcsType": "GIT" (default),
- "vcsGitProvider": "GITHUB" (default) | "BITBUCKET" | "STASH"
| "ARTIFACTORY" | "CUSTOM",
- "vcsGitDownloadUrl": "" (default),
- "clientTlsCertificate": "" (default)
}
```
JSON for a virtual repository:
```json
{
- "key": "virtual-repo1",
+ "rclass" : "virtual",
+ "packageType": "maven" | "gradle" | "ivy" | "sbt" | "nuget"
| "gems" | "npm" | "bower" | "pypi"
| "docker" | "p2" | "yum" | "chef" | "puppet"
| "generic",
- "repositories": ["local-rep1", "local-rep2", "remote-rep1",
"virtual-rep2"],
- "description": "The virtual repository public description",
- "notes": "Some internal notes",
- "includesPattern": "**/*" (default),
- "excludesPattern": "" (default),
- "debianTrivialLayout" : false,
- "artifactoryRequestsCanRetrieveRemoteArtifacts": false,
- "keyPair": "keypair1",
- "pomRepositoryReferencesCleanupPolicy":
"discard_active_reference" (default)
| "discard_any_reference"
| "nothing",
- "defaultDeploymentRepo": "local-repo1"
}
```
# Returned value
None if successful.
# Raised exceptions
An _ApiError_ exception is raised if the repository creation
was not successful.
"""
ensure_noneorinstance('pos', int)
api_url = f'repositories/{name}'
if pos is not None:
api_url += f'?pos={pos}'
if json is not None:
if rclass is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'rclass')
if package_type is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'package_type')
if url is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'url')
if description is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'description')
if notes is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'notes')
if includes_pattern is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'includes_pattern')
if excludes_pattern is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'excludes_pattern')
if repositories is not None:
raise ValueError(INCOMPATIBLE_PARAM % 'repositories')
data = json
else:
if rclass is None:
raise ValueError('rclass required if json is not provided')
if package_type is None:
raise ValueError(
'package_type required if json is not provided'
)
ensure_in('rclass', ['local', 'remote', 'virtual'])
ensure_in('package_type', PACKAGE_TYPES)
if rclass == 'remote' and url is None:
raise ValueError('url required for remote repositories')
if rclass != 'virtual':
if repositories is not None:
raise ValueError(
'repositories cannot be specified for '
'non-virtual repositories'
)
if default_deployment_repo is not None:
raise ValueError(
'default deployment repository cannot '
'be specified for non-virtual '
'repositories'
)
data = {'key': name, 'rclass': rclass, 'packageType': package_type}
add_if_specified(data, 'url', url)
add_if_specified(data, 'description', description)
add_if_specified(data, 'notes', notes)
add_if_specified(data, 'includesPattern', includes_pattern)
add_if_specified(data, 'excludesPattern', excludes_pattern)
add_if_specified(data, 'repositories', repositories)
add_if_specified(
data, 'defaultDeploymentRepo', default_deployment_repo
)
result = self._put(api_url, json=data)
return None if result.status_code == 200 else result # type: ignore
    @api_call
    def update_repository(
        self, repository_name: str, json: Dict[str, Any]
    ) -> None:
        """Update repository repository_name with fields in JSON.

        No direct interface for now: `json` is passed through to the
        Artifactory repository-configuration endpoint as-is.

        # Required parameters

        - repository_name: a non-empty string
        - json: a dictionary

        # Returned value

        None if successful.

        # Raised exceptions

        An _ApiError_ exception is raised if the update was not
        successful.
        """
        ensure_nonemptystring('repository_name')
        result = self._post(f'repositories/{repository_name}', json=json)
        return None if result.status_code == 200 else result  # type: ignore
@api_call
def delete_repository(self, repository_name: str) -> bool:
"""Delete repository repository_name.
# Required parameters
- repository_name: a non-empty string
# Returned value
A boolean. True if successful.
"""
ensure_nonemptystring('repository_name')
return (
self._delete(f'repositories/{repository_name}').status_code == 200
)
####################################################################
# artifactory permission targets
#
# list_permissions
# get_permission
# create_or_replace_permission
# delete_permission
@api_call
def list_permissions(self) -> List[Dict[str, str]]:
"""Return the permission targets list.
# Returned value
A list of _permission targets_. Each permission target is a
dictionary with the following entries:
- name: a string
- uri: a string
"""
return self._get('security/permissions') # type: ignore
@api_call
def get_permission(self, permission_name: str) -> Dict[str, Any]:
"""Return the permission target details.
# Required parameters
- permission_name: a non-empty string
# Returned value
A dictionary with the following entries:
- name: a string
- repositories: a list of strings
- includesPattern: a string
- excludesPattern: a string
- principals: a dictionary
"""
ensure_nonemptystring('permission_name')
return self._get(f'security/permissions/{permission_name}') # type: ignore
@api_call
def create_or_replace_permission(
self,
permission_name: str,
repositories: List[str],
includes_pattern: str = '**/*',
excludes_pattern: str = '',
principals: Optional[Dict[str, Any]] = None,
) -> None:
"""Create or replace permission target.
# Required parameters
- permission_name: a non-empty string
- repositories: a list of strings
# Optional parameters
- includes_pattern: a string (`'**/*'` by default)
- excludes_pattern: a string (`''` by default)
- principals: a dictionary or None (None by default)
`repositories` is a list of repository names.
`includes_pattern` and `excludes_pattern` may contain more than
one pattern, separated by comas.
`principals` is a dictionary or None:
```python
{
"users" : {
"bob": ["r","w","m"],
"alice" : ["d","w","n", "r"]
},
"groups" : {
"dev-leads" : ["m","r","n"],
"readers" : ["r"]
}
}
```
Legend: `m`=admin, `d`=delete, `w`=deploy, `n`=annotate,
`r`=read.
# Returned value
None.
"""
ensure_nonemptystring('permission_name')
data = {
'name': permission_name,
'includesPattern': includes_pattern,
'excludesPattern': excludes_pattern,
'repositories': repositories,
}
add_if_specified(data, 'principals', principals)
result = self._put(
f'security/permissions/{permission_name}', json=data
)
return result # type: ignore
@api_call
def delete_permission(self, permission_name: str) -> bool:
"""Delete permission target.
# Required parameters
- permission_name: a non-empty string
# Returned value
A boolean. True if successful.
"""
ensure_nonemptystring('permission_name')
return (
self._delete(f'security/permissions/{permission_name}').status_code
== 200
)
####################################################################
# artifactory token
#
# create_token
# list_tokens
    @api_call
    def create_token(
        self,
        username: str,
        scope: Optional[str] = None,
        grant_type: str = 'client_credentials',
        expires_in: int = 3600,
        refreshable: bool = False,
        audience: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Create a new token.

        # Required parameters

        - username: a string
        - scope: a string (only required if `username` does not exist)

        # Optional parameters

        - grant_type: a string (`'client_credentials'` by default)
        - expires_in: an integer (3600 by default)
        - refreshable: a boolean (False by default)
        - audience: a string or None (None by default)

        `expires_in` is in seconds (1 hour by default).  Administrators
        can set it to 0 so that the token never expires.

        TODO: check `username` existence.

        # Returned value

        A dictionary with the following entries:

        - scope: a string
        - access_token: a string
        - expires_in: an integer
        - token_type: a string
        """
        ensure_instance('username', str)
        ensure_noneorinstance('scope', str)
        ensure_instance('grant_type', str)
        ensure_instance('expires_in', int)
        ensure_instance('refreshable', bool)
        ensure_noneorinstance('audience', str)
        # Values are sent form-encoded, hence the str() conversions.
        # NOTE(review): str(refreshable) yields 'True'/'False'; confirm
        # the endpoint accepts this capitalization.
        data = {
            'username': username,
            'grant_type': grant_type,
            'expires_in': str(expires_in),
            'refreshable': str(refreshable),
        }
        add_if_specified(data, 'scope', scope)
        add_if_specified(data, 'audience', audience)
        result = self._post('security/token', data=data)
        return result  # type: ignore
@api_call
def list_tokens(self) -> List[Dict[str, Any]]:
"""Return list of tokens.
The returned `subject` contains the token creator ID.
# Returned value
A list of _tokens_. Each token is a dictionary with the
following entries:
- issued_at: an integer (a timestamp)
- issuer: a string
- refreshable: a boolean
- subject: a string
- token_id: a string
"""
return self._get('security/token').json()['tokens'] # type: ignore
####################################################################
# artifactory artefacts information
#
# get_file_info
# get_folder_info
# get_file_properties
# get_file_stats
    @api_call
    def get_file_info(self, repository_name: str, path: str) -> Dict[str, Any]:
        """Return file information.

        For virtual use the virtual repository returns the resolved
        file.  Supported by local, local-cached and virtual
        repositories.

        # Required parameters

        - repository_name: a non-empty string
        - path: a non-empty string

        # Returned value

        A dictionary with the following entries:

        - uri: a string
        - downloadUri: a string
        - repo: a string (same as `repository_name`)
        - path: a string (same as `path`)
        - remoteUrl: a string
        - created: a string (ISO8601, yyyy-MM-dd'T'HH:mm:ss.SSSZ)
        - createdBy: a string
        - lastModified: a string (ISO8601)
        - modifiedBy: a string
        - lastUpdated: a string (ISO8601)
        - size: a string (in bytes)
        - mimeType: a string
        - checksums: a dictionary
        - originalChecksums: a dictionary

        The `checksums` and the `originalChecksums` dictionaries have
        the following entries:

        - md5: a string
        - sha1: a string
        - sha256: a string
        """
        ensure_nonemptystring('repository_name')
        ensure_nonemptystring('path')
        return self._get(f'storage/{repository_name}/{path}')  # type: ignore
@api_call
def get_folder_info(
self, repository_name: str, path: str
) -> Dict[str, Any]:
"""Return folder information
For virtual use, the virtual repository returns the unified
children. Supported by local, local-cached and virtual
repositories.
# Required parameters
- repository_name: a non-empty string
- path: a non-empty string
# Returned value
A dictionary with the following entries:
- uri: a string
- repo: a string (same as `repository_name`)
- path: a string (same as `path`)
- created: a string (ISO8601, yyyy-MM-dd'T'HH:mm:ss.SSSZ)
- createdBy: a string
- lastModified: a string (ISO8601)
- modifiedBy: a string
- lastUpdated: a string (ISO8601)
- children: a list of dictionaries
Each dictionary in the `children` list has the following
entries:
- uri: a string
- folder: a boolean
"""
ensure_nonemptystring('repository_name')
ensure_nonemptystring('path')
return self._get(f'storage/{repository_name}/{path}') # type: ignore
@api_call
def get_file_properties(
self,
repository_name: str,
path: str,
properties: Optional[List[str]] = None,
) -> Dict[str, Any]:
"""Return file statistics.
Item statistics record the number of times an item was
downloaded, last download date and last downloader. Supported by
local and local-cached repositories.
# Required parameters
- repository_name: a non-empty string
- path: a non-empty string
# Optional parameters
- properties: a list of strings or None (None by default, i.e.,
returns all properties)
# Returned value
A dictionary with the following entries:
- uri: a string
- properties: a dictionary.
The `properties` dictionary has one entry per property. The key
is the property name (a string) and the value is the property
value (property-dependent)
# Raised exception
If no property exists, an _ApiError_ exception is raised.
"""
ensure_nonemptystring('repository_name')
ensure_nonemptystring('path')
props = 'properties'
if properties is not None:
properties += '=' + ','.join(properties)
return self._get(f'storage/{repository_name}/{path}?{props}') # type: ignore
@api_call
def get_file_stats(
self, repository_name: str, path: str
) -> Dict[str, Any]:
"""Return file statistics.
Item statistics record the number of times an item was
downloaded, last download date and last downloader. Supported by
local and local-cached repositories.
# Required parameters
- repository_name: a non-empty string
- path: a non-empty string
# Returned value
A dictionary with the following entries:
- uri: a string
- lastDownloaded: an integer (a timestamp)
- downloadCount: an integer
- lastDownloadedBy: a string
"""
ensure_nonemptystring('repository_name')
ensure_nonemptystring('path')
return self._get(f'storage/{repository_name}/{path}?stats') # type: ignore
####################################################################
# artifactory information
#
# get_storageinfo
# get_version
@api_call
def get_version(self) -> Dict[str, Any]:
"""Return version information.
# Returned value
A dictionary with the following entries:
- version: a string (the currently installed version)
- revision: a string
- addons: a list of strings
- license: a string
"""
return self._get('system/version') # type: ignore
    @api_call
    def get_storageinfo(self) -> Dict[str, Any]:
        """Return storage information.

        # Returned value

        A dictionary with the following 4 entries:

        - binariesSummary: a dictionary
        - fileStoreSummary: a dictionary
        - storageSummary: a dictionary
        - repositoriesSummaryList: a list of dictionaries

        `binariesSummary` has the following entries (note that all
        sizes and counts are human-readable strings, not numbers):

        - itemsCount: a string (`'28,348'`)
        - optimization: a string (`'80.85%'`)
        - artifactsCount: a string (`'15,492'`)
        - binariesSize: a string (`'116.97 GB'`)
        - binariesCount: a string (`'13,452'`)
        - artifactsSize: a string (`'144.68 GB'`)

        `fileStoreSummary` has the following entries:

        - storageDirectory: a string
          (`'/data/artifactory/data/filestore'`)
        - usedSpace: a string (`'346.71 GB (70.47%)'`)
        - totalSpace: a string (`'492.03 GB'`)
        - storageType: a string (`'file-system'`)
        - freeSpace: a string (`'145.32 GB (29.53%)'`)

        `storageSummary` has the following entries:

        - binariesSummary: a dictionary (same as above)
        - fileStoreSummary: a dictionary (same as above)
        - repositoriesSummaryList: a list of dictionaries (same as
          below)

        Dictionaries in the `repositoriesSummaryList` list have the
        following entries:

        - filesCount: an integer
        - itemsCount: an integer
        - packageType: a string
        - usedSpace: a string (`'0 bytes'`)
        - foldersCount: an integer
        - percentage: a string (`'0%'`)
        - repoType: a string (`'VIRTUAL'`, `'LOCAL'`, `'CACHE'`, or
          `'NA'`)
        - repoKey: a string (`'project-maven-scratch'`)

        Two 'virtual' items are added to the `repositoriesSummaryList`
        list: the 'auto-trashcan' item and the 'TOTAL' item.

        Please note that the 'TOTAL' item has no `packageType` entry
        (but the 'auto-trashcan' has one, valued to 'NA').

        Those two items have a `repoType` entry valued to 'NA'.
        """
        return self._get('storageinfo')  # type: ignore
####################################################################
# artifactory health check
#
# ping
@api_call
def ping(self) -> bool:
"""Check if instance is OK.
# Returned value
A boolean. True if Artifactory is working properly.
"""
response = self._get('system/ping')
return response.status_code == 200 and response.text == 'OK'
####################################################################
# jfrog xray indexing
#
# get_reposindexing_configuration
# update_reposindexing_configuration
@api_call
def get_reposindexing_configuration(
self, bin_mgr_id: str = 'default'
) -> Dict[str, Any]:
"""Get indexed and not indexed repositories for binmgr.
# Optional parameters
- bin_mgr_id: a string ('default' by default)
# Returned value
A dictionary with the following entries:
- bin_mgr_id: a string
- indexed_repos: a list of dictionaries
- non_indexed_repos: a list of dictionaries
Entries in the `indexed_repos` and `non_indexed_repositories`
have the following entries:
- name: a string
- type: a string ('local' or 'remote')
- pkg_type: a string
"""
ensure_nonemptystring('bin_mgr_id')
return self._get_xray('/v1/binMgr/{id}/repos'.format(id=bin_mgr_id))
@api_call
def update_reposindexing_configuration(
self,
indexed_repos: List[Dict[str, Any]],
non_indexed_repos: List[Dict[str, Any]],
bin_mgr_id: str = 'default',
) -> Dict[str, Any]:
"""Update indexed and not indexed repositories for binmgr.
# Required parameters
- indexed_repos: a list of dictionaries
- non_indexed_repos: a list of dictionaries
# Optional parameters
- bin_mgr_id: a string ('default' by default)
# Returned value
A status dictionary, with an `info` entry (a string) describing
the operation result.
"""
ensure_instance('indexed_repos', list)
ensure_instance('non_indexed_repos', list)
ensure_nonemptystring('bin_mgr_id')
what = {
'indexed_repos': indexed_repos,
'non_indexed_repos': non_indexed_repos,
}
return self._put_xray(
'/v1/binMgr/{id}/repos'.format(id=bin_mgr_id), json=what
)
####################################################################
# artifactory private helpers
def _get(self, api: str) -> requests.Response:
"""Return artifactory api call results, as Response."""
api_url = join_url(self.url, api)
return self.session().get(api_url)
def _get_xray(self, api: str) -> requests.Response:
"""Return xray api call results, as Response."""
api_url = join_url(self.url_xray, api)
return self.session().get(api_url)
def _get_batch(self, apis: Iterable[str]) -> List[Dict[str, Any]]:
"""Return list of JSON results."""
return [
self.session().get(join_url(self.url, api)).json() for api in apis
]
def _post(
self,
api: str,
json: Optional[Mapping[str, Any]] = None,
data: Optional[Union[MutableMapping[str, str], bytes]] = None,
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().post(api_url, json=json, data=data)
def _put(self, api: str, json: Dict[str, Any]) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().put(api_url, json=json)
def _put_xray(self, api: str, json: Dict[str, Any]) -> requests.Response:
api_url = join_url(self.url_xray, api)
return self.session().put(api_url, json=json)
def _delete(self, api: str) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().delete(api_url)
# variants with explicit credentials
def _get2(self, api: str, auth: Tuple[str, str]) -> requests.Response:
"""Return artifactory api call results w/ auth."""
api_url = join_url(self.url, api)
return requests.get(api_url, auth=auth)
def _post2(self, api: str, auth: Tuple[str, str]) -> requests.Response:
"""Return artifactory api call results w/ auth."""
api_url = join_url(self.url, api)
return requests.post(api_url, auth=auth)
def _delete2(self, api: str, auth: Tuple[str, str]) -> requests.Response:
"""Return artifactory api call results w/ auth."""
api_url = join_url(self.url, api)
return requests.delete(api_url, auth=auth) | zabel-elements | /zabel_elements-1.15.0-py3-none-any.whl/zabel/elements/clients/base/artifactory.py | artifactory.py |
from typing import Any, Dict, List, Mapping, MutableMapping, Optional, Union
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
add_if_specified,
api_call,
ensure_in,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_noneornonemptystring,
ensure_onlyone,
join_url,
)
########################################################################
########################################################################
PROJECT_PERMISSIONS = [
'admin',
'codeviewer',
'issueadmin',
'scan',
'user',
'securityhotspotadmin',
]
GLOBAL_PERMISSIONS = [
'admin',
'gateadmin',
'profileadmin',
'provisioning',
'scan',
'applicationcreator',
'portfoliocreator',
]
QUALIFIERS = {
'APP': 'Applications',
'BRC': 'Sub-projects',
'DIR': 'Directories',
'FIL': 'Files',
'SVW': 'Portfolios',
'TRK': 'Projects',
'UTS': 'Test Files',
'VW': 'Portfolios',
}
EVENT_CATEGORIES = [
'VERSION',
'OTHER',
'QUALITY_PROFILE',
'QUALITY_GATE',
'DEFINITION_CHANGE',
]
# SonarQube low-level api
class SonarQube:
"""SonarQube Base-Level API Wrapper.
- Reference URL: https://docs.sonarqube.org/display/DEV/Web+API
- Web API URL: https://sonar.example.com/sonar/web_api
# Implemented features
- applications (incomplete)
- components (incomplete)
- languages
- permissions
- permissionstemplates
- projectanalyses (incomplete)
- projects (incomplete)
- qualitygates (incomplete)
- qualityprofiles (incomplete)
- tokens
- usergroups
- users
- misc. operations
Some features may be specific to the Enterprise Edition, but as long
as they are not used directly, the library can be used with the
Community edition too.
Tested on SonarQube v7.9.1.
# Conventions
`'_'` are removed from SonarQube entrypoints names, to prevent
confusion.
Getters exhaust results (they return all items matching the query,
there is no need for paging).
`list_xxx` methods take a possibly optional filter argument and
return a list of matching items.
# Sample use
```python
>>> from zabel.elements.clients import SonarQube
>>>
>>> url = 'https://sonar.example.com/sonar/api/'
>>> sq = SonarQube(url, token)
>>> sq.search_users()
```
"""
def __init__(self, url: str, token: str, verify: bool = True) -> None:
"""Create a SonarQube instance object.
If a required operation is not allowed for the specified token,
an _ApiError_ will be raised.
# Required parameters
- url: a non-empty string
- token: a string
The `url` parameter is the top-level API point. E.g.,
`https://sonar.example.com/sonar/api/`
# Optional parameters
- verify: a boolean (True by default)
`verify` can be set to False if disabling certificate checks for
SonarQube communication is required. Tons of warnings will
occur if this is set to False.
"""
ensure_nonemptystring('url')
ensure_instance('token', str)
self.url = url
self.auth = (token, '')
self.verify = verify
self.session = prepare_session(self.auth, verify=verify)
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
return f"<{self.__class__.__name__}: {self.url!r}, '{self.auth[0][:4]}****'>"
####################################################################
# SonarQube applications
#
# create_application
# get_application
@api_call
def create_application(
self,
name: str,
description: Optional[str] = None,
key: Optional[str] = None,
visibility: str = 'private',
) -> Dict[str, Any]:
"""Create a new application.
# Required parameters
- name: a non-empty string
# Optional parameters
- description: a string or None (None by default)
- key: a string or None (None by default)
- visibility: `'public'` or `'private'` (`'private'` by default)
# Returned value
A dictionary with the following entry:
- application: a dictionary
The `application` dictionary has the following entries:
- key: a string
- name: a string
- description: a string
- visibility: a string
- projects: a possibly empty list of dictionaries
"""
ensure_nonemptystring('name')
ensure_noneornonemptystring('description')
ensure_noneornonemptystring('key')
ensure_in('visibility', ['private', 'public'])
data = {'name': name, 'visibility': visibility}
add_if_specified(data, 'key', key)
add_if_specified(data, 'description', description)
result = self._post('applications/create', data)
return result # type: ignore
@api_call
def get_application(
self, key: str, branch: Optional[str] = None
) -> Dict[str, Any]:
"""Return the application details.
# Required parameters
- key: a string
# Optional parameters
- branch: a string or None (None by default)
# Returned value
A dictionary with the following content:
```python
{
"application": {
"key": a string,
"name": a string,
"branch": a string,
"isMain": a boolean,
"projects": [
{
"key": a string,
"name": a string,
"branch": a string,
"isMain": a boolean,
"enabled": a boolean,
"selected": a boolean
},
...
],
"branches": [
{
"name": a string,
"isMain": a boolean
},
...
]
}
}
```
"""
ensure_nonemptystring('key')
ensure_noneornonemptystring('branch')
params = {'application': key}
add_if_specified(params, 'branch', branch)
result = self._get('applications/show', params=params)
return result # type: ignore
####################################################################
# SonarQube components
#
# list_components
@api_call
def list_components(
self, qualifiers: str, language: Optional[str] = None
) -> List[Dict[str, str]]:
"""Return the matching components list.
If `language` is provided, only components for the given
language are returned.
`qualifiers` is a comma-separated list of qualifiers. Each
qualifier must be in `QUALIFIERS`.
# Required parameters
- qualifiers: a non-empty string
# Optional parameters
- language: a non-empty string or None (None by default)
# Returned value
A list of _components_. Each component is a dictionary with the
following entries:
- organization: a string
- id: a string
- key: a string
- qualifier: a string
- name: a string
- project: a string
"""
ensure_nonemptystring('qualifiers')
ensure_noneornonemptystring('language')
params = {'qualifiers': qualifiers}
add_if_specified(params, 'language', language)
return self._collect_data('components/search', 'components', params)
####################################################################
# SonarQube languages
#
# list_languages
@api_call
def list_languages(self) -> List[Dict[str, str]]:
"""Return a list of supported languages.
# Returned value
A list of _languages_. Each language is a dictionary with the
following entries:
- key: a string
- name: a string
"""
return self._get('languages/list').json()['languages'] # type: ignore
####################################################################
# SonarQube permissions
#
# add_permission_group
# add_permission_user
# remove_permission_group
# remove_permission_user
@api_call
def add_permission_group(
self,
permission: str,
group_id: Optional[int] = None,
group_name: Optional[str] = None,
project_id: Optional[int] = None,
project_key: Optional[str] = None,
) -> None:
"""Add a permission to a group.
If neither `project_id` nor `project_key` are provided, it will
change the global permissions for the specified group.
# Required parameters
- permission: a string
- `group_id` OR `group_name`: an integer or a string
# Optional parameters
- `project_id` OR `project_key`: an integer or a string
"""
ensure_onlyone('group_id', 'group_name')
if project_id is None and project_key is None:
if permission not in GLOBAL_PERMISSIONS:
raise ValueError('Invalid global permission')
elif permission not in PROJECT_PERMISSIONS:
raise ValueError('Invalid project permission')
data = {'permission': permission}
add_if_specified(data, 'groupId', group_id)
add_if_specified(data, 'groupName', group_name)
add_if_specified(data, 'projectId', project_id)
add_if_specified(data, 'projectKey', project_key)
result = self._post('permissions/add_group', data)
return result # type: ignore
@api_call
def add_permission_user(
self,
permission: str,
login: str,
project_id: Optional[int] = None,
project_key: Optional[str] = None,
) -> None:
"""Add a permission to a user.
If neither `project_id` nor `project_key` are provided, it will
change the global permissions for the specified user.
# Required parameters
- permission: a string
- login: a string
# Optional parameters
- `project_id` OR `project_key`: an integer or a string
"""
ensure_instance('permission', str)
ensure_instance('login', str)
if project_id is None and project_key is None:
if permission not in GLOBAL_PERMISSIONS:
raise ValueError('Invalid global permission')
elif permission not in PROJECT_PERMISSIONS:
raise ValueError('Invalid project permission')
data = {'login': login, 'permission': permission}
add_if_specified(data, 'projectId', project_id)
add_if_specified(data, 'projectKey', project_key)
result = self._post('permissions/add_user', data)
return result # type: ignore
@api_call
def remove_permission_group(
self,
permission: str,
group_id: Optional[int] = None,
group_name: Optional[str] = None,
project_id: Optional[int] = None,
project_key: Optional[str] = None,
) -> None:
"""Remove a permission from a group.
If neither `project_id` nor `project_key` are provided, it will
change the global permissions for the specified group.
# Required parameters
- permission: a string
- `group_id` OR `group_name`: an integer or a string
# Optional parameters
- `project_id` OR `project_key`: an integer or a string (None by
default)
"""
ensure_onlyone('group_id', 'group_name')
if project_id is None and project_key is None:
if permission not in GLOBAL_PERMISSIONS:
raise ValueError('Invalid global permission')
elif permission not in PROJECT_PERMISSIONS:
raise ValueError('Invalid project permission')
data = {'permission': permission}
add_if_specified(data, 'groupId', group_id)
add_if_specified(data, 'groupName', group_name)
add_if_specified(data, 'projectId', project_id)
add_if_specified(data, 'projectKey', project_key)
result = self._post('permissions/remove_group', data)
return result # type: ignore
@api_call
def remove_permission_user(
self,
permission: str,
login: str,
project_id: Optional[int] = None,
project_key: Optional[str] = None,
) -> None:
"""Remove a permission from a user.
# Required parameters
- permission: a string
- login: a string
# Optional parameters
- `project_id` OR `project_key`: an integer or a string (None by
default)
If neither `project_id` nor `project_key` are provided, it will
change the global permissions for the specified user.
"""
if project_id is None and project_key is None:
if permission not in GLOBAL_PERMISSIONS:
raise ValueError('Invalid global permission')
elif permission not in PROJECT_PERMISSIONS:
raise ValueError('Invalid project permission')
data = {'login': login, 'permission': permission}
add_if_specified(data, 'projectId', project_id)
add_if_specified(data, 'projectKey', project_key)
result = self._post('permissions/remove_user', data)
return result # type: ignore
####################################################################
# SonarQube permissionstemplates
#
# create_permissionstemplate
# list_permissionstemplates
# update_permissionstemplate
# add_permissionstemplate_group
# apply_permissionstemplate
@api_call
def create_permissionstemplate(
self,
name: str,
description: Optional[str] = None,
project_key_pattern: Optional[str] = None,
) -> Dict[str, Any]:
"""Create a new permissions template.
If provided, `project_key_pattern` must be a valid Java regular
expression.
# Required parameters
- name: a string
# Optional parameters
- description: a string or None (None by default)
- project_key_pattern: a string or None (None by default)
# Returned value
A dictionary with the following entry:
- permissionTemplate: a dictionary
The `permissionTemplate` dictionary has the following entries:
- name: a string
- description: a string
- projectKeyPattern: a string
"""
ensure_nonemptystring('name')
ensure_noneorinstance('description', str)
ensure_noneorinstance('project_key_pattern', str)
data = {'name': name}
add_if_specified(data, 'description', description)
add_if_specified(data, 'projectKeyPattern', project_key_pattern)
result = self._post('permissions/create_template', data)
return result # type: ignore
@api_call
def list_permissionstemplates(
self, query: Optional[str] = None
) -> List[Dict[str, Any]]:
"""List the matching permissions templates.
If `query` is specified, will only returns the permissions
templates that contain it in their names.
# Optional parameters
- query: a string or None (None by default)
# Returned value
A list of _permissions templates_. Each permissions template
is a dictionary with the following entries:
- permissions: a list
- createdAt: a string (containing a timestamp)
- updatedAt: a string (containing a timestamp)
- name: a string
- id: a string
- description: a string
- projectKeyPattern: a string (if present)
Each entry in the `permissions` list is a dictionary with the
following entries:
- usersCount: an integer
- groupsCount: an integer
- key: a string
- withProjectCreator: a boolean
"""
ensure_noneorinstance('query', str)
return self._collect_data(
'permissions/search_templates',
'permissionTemplates',
None if query is None else {'q': query},
)
@api_call
def update_permissionstemplate(
self,
permissionstemplate_id: str,
description: Optional[str] = None,
name: Optional[str] = None,
project_key_pattern: Optional[str] = None,
) -> Dict[str, Any]:
"""Update a permissions template.
If provided, `project_key_pattern` must be a valid Java regular
expression.
# Required parameters
- permissionstemplate_id: a non-empty string
# Optional parameters
- description: a string or None (None by default)
- name: a string or None (None by default)
- project_key_pattern: a string or None (None by default)
# Returned value
A dictionary with the following entry:
- permissionTemplate: a dictionary
The `permissionTemplate` dictionary has the following entries:
- id: a string
- name: a string
- description: a string
- projectKeyPattern: a string
- createdAt: a string (an ISO timestamp)
- updatedAt: a string (an ISO timestamp)
"""
ensure_nonemptystring('permissionstemplate_id')
ensure_noneorinstance('description', str)
ensure_noneorinstance('name', str)
ensure_noneorinstance('project_key_pattern', str)
data = {'id': permissionstemplate_id}
add_if_specified(data, 'description', description)
add_if_specified(data, 'name', name)
add_if_specified(data, 'projectKeyPattern', project_key_pattern)
result = self._post('permissions/update_template', data)
return result # type: ignore
@api_call
def add_permissionstemplate_group(
self, template_name: str, group_name: str, permission: str
) -> None:
"""Add a group to permissions template with specified permission.
If more than one permission is to be added to a group, call this
method repeatedly.
# Required parameters
- template_name: a string
- group_name: a string
- permission: a string
"""
ensure_nonemptystring('template_name')
ensure_nonemptystring('group_name')
ensure_nonemptystring('permission')
if permission not in PROJECT_PERMISSIONS:
raise ValueError(f'Unexpected value {permission} for permission')
data = {
'groupName': group_name,
'templateName': template_name,
'permission': permission,
}
result = self._post('permissions/add_group_to_template', data)
return result # type: ignore
@api_call
def apply_permissionstemplate(
self,
template_name: str,
project_id: Optional[int] = None,
project_key: Optional[str] = None,
) -> None:
"""Apply a permission template to one project.
# Required parameters
- `template_name`: a string
- `project_id` OR `project_key`: an integer or a string (None by
default)
"""
ensure_nonemptystring('template_name')
ensure_onlyone('project_id', 'project_key')
data = {'templateName': template_name}
add_if_specified(data, 'projectKey', project_key)
add_if_specified(data, 'projectId', project_id)
result = self._post('permissions/apply_template', data)
return result # type: ignore
####################################################################
# SonarQube users
#
# create_user
# list_user_groups
# search_users
# get_user
# update_user
# deactivate_user
@api_call
def create_user(
self,
login: str,
name: str,
password: Optional[str] = None,
email: Optional[str] = None,
local: bool = True,
) -> Dict[str, Any]:
"""Create a new user.
# Required parameters
- login: a non-empty string
- name: a string
# Optional parameters
- password: a string or None (None by default)
- email: a string or None (None by default)
- local: a boolean (True by default)
`password` must be set for local users.
`password` must not be set for non-local users.
`scmAccount` not yet implemented.
# Returned value
A dictionary with the following entry:
- user: a dictionary
The `user` dictionary with the following entries:
- login: a string
- name: a string
- email: a string
- scmAccount: a list of strings
- active: a boolean
- local: a boolean
"""
ensure_nonemptystring('login')
ensure_instance('name', str)
if local and password is None:
raise ValueError('password must be set for local users')
if not local and password is not None:
raise ValueError('password must not be set for non-local users')
data = {
'login': login,
'name': name,
'local': 'true' if local else 'false',
}
add_if_specified(data, 'password', password)
add_if_specified(data, 'email', email)
result = self._post('users/create', data)
return result # type: ignore
@api_call
def list_user_groups(
self,
login: str,
selected: str = 'selected',
query: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""List the groups a user belongs to.
# Required parameters
- login: a non-empty string
# Optional parameters
- selected: a string (`'selected'` by default)
- query: a string or None (None by default)
# Returned value
A list of _groups_. Each group is a dictionary with the
following entries:
- name: a string
- default: a boolean
- id: an integer
- selected: a boolean
- description: a string
"""
ensure_nonemptystring('login')
ensure_instance('selected', str)
ensure_noneorinstance('query', str)
params = {'login': login, 'selected': selected}
add_if_specified(params, 'q', query)
return self._collect_data('users/groups', 'groups', params)
@api_call
def search_users(
self, query: Optional[str] = None
) -> List[Dict[str, Any]]:
"""Return the maching users list.
# Optional parameters
- query: a non-empty string or None (None by default)
# Returned value
A list of _users_. Each user is a dictionary with the following
entries:
- active: a boolean
- avatar: a string
- email: a string, possibly empty
- externalIdentity: a string
- externalProvider: a string
- groups: a list of strings
- lastConnectionDate
- local: a boolean
- login: a string
- name: a string
- tokensCount: an int
"""
ensure_noneornonemptystring('query')
return self._collect_data(
'users/search', 'users', None if query is None else {'q': query}
)
@api_call
def get_user(self, login: str) -> Dict[str, Any]:
"""Return a user details.
Performs a case-insensitive strict match (i.e., `login` case
is insignificant, but no fuzzy matching occurs).
# Required parameters
- login: a non-empty string
# Returned value
A dictionary. Refer to #search_users() for more information.
# Raised exceptions
Raises an _ApiError_ if user not known.
"""
ensure_nonemptystring('login')
users: List[Dict[str, Any]] = [
user
for user in self._collect_data(
'users/search', 'users', {'q': login}
)
if user['login'].upper() == login.upper()
]
if not users:
raise ApiError(f'User not known ({login})')
return users[0]
@api_call
def update_user(
self,
login: str,
email: Optional[str] = None,
name: Optional[str] = None,
) -> Dict[str, Any]:
"""Update a user details.
At least one of the optional parameters must be specified.
# Required parameters
- login: a non-empty string
# Optional parameters
- email: a non-empty string or None (None by default)
- name: a non-empty string or None (None by default)
# Returned value
A dictionary. Refer to #search_users() for more information.
"""
ensure_nonemptystring('login')
ensure_noneornonemptystring('email')
ensure_noneornonemptystring('name')
data = {'login': login}
add_if_specified(data, 'email', email)
add_if_specified(data, 'name', name)
result = self._post('users/update', data)
return result # type: ignore
@api_call
def deactivate_user(self, login: str) -> Dict[str, Any]:
"""Deactivate a user.
# Required parameter
- login: a non-empty string
# Returned value
A dictionary with the following entry:
- user: a dictionary
Refer to #create_user() for more details on its content.
"""
ensure_nonemptystring('login')
result = self._post('users/deactivate', {'login': login})
return result # type: ignore
@api_call
def update_identity_provider(
self, login: str, provider: str, externalIdentity: Optional[str]
):
"""Update identity provider
# Required Parameters
- login: a non-empty string
- provider: a non-empty string
# Optional Parameter
- externalIdentity: a string
"""
ensure_nonemptystring('login')
ensure_noneornonemptystring('provider')
ensure_noneornonemptystring('externalIdentity')
data = {'login': login, 'newExternalProvider': provider}
add_if_specified(data, 'newExternalIdentity', externalIdentity)
self._post('users/update_identity_provider', data)
####################################################################
# SonarQube qualitygates
#
# create_qualitygate
# delete_qualitygate
# list_qualitygates
# TODO set_project_qualitygate (?)
@api_call
def create_qualitygate(self, name: str) -> Dict[str, Any]:
"""Create a new quality gate.
# Required parameters
- name: a non-empty string
# Returned value
A dictionary with the following two entries:
- name: a string
- id: an integer
# Raised exceptions
If a quality gate with the same name already exists, an
_ApiError_ exception is raised.
"""
ensure_nonemptystring('name')
result = self._post('qualitygates/create', {'name': name})
return result # type: ignore
@api_call
def delete_qualitygate(self, qualitygate_id: int) -> None:
"""Delete a quality gate.
# Required parameters
- qualitygate_id: an integer
# Returned value
None.
# Raised exceptions
An _ApiError_ exception is raised if the quality gate does not
exist.
"""
ensure_instance('qualitygate_id', int)
result = self._post(
'qualitygates/destroy', {'id': str(qualitygate_id)}
)
return result # type: ignore
@api_call
def list_qualitygates(self) -> List[Dict[str, Any]]:
"""Return a list of existing quality gates.
# Returned value
A list of _quality gates_. Each quality gate is a dictionary
with the following two entries:
- id: an integer
- name: a string
"""
return self._collect_data('qualitygates/list', 'qualitygates')
####################################################################
# SonarQube qualityprofiles
#
# create_qualityprofile
# list_qualityprofiles
# update_qualityprofile_parent
# add_qualityprofile_group
# add_qualityprofile_project
# add_qualityprofile_user
@api_call
def create_qualityprofile(
self, profile_name: str, language: str
) -> Dict[str, Any]:
"""Create a new quality profile.
# Required parameters
- profile_name: a non-empty string
- language: a non-empty string
`language` must be a valid language.
# Returned value
A dictionary with the following two entries:
- profile: a dictionary
- ?warnings: a list of strings
`profile` is a dictionary with the following entries:
- isDefault: a boolean
- isInherited: a boolean
- language: a string
- languageName: a string
- name: a string
- key: a string
"""
ensure_nonemptystring('profile_name')
ensure_in('language', [l['key'] for l in self.list_languages()])
result = self._post(
'qualityprofiles/create',
{'name': profile_name, 'language': language},
)
return result # type: ignore
@api_call
def list_qualityprofiles(
self,
defaults: bool = False,
language: Optional[str] = None,
project_key: Optional[str] = None,
profile_name: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Return a list of matching quality profiles.
# Optional parameters
- defaults: a boolean (False by default)
- language: a string or None (None by default)
- project_key: a string or None (None by default)
- profile_name: a string or None (None by default)
# Returned value
A list of _quality profiles_. Each quality profile is a
dictionary with the following entries:
- key: a string
- name: a string
- language: a string
- languageName: a string
- isInherited: a boolean
- isBuiltIn: a boolean
- activeRuleCount: an integer
- activeDeprecatedRuleCount: an integer
- isDefault: a boolean
- ruleUpdatedAt: a string
- lastUsed: a string
- actions: a dictionary
"""
ensure_instance('defaults', bool)
ensure_noneornonemptystring('language')
ensure_noneornonemptystring('project_key')
ensure_noneornonemptystring('profile_name')
if language is not None:
ensure_in('language', [l['key'] for l in self.list_languages()])
params = {'defaults': 'true' if defaults else 'false'}
add_if_specified(params, 'language', language)
add_if_specified(params, 'project', project_key)
add_if_specified(params, 'qualityProfile', profile_name)
result = self._get('qualityprofiles/search', params=params).json()
return result['profiles'] # type: ignore
@api_call
def add_qualityprofile_project(
self, profile_key: str, project_key: str
) -> None:
"""Associate quality profile to project.
If the project is already added, nothing occurs.
# Required parameters
- profile_key: a non-empty string
- project_key: a non-empty string
# Returned value
None.
# Raised exceptions
An _ApiError_ exception is raised if `profile_key` or
`profile_key` does not exist.
"""
ensure_nonemptystring('profile_key')
ensure_nonemptystring('project_key')
result = self._post(
'qualityprofiles/add_project',
{'key': profile_key, 'project': project_key},
)
return result # type: ignore
@api_call
def update_qualityprofile_parent(
self, profile_name: str, language: str, parent_name: str
) -> None:
"""Change quality profile parent.
# Required parameters
- profile_name: a non-empty string
- language: a non-empty string
- parent_name: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('profile_name')
ensure_nonemptystring('parent_name')
ensure_nonemptystring('language')
result = self._post(
'qualityprofiles/change_parent',
{
'qualityProfile': profile_name,
'parentQualityProfile': parent_name,
'language': language,
},
)
return result # type: ignore
@api_call
def add_qualityprofile_user(
self, profile_name: str, language: str, login: str
) -> None:
"""Add user to quality profile writers.
Internal API.
# Required parameters
- profile_name: a non-empty string
- language: a non-empty string, the quality profile language
- login: a non-empty string, the user login to add
# Returned value
None.
"""
ensure_nonemptystring('profile_name')
ensure_nonemptystring('language')
ensure_nonemptystring('login')
data = {
'qualityProfile': profile_name,
'language': language,
'login': login,
}
result = self._post('qualityprofiles/add_user', data)
return result # type: ignore
@api_call
def add_qualityprofile_group(
self, profile_name: str, language: str, group: str
) -> None:
"""Add group to quality profile writers.
Internal API.
# Required parameters
- profile_name: a non-empty string
- language: a non-empty string, the quality profile language
- login: a non-empty string, the user login to add
# Returned value
None.
"""
ensure_nonemptystring('profile_name')
ensure_nonemptystring('language')
ensure_nonemptystring('group')
data = {
'qualityProfile': profile_name,
'language': language,
'group': group,
}
result = self._post('qualityprofiles/add_group', data)
return result # type: ignore
####################################################################
# SonarQube tokens
#
# generate_token
# revoke_token
# list_tokens
@api_call
def generate_token(self, login: str, name: str) -> Dict[str, str]:
"""Generate a new token.
# Required parameters
- login: a non-empty string
- name: a non-empty string
# Returned value
A dictionary with the following entries:
- login: a string
- name: a string
- token: a string
"""
ensure_nonemptystring('login')
ensure_nonemptystring('name')
result = self._post(
'user_tokens/generate', {'login': login, 'name': name}
)
return result # type: ignore
@api_call
def revoke_token(self, login: str, name: str) -> None:
"""Revoke token.
# Required parameters
- login: a non-empty string
- name: a non-empty string
# Returned value
None (even if there is no matching token or login).
"""
ensure_nonemptystring('login')
ensure_nonemptystring('name')
result = self._post(
'user_tokens/revoke', {'login': login, 'name': name}
)
return result # type: ignore
@api_call
def list_tokens(self, login: str) -> List[Dict[str, Any]]:
"""List existing tokens for user.
# Required parameters
- login: a non-empty string
# Returned value
A list of _tokens_. Each token is a dictionary with the
following two entries:
- name: a string
- createdAt: a string (a timestamp)
"""
ensure_nonemptystring('login')
return self._collect_data(
'user_tokens/search', 'userTokens', {'login': login}
)
####################################################################
# SonarQube projects
#
# list_projects
@api_call
def list_projects(
self,
analyze_before: Optional[str] = None,
on_provisioned_only: bool = False,
projects: Optional[str] = None,
qualifiers: str = 'TRK',
) -> List[Dict[str, Any]]:
"""Return a list of matching projects.
# Optional parameters
- analyze_before: a string (ISO Timestamp representation) or
None (None by default)
- on_provisioned_only: a bolean (False by default)
- projects: a string (comma-separated list of project keys) or
None (None by default)
- qualifiers: a string (comma-separated list, `'TRK'` by
default)
# Returned value
A list of _projects_. Each project is a dictionary with the
following entries:
- organization: a string
- id: a string
- key: a string
- name: a string
- qualifier: a string, one of `'APP'`, `'VW'`, or `'TRK'`
- visibility: a string, either `'public'` or `'private'`
- lastAnalysisDate: a string (ISO Timestamp representation)
"""
ensure_noneornonemptystring('analyze_before')
ensure_noneorinstance('on_provisioned_only', bool)
ensure_noneornonemptystring('projects')
ensure_nonemptystring('qualifiers')
params = {
'onProvisionedOnly': 'true' if on_provisioned_only else 'false',
'qualifiers': qualifiers,
}
add_if_specified(params, 'analyzedBefore', analyze_before)
add_if_specified(params, 'projects', projects)
return self._collect_data('projects/search', 'components', params)
####################################################################
# SonarQube projectanalyses
#
# list_projectanalyses
@api_call
def list_projectanalyses(
self,
project_key: str,
category: Optional[str] = None,
from_date: Optional[str] = None,
to_date: Optional[str] = None,
) -> List[Dict[str, Any]]:
"""Return a list of matching project analyses.
`to_date` and `from_date` are both inclusive.
`category`, if specified, must be a value listed in
`EVENT_CATEGORIES`.
# Required parameters
- project_key: a non-empty string
# Optional parameters
- category: a non-empty string or None (None by default)
- from_date: a non-empty string (a date or datetime) or None
(None by default)
- to_date: a non-empty string (a date or datetime) or None (None
by default)
# Returned value
A list of _project analyses_. Each project analysis is a
dictionary with the following three entries:
- key: a string
- events: a list of dictionaries
- date: a string (ISO timestamp representation)
Entries in the `events` list have the following entries:
- key: a string
- name: a string
- category: a string
There may be other entries, depending on the event category.
"""
ensure_nonemptystring('project_key')
ensure_noneornonemptystring('category')
ensure_noneornonemptystring('from_date')
ensure_noneornonemptystring('to_date')
if category:
ensure_in('category', EVENT_CATEGORIES)
params = {'project': project_key}
add_if_specified(params, 'category', category)
add_if_specified(params, 'from', from_date)
add_if_specified(params, 'to', to_date)
return self._collect_data(
'project_analyses/search', 'analyses', params
)
####################################################################
# SonarQube usergroups
#
# create_usergroup
# add_usergroup_user
# remove_usergroup_user
# delete_usergroup
# list_usergroups
@api_call
def create_usergroup(
self, name: str, description: Optional[str] = None
) -> Dict[str, Any]:
"""Create a new group.
# Required parameters
- name: a non-empty string
# Optional parameters
- description: a string or None (None by default)
# Returned value
A dictionary with the following entry:
- group: a dictionary
The `group` dictionary has the following entries:
- id: an integer or a string
- organization: a string
- name: a string
- description: a string
- membersCount: an integer
- default: a boolean
"""
ensure_nonemptystring('name')
ensure_noneorinstance('description', str)
data = {'name': name}
add_if_specified(data, 'description', description)
result = self._post('user_groups/create', data)
return result # type: ignore
@api_call
def add_usergroup_user(
self,
group_id: Optional[int] = None,
group_name: Optional[str] = None,
login: Optional[str] = None,
) -> None:
"""Add a user to a group.
If `login` is not specified, add current user to group.
Adding a user that is already a member of the group is safely
ignored.
# Required parameters
- `group_id` OR `group_name`: an integer or a string
# Optional parameters
- login: a non-empty string or None (None by default)
# Returned value
None.
"""
ensure_onlyone('group_id', 'group_name')
ensure_noneornonemptystring('login')
if group_id is None and group_name is None:
raise ValueError('group_id or group_name must be specified')
if group_id is not None:
data: Dict[str, Any] = {'id': group_id}
else:
data = {'name': group_name}
add_if_specified(data, 'login', login)
result = self._post('user_groups/add_user', data)
return result # type: ignore
@api_call
def remove_usergroup_user(
self,
group_id: Optional[int] = None,
group_name: Optional[str] = None,
login: Optional[str] = None,
) -> None:
"""Remove a user from a group.
If `login` is not specified, remove current user from group.
Attempting to remove a known user that is not a member of the
group is safely ignored.
# Required parameters
- `group_id` OR `group_name`: an integer or a string
# Optional parameters
- login: a non-empty string or None (None by default)
# Returned value
None.
"""
ensure_onlyone('group_id', 'group_name')
ensure_noneornonemptystring('login')
if group_id is not None:
data: Dict[str, Any] = {'id': group_id}
else:
data = {'name': group_name}
add_if_specified(data, 'login', login)
result = self._post('user_groups/remove_user', data)
return result # type: ignore
@api_call
def delete_usergroup(
self, group_id: Optional[int] = None, group_name: Optional[str] = None
) -> None:
"""Delete a group.
# Required parameters
- `group_id` OR `group_name`: an integer or a string
# Returned value
None.
"""
ensure_onlyone('group_id', 'group_name')
if group_id is not None:
data: Dict[str, Any] = {'id': group_id}
else:
data = {'name': group_name}
result = self._post('user_groups/delete', data)
return result # type: ignore
@api_call
def list_usergroups(
self, query: Optional[str] = None, fields: Optional[str] = None
) -> List[Dict[str, Any]]:
"""Return the complete list of groups from SonarQube instance.
# Optional parameters
- query: a non-empty string or None (None by default)
- fields: a non-empty string or None (None by default)
# Returned value
A list of _groups_. Each group is a dictionary with the
following entries (assuming the default value for `fields`):
- id: an integer
- name: a string
- description: a string
- membersCount: an integer
- default: a boolean
"""
ensure_noneornonemptystring('query')
ensure_noneornonemptystring('fields')
params: Dict[str, str] = {}
add_if_specified(params, 'q', query)
add_if_specified(params, 'f', fields)
return self._collect_data(
'user_groups/search', 'groups', params if params else None
)
####################################################################
# SonarQube misc. operations
#
# list_upgrades
# TOTEST migrate_db
# TOTEST get_db_migration_status
# TOTEST get_status
# TOTEST get_health
# TOTEST get_ping
# restart
# TOTEST list_plugins
# list_plugins_updates
# TOTEST install_plugin
# update_plugin
@api_call
def list_upgrades(self) -> List[Dict[str, Any]]:
"""Return the list of available upgrades for SonarQube instance.
# Returned value
A list of _available upgrades_. An available upgrade is a
dictionary with the following entries:
- releaseDate: a string
- downloadUrl: a string
- changeLogUrl: a string
- version: a string
- description: a string
- plugins: a dictionary
The `plugins` entry is a dictionary with the following entries:
- requireUpdate: a possibly empty list of plugins
- incompatible: a possibly empty list of plugins
Items in the `requireUpdate` list are dictionaries with the
following entries:
- homepageUrl: a sting
- license: a string
- version: a string
- issueTrackerUrl: a string
- organizationUrl: a string
- key: a string
- category: a string
- name: a string
- description: a string
- organizationName: a string
"""
return self._collect_data('system/upgrades', 'upgrades')
@api_call
def migrate_db(self) -> Dict[str, str]:
"""Migrate the database to match current version of SonarQube.
# Returned value
A dictionary with the following entries:
- state: a string
- message: a string
- startedAt: a string (a timestamp)
"""
return self._post('system/migrate_db') # type: ignore
@api_call
def get_db_migration_status(self) -> Dict[str, str]:
"""Return database migration status.
# Returned value
A dictionary with the following entries:
- state: a string
- message: a string
- startedAt: a string (a timestamp)
`state` values are:
- NO_MIGRATION
- NOT_SUPPORTED
- MIGRATION_RUNNING
- MIGRATION_SUCCEEDED
- MIGRATION_FAILED
- MIGRATION_REQUIRES
"""
return self._get('system/db_migration_status') # type: ignore
@api_call
def get_status(self) -> Dict[str, str]:
"""Return state information about instance.
# Returned value
A dictionary with the following entries:
- id: a string
- version: a string
- status: a string
`status` values are:
- STARTING
- UP
- DOWN
- RESTARTING
- DB_MIGRATION_NEEDED
- DB_MIGRATION_RUNNING
"""
return self._get('system/status') # type: ignore
@api_call
def get_health(self) -> Dict[str, Any]:
"""Return system health.
# Returned value
A dictionary with the following entries:
- health: a string (`'GREEN'`, `'YELLOW'` or `'RED'`)
- causes: a dictionary
- nodes: a list of dictionaries
`causes` contains the following entry:
- message: a string
Items in `nodes` are dictionaries with the following entries:
- name: a string
- type: a string
- host: a string
- port: an integer
- startedAt: a string (a timestamp)
- health: a string (`'GREEN'`, `'YELLOW'` or `'RED'`)
- causes: a dictionary
"""
return self._get('system/health') # type: ignore
@api_call
def ping(self) -> str:
"""Return "pong" as plain text."""
return self._get('system/ping').text
@api_call
def restart(self) -> None:
"""Restart server."""
return self._post('system/restart') # type: ignore
@api_call
def list_plugins(
self, fields: Optional[str] = None
) -> List[Dict[str, Any]]:
"""Return the list of installed plugins.
# Optional parameters
- fields: a string, the comma-separated list of additional
fields to return or None (None by default)
# Returned value
A list of _installed plugins_. An installed plugin is a
dictionary with the following entries:
- key: a string
- name: a string
- description: a string
- version: a string
- license: a string
- organizationName: a string
- organizationUrl: a string
- editionBundled: a boolean,
- homepageUrl: a string
- issueTrackerUrl: a string
- implementationBuild: a string
- filename: a string
- hash: a string
- sonarLintSupported: a string
- updatedAt: an integer
"""
ensure_noneornonemptystring('fields')
return self._collect_data(
'plugins/installed',
'plugins',
None if fields is None else {'f': fields},
)
@api_call
def list_plugins_updates(self) -> List[Dict[str, Any]]:
"""Return the list of available plugin upgrades.
# Returned value
A list of _upgradeable plugins_. An upgradeable plugin is a
dictionary with the following entries:
- key: a string
- name: a string
- category: a string
- description: a string
- license: a string
- organizationName: a string
- organizationUrl: a string
- termsAndConditionsUrl: a string
- editionBundled: a boolean
- updates: a list of dictionaries
Items in the `updates` list are dictionaries containing the
following entries:
- release: a dictionary
- status: a string
- requires: a list
`release` is a dictionary with the following entries:
- version: a string
- date: a string
- description: a string
- changeLogUrl: a string
"""
return self._collect_data('plugins/updates', 'plugins')
@api_call
def install_plugin(self, key: str) -> None:
"""Install the latest compatible version of plugin.
# Required parameters
- key: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('key')
result = self._post('plugins/install', data={'key': key})
return result # type: ignore
@api_call
def update_plugin(self, key: str) -> None:
"""Update an installed plugin to the latest compatible version.
# Required parameters
- key: a non-empty string
# Returned value
None.
"""
ensure_nonemptystring('key')
result = self._post('plugins/update', data={'key': key})
return result # type: ignore
####################################################################
# SonarQube private helpers
def _post(
self,
api: str,
data: Optional[Union[MutableMapping[str, str], bytes]] = None,
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().post(api_url, data)
def _get(
self,
api: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().get(api_url, params=params)
    def _collect_data(
        self,
        api: str,
        key: str,
        params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
    ) -> List[Any]:
        """Return SonarQube API call results, collecting key values.

        The API call is expected to return a JSON structure.  Paginated
        responses are followed until all `key` values are collected.

        `params`, if specified, is a dictionary and will be passed to
        the API.

        # Raised exceptions

        Raises an _ApiError_ if the response is not JSON or if a
        subsequent page comes back empty.
        """
        page_size = '100'
        page = 1
        api_url = join_url(self.url, api)
        # Work on a copy so the caller's mapping is never mutated.
        _params: Dict[str, Union[str, List[str], None]] = {}
        if params is not None:
            _params.update(params)
        try:
            # NOTE(review): most SonarQube endpoints name the page-size
            # parameter 'ps', not 'pageSize' — if 'pageSize' is ignored
            # the server default (100) happens to match; verify.
            _params.update({'p': str(1), 'pageSize': page_size})
            req = self.session().get(api_url, params=_params).json()
        except ValueError:
            raise ApiError(
                f'Unexpected response, was expecting JSON ({api_url})'
            ) from None
        values: List[Any] = req[key]
        # Keep fetching pages while the reported total is not reached.
        while 'paging' in req and len(values) < req['paging']['total']:
            page += 1
            _params.update({'p': str(page), 'pageSize': page_size})
            req = self.session().get(api_url, params=_params).json()
            if req:
                values += req[key]
            else:
                raise ApiError(f'Empty response ({api_url})')
        return values
from typing import Any, Dict, Iterable, List, Mapping, Optional, Union
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.sessions import prepare_session
from zabel.commons.utils import (
add_if_specified,
api_call,
ensure_in,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_noneornonemptystring,
ensure_onlyone,
join_url,
)
########################################################################
########################################################################
# GitHub low-level api
class GitHub:
"""GitHub Low-Level Wrapper.
# Reference URL
- <https://developer.github.com/v3/>
- <https://developer.github.com/enterprise/2.20/v3>
- <https://stackoverflow.com/questions/10625190>
# Implemented features
- users
- organizations
- repositories
- hooks
- misc. operations (version, staff reports & stats)
# Sample use
```python
# standard use
from zabel.elements.clients import GitHub
url = 'https://github.example.com/api/v3/'
gh = GitHub(url, user, token)
gh.list_users()
# enabling management features
from zabel.elements import clients
mngt = 'https://github.example.com/'
gh = clients.GitHub(url, user, token, mngt)
gh.create_organization('my_organization', 'admin')
```
"""
    def __init__(
        self,
        url: str,
        user: str,
        token: str,
        management_url: Optional[str] = None,
        verify: bool = True,
    ) -> None:
        """Create a GitHub instance object.

        The optional `management_url` is only required if
        'enterprise' features are used (staff reports, ...).

        # Required parameters

        - url: a non-empty string, the API endpoint (e.g.
          `'https://github.example.com/api/v3/'`)
        - user: a string
        - token: a string (a personal access token)

        # Optional parameters

        - management_url: a non-empty string or None (None by
          default)
        - verify: a boolean (True by default)

        `verify` can be set to False if disabling certificate checks for
        GitHub communication is required.  Tons of warnings will occur
        if this is set to False.
        """
        ensure_nonemptystring('url')
        ensure_instance('user', str)
        ensure_instance('token', str)
        ensure_noneornonemptystring('management_url')
        self.url = url
        # Basic-auth pair used for every request.
        self.auth = (user, token)
        self.management_url = management_url
        self.verify = verify
        # Pre-configured requests session factory.
        self.session = prepare_session(self.auth, verify=verify)
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
url, auth, mngt = self.url, self.auth[0], self.management_url
return f'<{self.__class__.__name__}: {url!r}, {auth!r}, {mngt!r}>'
####################################################################
# GitHub users (that or organization members?)
#
# list_users
# get_user
# TODO update_user
# TODO get_user_organizations
# suspend_user
# unsuspend_user
@api_call
def list_users(self) -> List[Dict[str, Any]]:
"""Return the list of users.
This API returns users and organizations. Use the `type` entry
in the returned items to distinguish (`'User'` or
`'Organization'`).
# Returned value
A list of _users_. A user is a dictionary with the following
entries:
- avatar_url: a string
- events_url: a string
- followers_url: a string
- following_url: a string
- gist_url: a string
- gravatar_id: a string
- html_url: a string
- id: an integer
- login: a string
- node_id: a string
- organizations_url: a string
- received_events_url: a string
- repos_url: a string
- site_admin: a boolean
- starred_url: a string
- subscription_url: a string
- type: a string
- url: a string
"""
return self._collect_data('users')
@api_call
def get_user(self, user_name: str) -> Dict[str, Any]:
"""Return the user details.
# Required parameters
- user_name: a non-empty string
# Returned value
A dictionary with the following entries:
- avatar_url: a string
- bio:
- blog:
- company:
- created_at: a string representing a datetime
- email:
- events_url: a string
- followers: an integer
- followers_url: a string
- following: an integer
- following_url: a string
- gist_url: a string
- gravatar_id: a string
- hireable:
- html_url: a string
- id: an integer
- location:
- login: a string
- name:
- organizations_url: a string
- public_gists: an integer
- public_repos: an integer
- received_events_url: a string
- repos_url: a string
- site_admin: a boolean
- starred_url: a string
- subscription_url: a string
- suspend_at:
- type: a string
- updated_at:
- url: a string
"""
ensure_nonemptystring('user_name')
return self._get(f'users/{user_name}') # type: ignore
@api_call
def suspend_user(self, user_name: str) -> bool:
"""Suspend the specified user.
Suspending an already suspended user is allowed.
# Required parameters
- user_name: a non-empty string
# Returned value
A boolean. True if the operation was successful.
"""
ensure_nonemptystring('user_name')
return self._put(f'users/{user_name}/suspended').status_code == 204
@api_call
def unsuspend_user(self, user_name: str) -> bool:
"""Unsuspend the specified user.
Unsuspending a non-suspended user is allowed.
# Required parameters
- user_name: a non-empty string
# Returned value
A boolean. True if the operation was successful.
"""
ensure_nonemptystring('user_name')
return self._delete(f'users/{user_name}/suspended').status_code == 204
####################################################################
# GitHub organizations
#
# organization name = login key
#
# list_organizations
# get_organization
# TODO update_organization
# list_organization_repositories
# list_organization_members
# list_organization_outsidecollaborators
# get_organization_membership
# add_organization_membership
# rm_organization_membership
# add_organization_outside_collaborator
# rm_organization_outside_collaborator
# list_organization_teams
#
# Part of enterprise administration
# create_organization
# TODO rename_organization
@api_call
def list_organizations(self) -> List[Dict[str, Any]]:
"""Return list of organizations.
# Returned value
A list of _organizations_. Each organization is a dictionary
with the following keys:
- avatar_url: a string
- description: a string
- events_url: a string
- hooks_url: a string
- id: an integer
- issues_url: a string
- login: a string
- members_url: a string
- node_id: a string
- public_members_url: a string
- repos_url: a string
- url: a string
The organization name is the `login` key.
"""
return self._collect_data('organizations')
@api_call
def list_organization_teams(
self, organization_name: str
) -> List[Dict[str, Any]]:
"""Return list of teams.
# Required parameters
- organization_name: a non-empty string
# Returned value
A list of _teams_. Each team is a dictionary with the following
keys:
- name: a string
- id: an integer
- node_id: a string
- slug: a string
- description: a string
- privacy: a string
- url: a string
- html_url: a string
- members_url: a string
- repositories_url: a string
- permission: a string
- parent: ?
"""
ensure_nonemptystring('organization_name')
return self._get(f'orgs/{organization_name}/teams') # type: ignore
@api_call
def get_organization(self, organization_name: str) -> Dict[str, Any]:
"""Return extended information on organization.
# Required parameters
- organization_name: a non-empty string
# Returned value
A dictionary with the following keys:
- login
- id
- url
- repos_url
- events_url
- hooks_url
- issues_url
- members_url
- public_members_url
- avatar_url
- description
- ?name
- ?company
- ?blog
- ?location
- ?email
- followers
- following
- html_url
- created_at
- type
- ?total_private_repos
- ?owned_private_repos
- ?private_gists
- ?disk_usage
- ?collaborators
- ?billing_email
- ?plan
- ?default_repository_settings
- ?members_can_create_repositories
- has_organization_projects
- public_gists
- updated_at
- has_repository_projects
- public_repos
"""
ensure_nonemptystring('organization_name')
return self._get(f'orgs/{organization_name}') # type: ignore
@api_call
def list_organization_repositories(
self, organization_name: str, headers: Optional[Dict[str, str]] = None
) -> List[Dict[str, Any]]:
"""Return the list of repositories for organization.
# Required parameters
- organization_name: a non-empty string
# Optional parameters
- headers: a dictionary or None (None by default)
# Returned value
A list of _repositories_. Each repository is a dictionary. See
#list_repositories() for its format.
"""
ensure_nonemptystring('organization_name')
return self._collect_data(
f'orgs/{organization_name}/repos', headers=headers
)
@api_call
def create_organization(
self,
organization_name: str,
admin: str,
profile_name: Optional[str] = None,
) -> Dict[str, Any]:
"""Create GitHub organization.
# Required parameters
- organization_name: a non-empty string
- admin: a non-empty string
# Optional parameters
- profile_name: a string or None (None by default)
# Returned value
An _organization_. An organization is a dictionary.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('admin')
ensure_noneorinstance('profile_name', str)
data = {'login': organization_name, 'admin': admin}
add_if_specified(data, 'profile_name', profile_name)
result = self._post('admin/organizations', json=data)
return result # type: ignore
@api_call
def list_organization_members(
self, organization_name: str, role: str = 'all'
) -> List[Dict[str, Any]]:
"""Return the list of organization members.
# Required parameters
- organization_name: a non-empty string
# Optional parameters
- role: a non-empty string, one of 'all', 'member', or 'admin'
('all' by default)
# Returned value
A list of _members_. Each member is a dictionary.
"""
ensure_nonemptystring('organization_name')
ensure_in('role', ('all', 'member', 'admin'))
return self._collect_data(
f'orgs/{organization_name}/members', params={'role': role}
)
@api_call
def list_organization_outsidecollaborators(
self, organization_name: str
) -> List[Dict[str, Any]]:
"""Return the list of organization outside collaborators.
# Required parameters
- organization_name: a non-empty string
# Returned value
A list of _members_ (outside collaborators). Each member is a
dictionary.
"""
ensure_nonemptystring('organization_name')
return self._collect_data(
f'orgs/{organization_name}/outside_collaborators'
)
@api_call
def get_organization_membership(
self, organization_name: str, user: str
) -> Dict[str, Any]:
"""Get organization membership.
# Required parameters
- organization_name: a non-empty string
- user: a non-empty string
# Returned value
A dictionary with the following entries:
- url: a string
- state: a string
- role: a string
- organization_url: a string
- organization: a dictionary
- user: a dictionary
`role` is either `'admin'` or `'member'`. `state` is either
`'active'` or `'pending'`.
# Raised exceptions
Raises an _ApiError_ if the caller is not a member of the
organization.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('user')
return self._get(f'orgs/{organization_name}/memberships/{user}') # type: ignore
@api_call
def add_organization_membership(
self, organization_name: str, user: str, role: str = 'member'
) -> Dict[str, Any]:
"""Add or update organization membership.
# Required parameters
- organization_name: a non-empty string
- user: a non-empty string
# Optional parameters
- role: a non-empty string (`'member'` by default)
`role` must be either `'member'` or `'admin'`, if provided.
# Returned value
A dictionary with the following entries:
- url: a string
- state: a string
- role: a string
- organization_url: a string
- organization: a dictionary
- user: a dictionary
If `user` already had membership, `state` is `'active'`. If
`user` was previously unaffiliated, `state` is `'pending'`.
Refer to #list_organizations() and #list_users() for more details
on `organization` and `user` content.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('user')
ensure_in('role', ['member', 'admin'])
result = self._put(
f'orgs/{organization_name}/memberships/{user}', json={'role': role}
)
return result # type: ignore
@api_call
def remove_organization_membership(
self, organization_name: str, user: str
) -> bool:
"""Remove user from organization.
Removing users will remove them from all teams and they will no
longer have any access to the organization's repositories.
# Required parameters
- organization_name: a non-empty string
- user: a non-empty string, the login of the user
# Returned Value
A boolean. True if the user has been removed from the
organization.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('user')
result = self._delete(f'orgs/{organization_name}/members/{user}')
return (result.status_code // 100) == 2
rm_organization_membership = remove_organization_membership
@api_call
def add_organization_outside_collaborator(
self, organization_name: str, user: str
) -> bool:
"""Add outside collaborator to organization.
# Required parameters
- organization_name: a non-empty string
- user: a non-empty string, the login of the user
# Returned value
A boolean. True if the outside collaborator was added to the
organization.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('user')
result = self._put(
f'orgs/{organization_name}/outside_collaborators/{user}'
)
return (result.status_code // 100) == 2
@api_call
def remove_organization_outside_collaborator(
self, organization_name: str, user: str
) -> bool:
"""Remove outside collaborator from organization.
# Required parameters
- organization_name: a non-empty string
- user: a non-empty string, the login of the user
# Returned value
A boolean. True if the outside collaborator was removed from
the organization.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('user')
result = self._delete(
f'orgs/{organization_name}/outside_collaborators/{user}'
)
return (result.status_code // 100) == 2
rm_organization_outside_collaborator = (
remove_organization_outside_collaborator
)
####################################################################
# GitHub teams
#
# list_team_members
@api_call
def list_team_members(
self, organization_name: str, team_name: str
) -> List[Dict[str, Any]]:
"""Return a list of members.
# Required parameters
- organization_name: a non-empty string
- team_name: a non-empty string
# Returned value
A list of _members_. Each member is a dictionary with the
following entries:
- avatar_url: a string
- events_url: a string
- followers_url: a string
- following_url: a string
- gists_url: a string
- gravatar_id: a string
- html_url: a string
- id: an integer
- login: a string
- node_id: a string
- organizations_url: a string
- received_events_url: a string
- repos_url: a string
- site_admin: a boolean
- starred_url: a string
- subscriptions_url: a string
- type: a string
- url: a string
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('team_name')
return self._get(f'orgs/{organization_name}/teams/{team_name}/members') # type: ignore
####################################################################
# GitHub repositories
#
# repository name = name key
#
# list_repositories
# list_public_repositories
# get_repository
# create_repository
# TODO update_repository
# TODO delete_repository
# list_repository_commits
# get_repository_commit
# list_reporitory_teams
# list_repository_collaborators
# add_repository_collaborator
# rm_repository_collaborator
# list_repository_permissions_user
@api_call
def list_repositories(self) -> List[Dict[str, Any]]:
"""Return the list of repositories.
# Returned value
A list of _repositories_. Each repository is a dictionary with
the following entries:
+ archive_url: a string
+ assignees_url: a string
+ blobs_url: a string
+ branches_url: a string
- clone_url: a string
+ collaborators_url: a string
+ comments_url: a string
+ commits_url: a string
+ compare_url: a string
+ contents_url: a string
+ contributors_url: a string
- created_at: a string (a timestamp)
- default_branch: a string
+ deployments_url: a string
+ description: a string
+ downloads_url: a string
+ events_url: a string
+ fork: a boolean
- forks: an integer
- forks_count: an integer
+ forks_url: a string
+ full_name: a string
+ git_commits_url: a string
+ git_refs_url: a string
+ git_tags_url: a string
- git_url: a string
- has_downloads: a boolean
- has_issues: a boolean
- has_pages: a boolean
- has_projects: a boolean
- has_wiki: a boolean
- homepage
+ hooks_url: a string
+ html_url: a string
+ id: an integer
+ issue_comment_url
+ issue_events_url
+ issues_url: a string
+ keys_url: a string
+ labels_url: a string
- language: a string
+ languages_url: a string
+ merges_url: a string
+ milestones_url: a string
- mirror_url: a string
+ name: a string
+ node_id: a string
+ notifications_url: a string
- open_issues: an integer
- open_issues_count: an integer
+ owner: a dictionary
- permissions: a dictionary
+ private: a boolean
+ pulls_url: a string
- pushed_at: a string (a timestamp)
+ releases_url
- size: an integer
- ssh_url: a string
- stargazers_count: an integer
+ stargazers_url: a string
+ statuses_url: a string
+ subscribers_url: a string
+ subscription_url: a string
- svn_url: a string
+ tags_url: a string
+ teams_url: a string
+ trees_url: a string
- updated_at: a string (a timestamp)
+ url: a string
- watchers: an integer
- watchers_count: an integer
"""
return self._collect_data('repositories', params={'visibility': 'all'})
@api_call
def list_public_repositories(self) -> List[Dict[str, Any]]:
"""Return the list of public repositories.
# Returned value
A list of _repositories_. Each repository is a dictionary. See
#list_repositories() for its description.
"""
return self._collect_data('repositories')
@api_call
def get_repository(
self, organization_name: str, repository_name: str
) -> Dict[str, Any]:
"""Return the repository details.
# Required parameters
- organization_name: a non-empty string
- repository_name: a non-empty string
# Returned value
A _repository_. See #list_repositories() for its description.
"""
ensure_nonemptystring('organization_name')
ensure_nonemptystring('repository_name')
return self._get(f'repos/{organization_name}/{repository_name}') # type: ignore
@api_call
def create_repository(
self,
organization_name: str,
repository_name: str,
description: Optional[str] = None,
homepage: Optional[str] = None,
private: bool = False,
has_issues: bool = True,
has_projects: Optional[bool] = None,
has_wiki: bool = True,
team_id: Optional[int] = None,
auto_init: bool = False,
gitignore_template: Optional[str] = None,
license_template: Optional[str] = None,
allow_squash_merge: bool = True,
allow_merge_commit: bool = True,
allow_rebase_merge: bool = True,
) -> Dict[str, Any]:
"""Create a new repository in organization organization_name.
# Required parameters
- organization_name: a non-empty string
- repository_name: a non-empty string
# Optional parameters
- description: a string or None (None by default)
- homepage: a string or None (None by default)
- private: a boolean (False by default)
- has_issues: a boolean (True by default)
- has_projects: a boolean (True by default except for
organizations that have disabled repository projects)
- has_wiki: a boolean (True by default)
- team_id: an integer or None (None by default)
- auto_init: a boolean (False by default)
- gitignore_template: a string or None (None by default)
- license_template: a string or None (None by default)
- allow_squash_merge: a boolean (True by default)
- allow_merge_commit: a boolean (True by default)
- allow_rebase_merge: a boolean (True by default)
# Returned value
A _repository_. See #list_repositories() for its content.
"""
ensure_nonemptystring('repository_name')
ensure_nonemptystring('organization_name')
ensure_noneorinstance('description', str)
ensure_instance('private', bool)
ensure_instance('has_issues', bool)
ensure_instance('has_wiki', bool)
ensure_instance('auto_init', bool)
ensure_instance('allow_squash_merge', bool)
ensure_instance('allow_merge_commit', bool)
ensure_instance('allow_rebase_merge', bool)
data = {
'name': repository_name,
'private': private,
'has_issues': has_issues,
'has_wiki': has_wiki,
'auto_init': auto_init,
'allow_squash_merge': allow_squash_merge,
'allow_merge_commit': allow_merge_commit,
'allow_rebase_merge': allow_rebase_merge,
}
add_if_specified(data, 'description', description)
add_if_specified(data, 'homepage', homepage)
add_if_specified(data, 'has_projects', has_projects)
add_if_specified(data, 'team_id', team_id)
add_if_specified(data, 'gitignore_template', gitignore_template)
add_if_specified(data, 'license_template', license_template)
result = self._post(f'orgs/{organization_name}/repos', json=data)
return result # type: ignore
@api_call
def update_repository(
self,
organization_name: str,
repository_name: str,
patched_attributes: Dict[str, Any],
) -> Dict[str, Any]:
"""
Updates the attributes of a repository using a patch
(a subset of attributes).
The endpoint on GitHub is :
https://docs.github.com/en/[email protected]/rest/repos/repos#update-a-repository
# Required parameters
- organization_name: a non-empty string
- repository_name: a non-empty string
- patched_attributes: a dict of attributes/values, see
#create_repository() for the details of patchable
attributes.
# Returned value
A _repository_. See #list_repositories() for its content.
"""
ensure_nonemptystring('repository_name')
ensure_nonemptystring('organization_name')
ensure_instance('patched_attributes', dict)
response = self._patch(
f'repos/{organization_name}/{repository_name}', patched_attributes
)
return response # type: ignore
@api_call
def list_repository_topics(
    self, organization_name: str, repository_name: str
) -> List[str]:
    """Return the list of topics.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of strings (the list may be empty).
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # The 'mercy' preview media type is required by the topics API.
    preview_headers = {'Accept': 'application/vnd.github.mercy-preview+json'}
    topics_api = f'repos/{organization_name}/{repository_name}/topics'
    payload = self._get(topics_api, headers=preview_headers).json()
    return payload['names']  # type: ignore
@api_call
def replace_repository_topics(
    self,
    organization_name: str,
    repository_name: str,
    topics: Iterable[str],
) -> List[str]:
    """Replace the list of topics.

    The provided list replaces the repository topics as a whole; an
    empty list clears all topics.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - topics: a list of strings

    # Returned value

    A possibly empty list of strings.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('topics', list)
    # The 'mercy' preview media type is required by the topics API.
    response = self._put(
        f'repos/{organization_name}/{repository_name}/topics',
        json={'names': topics},
        headers={'Accept': 'application/vnd.github.mercy-preview+json'},
    ).json()
    return response['names']  # type: ignore
@api_call
def list_repository_codefrequency(
    self, organization_name: str, repository_name: str
) -> List[List[int]]:
    """Return the list of number of additions&deletions per week.

    The returned value is cached. A first call for a given
    repository may return a 202 response code. Retrying a moment
    later will return the computed value.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of lists. Each item in the list is a list with the
    following three values, in order:

    - week: an integer (a unix timestamp)
    - additions: an integer
    - deletions: an integer
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # No pagination here: the stats endpoint returns the full series.
    result = self._get(
        f'repos/{organization_name}/{repository_name}/stats/code_frequency'
    )
    return result  # type: ignore
@api_call
def list_repository_contributions(
    self, organization_name: str, repository_name: str
) -> List[Dict[str, Any]]:
    """Return the list of contributors with their contributions.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of _contributors_. Each contributor is a dictionary with
    the following entries:

    - author: a dictionary
    - total: the number of commits authored by the contributor
    - weeks: a list of dictionaries describing the contributions
      per week

    `author` contains the following non exhaustive entries:

    - id: a string
    - login: a string
    - type: a string

    Each item in `weeks` has the following entries:

    - w: a string (a unix timestamp)
    - a: an integer (number of additions)
    - d: an integer (number of deletions)
    - c: an integer (number of commits)
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # Like other stats endpoints, this value is computed and cached
    # server-side; a first call may return 202 while GitHub computes it.
    result = self._get(
        f'repos/{organization_name}/{repository_name}/stats/contributors'
    )
    return result  # type: ignore
@api_call
def list_repository_commits(
    self,
    organization_name: str,
    repository_name: str,
    sha: Optional[str] = None,
    path: Optional[str] = None,
    author: Optional[str] = None,
    since: Optional[str] = None,
    until: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Return the list of commits.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Optional parameters

    - sha: a string or None (None by default)
    - path: a string or None (None by default)
    - author: a non-empty string or None (None by default)
    - since: a non-empty string (an ISO 8601 timestamp) or None
      (None by default)
    - until: a non-empty string (an ISO 8601 timestamp) or None
      (None by default)

    # Returned value

    A list of _commits_. Each commit is a dictionary.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # Parameters left at None are omitted from the query string, so
    # GitHub applies its own defaults for them.
    params: Dict[str, Any] = {}
    add_if_specified(params, 'sha', sha)
    add_if_specified(params, 'path', path)
    add_if_specified(params, 'author', author)
    add_if_specified(params, 'since', since)
    add_if_specified(params, 'until', until)
    result = self._get(
        f'repos/{organization_name}/{repository_name}/commits',
        params=params,
    )
    return result  # type: ignore
@api_call
def get_repository_commit(
    self, organization_name: str, repository_name: str, ref: str
) -> Dict[str, Any]:
    """Return a specific commit.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - ref: a non-empty string

    # Returned value

    A _commit_. A commit is a dictionary.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('ref')
    commit_api = (
        f'repos/{organization_name}/{repository_name}/commits/{ref}'
    )
    return self._get(commit_api)  # type: ignore
@api_call
def list_repository_teams(
    self, organization_name: str, repository_name: str
) -> List[Dict[str, Any]]:
    """Return list of teams.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of _teams_. Each team is a dictionary with the following
    keys:

    - name: a string
    - id: an integer
    - node_id: a string
    - slug: a string
    - description: a string
    - privacy: a string
    - url: a string
    - html_url: a string
    - members_url: a string
    - repositories_url: a string
    - permission: a string
    - parent: ?
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # NOTE(review): single GET, unlike list_repository_collaborators()
    # which uses _collect_data() — results beyond the first page may be
    # missed for repositories with many teams; confirm intended.
    return self._get(f'repos/{organization_name}/{repository_name}/teams')  # type: ignore
@api_call
def list_repository_collaborators(
    self, organization_name: str, repository_name: str
) -> List[Dict[str, Any]]:
    """Return list of collaborators.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of _members_. Each member is a dictionary with the
    following entries:

    - avatar_url: a string
    - events_url: a string
    - followers_url: a string
    - following_url: a string
    - gists_url: a string
    - gravatar_id: a string
    - html_url: a string
    - id: an integer
    - login: a string
    - node_id: a string
    - organizations_url: a string
    - received_events_url: a string
    - repos_url: a string
    - site_admin: a boolean
    - starred_url: a string
    - subscriptions_url: a string
    - type: a string
    - url: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # Paginated endpoint: _collect_data() follows the 'next' links.
    return self._collect_data(f'repos/{organization_name}/{repository_name}/collaborators')  # type: ignore
@api_call
def add_repository_collaborator(
    self,
    organization_name: str,
    repository_name: str,
    user: str,
    permission: str = 'push',
) -> None:
    """Add collaborator to repository.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - user: a non-empty string

    # Optional parameters

    - permission: a non-empty string, one of 'pull', 'triage',
      'push', 'maintain', or 'admin' ('push' by default)
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('user')
    ensure_in(
        'permission', ['pull', 'triage', 'push', 'maintain', 'admin']
    )
    # PUT on the collaborators endpoint creates (or updates) the
    # invitation with the requested permission.
    params = {'permission': permission}
    self._put(
        f'repos/{organization_name}/{repository_name}/collaborators/{user}',
        json=params,
    )
@api_call
def remove_repository_collaborator(
    self, organization_name: str, repository_name: str, user: str
) -> None:
    """Remove collaborator from repository.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - user: a non-empty string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('user')
    collaborator_api = (
        f'repos/{organization_name}/{repository_name}'
        f'/collaborators/{user}'
    )
    self._delete(collaborator_api)
rm_repository_collaborator = remove_repository_collaborator
@api_call
def list_repository_permissions_user(
    self, organization_name: str, repository_name: str, user: str
) -> Dict[str, Any]:
    """List permissions of a user on a repository.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - user: a non-empty string

    # Returned value

    Return a dictionary with the following keys:

    - permission: a string
    - user: a dictionary
    - role_name: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('user')
    result = self._get(
        f'repos/{organization_name}/{repository_name}/collaborators/{user}/permission'
    )
    return result  # type: ignore
####################################################################
# GitHub repository contents
#
# get_repository_readme
# get_repository_content
# create_repository_file
# update_repository_file
@api_call
def get_repository_readme(
    self,
    organization_name: str,
    repository_name: str,
    ref: Optional[str] = None,
    format_: Optional[str] = None,
) -> Dict[str, Any]:
    """Return the repository README.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Optional parameters

    - ref: a non-empty string or None (None by default)
    - format_: a custom media type (a non-empty string) or None
      (None by default)

    # Returned value

    A dictionary by default. May be something else if `format_` is
    specified.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_noneornonemptystring('ref')
    ensure_noneornonemptystring('format_')
    query = None
    if ref is not None:
        query = {'ref': ref}
    accept = None
    if format_ is not None:
        # A custom media type changes the response representation.
        accept = {'Accept': format_}
    return self._get(
        f'repos/{organization_name}/{repository_name}/readme',
        params=query,
        headers=accept,
    )  # type: ignore
@api_call
def get_repository_content(
    self,
    organization_name: str,
    repository_name: str,
    path: str,
    ref: Optional[str] = None,
    format_: Optional[str] = None,
) -> Any:
    """Return the file or directory content.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - path: a string

    # Optional parameters

    - ref: a non-empty string or None (None by default)
    - format_: the custom media type (a non-empty string) or None
      (None by default)

    # Returned value

    A dictionary or a list of dictionaries by default. May be
    something else if `format_` is specified
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('path', str)
    ensure_noneornonemptystring('ref')
    ensure_noneornonemptystring('format_')
    params = {'ref': ref} if ref is not None else None
    headers = {'Accept': format_} if format_ is not None else None
    result = self._get(
        f'repos/{organization_name}/{repository_name}/contents/{path}',
        params=params,
        headers=headers,
    )
    # On a 2xx answer, prefer the decoded JSON body; fall back to the
    # raw text when a custom media type yields non-JSON content.  For
    # non-2xx answers the raw Response is returned unchanged.
    if result.status_code // 100 == 2:
        try:
            return result.json()
        except requests.exceptions.JSONDecodeError:
            return result.text
    return result  # type: ignore
@api_call
def create_repository_file(
    self,
    organization_name: str,
    repository_name: str,
    path: str,
    message: str,
    content: str,
    branch: Optional[str] = None,
    committer: Optional[Dict[str, str]] = None,
    author: Optional[Dict[str, str]] = None,
) -> Dict[str, Any]:
    """Create a new repository file.

    The created file must not already exist.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - path: a string
    - message: a string (the commit message)
    - content: a string (Base64-encoded)

    # Optional parameters

    - branch: a string or None (None by default)
    - committer: a dictionary or None (None by default)
    - author: a dictionary or None (None by default)

    # Returned value

    A dictionary.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('path', str)
    ensure_instance('message', str)
    ensure_instance('content', str)
    ensure_noneornonemptystring('branch')
    ensure_noneorinstance('committer', dict)
    ensure_noneorinstance('author', dict)
    # No 'sha' entry here: its absence is what makes GitHub create the
    # file rather than update it (see update_repository_file()).
    data = {'message': message, 'content': content}
    add_if_specified(data, 'branch', branch)
    add_if_specified(data, 'committer', committer)
    add_if_specified(data, 'author', author)
    result = self._put(
        f'repos/{organization_name}/{repository_name}/contents/{path}',
        json=data,
    )
    return result  # type: ignore
@api_call
def update_repository_file(
    self,
    organization_name: str,
    repository_name: str,
    path: str,
    message: str,
    content: str,
    sha: str,
    branch: Optional[str] = None,
    committer: Optional[Dict[str, str]] = None,
    author: Optional[Dict[str, str]] = None,
) -> Dict[str, Any]:
    """Update a repository file.

    The file must already exist on the repository.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - path: a string
    - message: a string (the commit message)
    - content: a string (Base64-encoded)
    - sha: a non-empty string (the blob SHA of the file being
      replaced)

    # Optional parameters

    - branch: a string or None (None by default)
    - committer: a dictionary or None (None by default)
    - author: a dictionary or None (None by default)

    # Returned value

    A dictionary.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('path', str)
    ensure_instance('message', str)
    ensure_instance('content', str)
    ensure_nonemptystring('sha')
    ensure_noneornonemptystring('branch')
    ensure_noneorinstance('committer', dict)
    ensure_noneorinstance('author', dict)
    # Providing 'sha' turns the PUT into an update of the existing
    # blob (GitHub rejects the call if the SHA does not match).
    data = {'message': message, 'content': content, 'sha': sha}
    add_if_specified(data, 'branch', branch)
    add_if_specified(data, 'committer', committer)
    add_if_specified(data, 'author', author)
    result = self._put(
        f'repos/{organization_name}/{repository_name}/contents/{path}',
        json=data,
    )
    return result  # type: ignore
####################################################################
# GitHub repository branches
#
# list_branches
# get_branch
@api_call
def list_branches(
    self,
    organization_name: str,
    repository_name: str,
    protected: bool = False,
) -> List[Dict[str, Any]]:
    """List branches.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Optional parameters

    - protected: a boolean (False by default; if True, only
      protected branches are returned)

    # Returned value

    A list of _short branches_. Each short branch is a dictionary
    with the following entries:

    - name: a string
    - commit: a dictionary
    - protected: a boolean
    - protection: a dictionary
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('protected', bool)
    query = None
    if protected:
        # GitHub expects the flag as the literal string 'true'.
        query = {'protected': 'true'}
    return self._collect_data(
        f'repos/{organization_name}/{repository_name}/branches',
        params=query,
    )
@api_call
def get_branch(
    self,
    organization_name: str,
    repository_name: str,
    branch_name: str,
) -> Dict[str, Any]:
    """Get branch.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - branch_name: a non-empty string

    # Returned value

    A _branch_. A branch is a dictionary with the following
    entries:

    - name: a string
    - commit: a dictionary
    - protected: a boolean
    - protection: a dictionary
    - protection_url: a string
    - pattern: a string
    - required_approving_review_count: an integer
    - _links: a dictionary
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('branch_name')
    result = self._get(
        f'repos/{organization_name}/{repository_name}/branches/{branch_name}'
    )
    return result  # type: ignore
####################################################################
# GitHub repository pull requests
#
# list_pullrequests
# create_pullrequest
# TODO get_pullrequest
# is_pullrequest_merged
# merge_pullrequest
# update_pullrequest_branch
@api_call
def list_pullrequests(
    self,
    organization_name: str,
    repository_name: str,
    state: str = 'all',
) -> List[Dict[str, Any]]:
    """List pull requests.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Optional parameters

    - state: a string, one of 'open', 'closed', or 'all' ('all' by
      default)

    # Returned value

    A list of _pull requests_. Each pull request is a dictionary
    with the following entries:

    - url: a string
    - id: an integer
    - node_id: a string
    - html_url: a string
    - diff_url: a string
    - patch_url: a string
    - issue_url: a string
    - commits_url: a string
    - review_comments_url: a string
    - review_comment_url: a string
    - comments_url: a string
    - statuses_url: a string
    - number: an integer
    - state: a string
    - locked: a boolean
    - title: a string
    - user: a dictionary
    - body: a string
    - labels: a list of dictionaries
    - milestone: a dictionary,
    - active_lock_reason: a string
    - created_at: a string
    - updated_at: a string
    - closed_at: a string
    - merged_at: a string
    - merge_commit_sha: a string
    - assignee: a dictionary
    - assignees: a list of dictionaries
    - requested_reviewers: a list of dictionaries
    - requested_teams: a list of dictionaries
    - head: a dictionary
    - base: a dictionary
    - _links: a dictionary
    - author_association: a string
    - auto_merge: a dictionary or None
    - draft: a boolean

    `number` is the value you use to interact with the pull request.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_in('state', ('open', 'closed', 'all'))
    # Paginated endpoint: _collect_data() follows the 'next' links.
    return self._collect_data(
        f'repos/{organization_name}/{repository_name}/pulls',
        params={'state': state},
    )
@api_call
def create_pullrequest(
    self,
    organization_name: str,
    repository_name: str,
    head: str,
    base: str,
    title: Optional[str] = None,
    body: Optional[str] = None,
    maintainer_can_modify: bool = True,
    draft: bool = False,
    issue: Optional[int] = None,
) -> Dict[str, Any]:
    """Create a pull request.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - head: a non-empty string (the name of the branch containing
      the changes)
    - base: a non-empty string (the name of the branch to pull the
      changes into)

    # Optional parameters

    - title: a non-empty string or None (None by default)
    - body: a non-empty string or None (None by default)
    - maintainer_can_modify: a boolean (True by default)
    - draft: a boolean (False by default)
    - issue: an integer or None (None by default)

    Exactly one of `title` or `issue` must be specified.

    # Returned value

    A _pull request_. See #list_pullrequests() for its description.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('head')
    ensure_nonemptystring('base')
    ensure_noneornonemptystring('title')
    ensure_noneornonemptystring('body')
    ensure_instance('maintainer_can_modify', bool)
    ensure_instance('draft', bool)
    ensure_noneorinstance('issue', int)
    # A pull request is created either from a title or from an
    # existing issue, never both.
    ensure_onlyone('title', 'issue')
    data = {
        'head': head,
        'base': base,
        'maintainer_can_modify': maintainer_can_modify,
        'draft': draft,
    }
    add_if_specified(data, 'body', body)
    add_if_specified(data, 'title', title)
    add_if_specified(data, 'issue', issue)
    result = self._post(
        f'repos/{organization_name}/{repository_name}/pulls', json=data
    )
    return result  # type: ignore
@api_call
def is_pullrequest_merged(
    self, organization_name: str, repository_name: str, pull_number: int
) -> bool:
    """Check if pull request has been merged.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - pull_number: an integer

    # Returned value

    A boolean. True if the pull request has been merged, False
    otherwise.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('pull_number', int)
    # GitHub answers '204 No Content' for a merged pull request.
    response = self._get(
        f'repos/{organization_name}/{repository_name}/pulls/{pull_number}/merge'
    )
    return response.status_code == 204
@api_call
def merge_pullrequest(
    self,
    organization_name: str,
    repository_name: str,
    pull_number: int,
    commit_title: Optional[str] = None,
    commit_message: Optional[str] = None,
    sha: Optional[str] = None,
    merge_method: Optional[str] = None,
) -> Dict[str, Any]:
    """Merge pull request.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - pull_number: an integer

    # Optional parameters

    - commit_title: a non-empty string or None (None by default)
    - commit_message: a non-empty string or None (None by default)
    - sha: a non-empty string or None (None by default; if given,
      the pull request head must match it)
    - merge_method: a string, one of 'merge', 'squash', or 'rebase',
      or None (None by default)

    # Returned value

    A _pull request merge result_. A pull request merge result is a
    dictionary with the following entries:

    - sha: a string
    - merged: a boolean
    - message: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('pull_number', int)
    ensure_noneornonemptystring('commit_title')
    ensure_noneornonemptystring('commit_message')
    ensure_noneornonemptystring('sha')
    ensure_noneornonemptystring('merge_method')
    if merge_method is not None:
        ensure_in('merge_method', ('merge', 'squash', 'rebase'))
    # Unspecified options are omitted so GitHub applies its defaults.
    data: Dict[str, Any] = {}
    add_if_specified(data, 'commit_title', commit_title)
    add_if_specified(data, 'commit_message', commit_message)
    add_if_specified(data, 'sha', sha)
    add_if_specified(data, 'merge_method', merge_method)
    result = self._put(
        f'repos/{organization_name}/{repository_name}/pulls/{pull_number}/merge',
        json=data,
    )
    return result  # type: ignore
@api_call
def update_pullrequest_branch(
    self,
    organization_name: str,
    repository_name: str,
    pull_number: int,
    expected_head_sha: Optional[str] = None,
) -> Dict[str, Any]:
    """Update pull request branch with latest upstream changes.

    Update the pull request branch with the latest upstream changes
    by merging HEAD from the base branch into the pull request
    branch.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - pull_number: an integer

    # Optional parameters

    - expected_head_sha: a non-empty string or None (None by
      default; if given, the update is rejected when the branch head
      no longer matches it)

    # Returned value

    A dictionary with the following entries:

    - message: a string
    - url: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('pull_number', int)
    ensure_noneornonemptystring('expected_head_sha')
    data = (
        {'expected_head_sha': expected_head_sha}
        if expected_head_sha
        else None
    )
    result = self._put(
        f'repos/{organization_name}/{repository_name}/pulls/{pull_number}/update-branch',
        json=data,
    )
    return result  # type: ignore
####################################################################
# GitHub repository git database
#
# create_repository_reference
# delete_repository_reference
# create_repository_tag
# get_repository_reference
# get_repository_references
# get_repository_tree
@api_call
def create_repository_reference(
    self,
    organization_name: str,
    repository_name: str,
    ref: str,
    sha: str,
    key: Optional[str] = None,
) -> Dict[str, Any]:
    """Create a reference.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - ref: a non-empty string (a fully-qualified reference, starting
      with `refs` and having at least two slashes)
    - sha: a non-empty string

    # Optional parameters

    - key: a string

    # Returned value

    A _reference_. A reference is a dictionary with the following
    entries:

    - ref: a string
    - node_id: a string
    - url: a string
    - object: a dictionary

    The `object` dictionary has the following entries:

    - type: a string
    - sha: a string
    - url: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('ref')
    if not ref.startswith('refs/') or ref.count('/') < 2:
        raise ValueError(
            'ref must start with "refs" and contains at least two slashes.'
        )
    ensure_nonemptystring('sha')
    ensure_noneornonemptystring('key')
    data = {'ref': ref, 'sha': sha}
    # NOTE(review): 'key' is not part of the documented GitHub
    # create-ref payload — confirm server-side support.
    add_if_specified(data, 'key', key)
    result = self._post(
        f'repos/{organization_name}/{repository_name}/git/refs', json=data
    )
    return result  # type: ignore
@api_call
def delete_repository_reference(
    self,
    organization_name: str,
    repository_name: str,
    ref: str,
) -> None:
    """Delete a reference.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - ref: a non-empty string (a fully-qualified reference, starting
      with `refs` and having at least two slashes)

    # Returned value

    No content.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('ref')
    if not ref.startswith('refs/') or ref.count('/') < 2:
        raise ValueError(
            'ref must start with "refs" and contains at least two slashes.'
        )
    # `ref` already carries the 'refs/' prefix, so the resulting URL
    # is '.../git/refs/heads/...' (or tags), as GitHub expects.
    result = self._delete(
        f'repos/{organization_name}/{repository_name}/git/{ref}'
    )
    return result  # type: ignore
@api_call
def create_repository_tag(
    self,
    organization_name: str,
    repository_name: str,
    tag: str,
    message: str,
    object_: str,
    type_: str,
    tagger: Optional[Any] = None,
) -> Dict[str, Any]:
    """Create a tag.

    This creates an annotated tag object; it does not create the
    corresponding reference (see #create_repository_reference()).

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - tag: a non-empty string
    - message: a string (the tag message)
    - object_: a non-empty string (the SHA of the object being
      tagged)
    - type_: a string (the type of `object_`)

    # Optional parameters

    - tagger: a dictionary or None (None by default)

    # Returned value

    A _tag_. A tag is a dictionary with the following entries:

    - node_id: a string
    - tag: a string
    - sha: a string
    - url: a string
    - message: a string
    - tagger: a dictionary
    - object: a dictionary
    - verification: a dictionary
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('tag')
    ensure_instance('message', str)
    ensure_nonemptystring('object_')
    ensure_nonemptystring('type_')
    # 'object_' and 'type_' are renamed: 'object' and 'type' are the
    # JSON field names GitHub expects (shadowing builtins in Python).
    data = {
        'tag': tag,
        'message': message,
        'object': object_,
        'type': type_,
    }
    add_if_specified(data, 'tagger', tagger)
    result = self._post(
        f'repos/{organization_name}/{repository_name}/git/tags', json=data
    )
    return result  # type: ignore
@api_call
def get_repository_reference(
    self,
    organization_name: str,
    repository_name: str,
    ref: str,
) -> Dict[str, Any]:
    """Get a repository reference.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - ref: a non-empty string (of form `heads/{branch}` or
      `tags/{tag}`)

    # Returned value

    A _reference_. A reference is a dictionary with the following
    entries:

    - ref: a string
    - node_id: a string
    - url: a string
    - object: a dictionary

    The `object` dictionary has the following entries:

    - type: a string
    - sha: a string
    - url: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('ref')
    # Unlike create/delete, this endpoint wants the ref WITHOUT the
    # 'refs/' prefix ('heads/...' or 'tags/...').
    if not (ref.startswith('heads/') or ref.startswith('tags/')):
        raise ValueError('ref must start with "heads/" or "tags/".')
    result = self._get(
        f'repos/{organization_name}/{repository_name}/git/ref/{ref}',
    )
    return result  # type: ignore
@api_call
def get_repository_references(
    self,
    organization_name: str,
    repository_name: str,
    ref: str,
) -> List[Dict[str, Any]]:
    """Get repository references.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - ref: a non-empty string (`heads` or `tags`)

    # Returned value

    A list of _references_. A reference is a dictionary with the
    following entries:

    - ref: a string
    - node_id: a string
    - url: a string
    - object: a dictionary

    The `object` dictionary has the following entries:

    - type: a string
    - sha: a string
    - url: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_in('ref', ('heads', 'tags'))
    # The 'matching refs' endpoint returns a JSON array of references.
    result = self._get(
        f'repos/{organization_name}/{repository_name}/git/refs/{ref}',
    )
    return result  # type: ignore
@api_call
def get_repository_tree(
    self,
    organization_name: str,
    repository_name: str,
    tree_sha: str,
    recursive: bool = False,
) -> Dict[str, Any]:
    """Get a tree.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - tree_sha: a non-empty string (a SHA1)

    # Optional parameters

    - recursive: a boolean (False by default)

    # Returned value

    A _tree_. A tree is a dictionary with the following keys:

    - sha: a string
    - url: a string
    - tree: a list of dictionaries
    - truncated: a boolean

    The `tree` elements have the following keys:

    - path: a string
    - mode: a string
    - type: a string
    - size: an integer
    - sha: a string
    - url: a string

    If `truncated` is `True`, the number of items in the `tree` list
    exceeds githubs' internal limits (100k entries with a maximum
    size of 7 MB). If you need to fetch more items, use the
    non-recursive method of fetching trees, and fetch one sub-tree
    at a time.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_nonemptystring('tree_sha')
    ensure_instance('recursive', bool)
    params = {'recursive': 'true'} if recursive else None
    headers = {'Accept': 'application/vnd.github+json'}
    # The endpoint is 'git/trees/{sha}' (plural); the previous
    # 'git/tree/{sha}' path does not exist and returned 404.
    result = self._get(
        f'repos/{organization_name}/{repository_name}/git/trees/{tree_sha}',
        params=params,
        headers=headers,
    )
    return result  # type: ignore
####################################################################
# GitHub hook operations
#
# list_hooks
# create_hook
# delete_hook
@api_call
def list_hooks(
    self, organization_name: str, repository_name: str
) -> List[Dict[str, Any]]:
    """Return the list of hooks for repository.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string

    # Returned value

    A list of _hooks_. A hook is a dictionary with the following
    entries:

    - active: a boolean
    - config: a dictionary
    - created_at: a string (a timestamp)
    - events: a list of strings
    - id: an integer
    - last_response: a dictionary
    - name: a string (always `'web'`)
    - ping_url: a string
    - test_url: a string
    - type: a string
    - updated_at: a string (a timestamp)
    - url: a string

    `config` has the following entries:

    - insecure_ssl: a string
    - content_type: a string
    - url: a string

    `last_response` has the following entries:

    - message: a string
    - code: an integer
    - status: a string
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # The endpoint returns a JSON array of hooks, hence the
    # List[...] return annotation (the previous Dict annotation was
    # incorrect).
    result = self._get(
        f'repos/{organization_name}/{repository_name}/hooks'
    )
    return result  # type: ignore
@api_call
def create_hook(
    self,
    organization_name: str,
    repository_name: str,
    name: str,
    config: Dict[str, str],
    events: Optional[List[str]] = None,
    active: bool = True,
) -> Dict[str, Any]:
    """Create a webhook.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - name: a string (must be `'web'`)
    - config: a dictionary

    The `config` dictionary must contain the following entry:

    - url: a string

    It may contain the following entries:

    - content_type: a string
    - secret: a string
    - insecure_ssl: a string

    # Optional parameters

    - events: a list of strings (`['push']` by default)
    - active: a boolean (True by default)

    # Returned value

    A _hook_. See #list_hooks() for its format.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    # GitHub only supports 'web' as a hook name nowadays.
    if name != 'web':
        raise ValueError('name must be "web".')
    ensure_instance('config', dict)
    ensure_noneorinstance('events', list)
    ensure_instance('active', bool)
    if 'url' not in config:
        raise ValueError('config must contain an "url" entry.')
    if events is None:
        events = ['push']
    data = {
        'name': name,
        'active': active,
        'config': config,
        'events': events,
    }
    result = self._post(
        f'repos/{organization_name}/{repository_name}/hooks', json=data
    )
    return result  # type: ignore
@api_call
def delete_hook(
    self, organization_name: str, repository_name: str, hook_id: int
) -> bool:
    """Delete a webhook.

    # Required parameters

    - organization_name: a non-empty string
    - repository_name: a non-empty string
    - hook_id: an integer

    # Returned value

    A boolean. True when successful.
    """
    ensure_nonemptystring('organization_name')
    ensure_nonemptystring('repository_name')
    ensure_instance('hook_id', int)
    # GitHub answers '204 No Content' on successful deletion.
    response = self._delete(
        f'repos/{organization_name}/{repository_name}/hooks/{hook_id}'
    )
    return response.status_code == 204
####################################################################
# GitHub misc. operations
#
# get_server_version
# get_admin_stats
@api_call
def get_server_version(self) -> None:
    """Return current GitHub version.

    !!! warning
        Not implemented yet.
    """
    # Intentionally a no-op placeholder: decorated so callers get a
    # consistent api_call behavior once implemented.
@api_call
def get_admin_stats(self, what: str = 'all') -> Dict[str, Any]:
    """Return admin stats.

    # Optional parameters

    - what: a string (`'all'` by default)

    `what` can be `'issues'`, `'hooks'`, `'milestones'`, `'orgs'`,
    `'comments'`, `'pages'`, `'users'`, `'gists'`, `'pulls'`,
    `'repos'` or `'all'`.

    Requires sysadmin rights.

    # Returned value

    A dictionary with either one entry (if `what` is not `'all'`)
    or one entry per item.

    Values are dictionaries.
    """
    ensure_nonemptystring('what')
    stats = self._get(f'enterprise/stats/{what}')
    return stats  # type: ignore
####################################################################
# GitHub helpers
#
# All helpers are api_call-compatibles (i.e., they can be used as
# a return value)
def _get(
self,
api: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
headers: Optional[Mapping[str, str]] = None,
) -> requests.Response:
"""Return GitHub API call results, as Response."""
api_url = join_url(self.url, api)
return self.session().get(api_url, headers=headers, params=params)
def _collect_data(
self,
api: str,
params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
headers: Optional[Mapping[str, str]] = None,
) -> List[Dict[str, Any]]:
"""Return GitHub API call results, collected.
The API call is expected to return a list of items. If not,
an _ApiError_ exception is raised.
"""
api_url = join_url(self.url, api)
collected: List[Dict[str, Any]] = []
more = True
while more:
response = self.session().get(
api_url, params=params, headers=headers
)
if response.status_code // 100 != 2:
raise ApiError(response.text)
try:
collected += response.json()
except Exception as exception:
raise ApiError(exception)
more = 'next' in response.links
if more:
api_url = response.links['next']['url']
return collected
def _post(
self, api: str, json: Optional[Mapping[str, Any]] = None
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().post(api_url, json=json)
def _put(
self,
api: str,
json: Optional[Mapping[str, Any]] = None,
headers: Optional[Mapping[str, str]] = None,
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().put(api_url, json=json, headers=headers)
def _delete(self, api: str) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().delete(api_url)
def _patch(
self,
api: str,
json: Optional[Mapping[str, Any]] = None,
headers: Optional[Mapping[str, str]] = None,
) -> requests.Response:
api_url = join_url(self.url, api)
return self.session().patch(api_url, json=json, headers=headers) | zabel-elements | /zabel_elements-1.15.0-py3-none-any.whl/zabel/elements/clients/base/github.py | github.py |
from typing import Any, Dict, List, Optional
from zabel.commons.utils import api_call, ensure_nonemptystring
########################################################################
########################################################################
# Kubernetes low-level api
DEFAULT_NAMESPACE = 'default'
class Kubernetes:
    """Kubernetes Base-Level Wrapper.

    !!! warning
        Preliminary work. Not stable. May change at any time.

    # Reference URL

    ...

    # Implemented features

    - `namespaces`
    - `resource quota`

    # Sample use

    Using the default context as defined in the `~/.kube/config`
    configuration file:

    ```python
    >>> from zabel.elements.clients import Kubernetes
    >>>
    >>> k8s = Kubernetes()
    >>> namespaces = k8s.list_namespaces()
    ```

    Using explicit configuration:

    ```python
    >>> from zabel.elements.clients import Kubernetes
    >>>
    >>> K8S_URL = 'https://kubernetes.example.com'
    >>> k8s = Kubernetes(
    >>>     config={
    >>>         'url': K8S_URL,
    >>>         'api_key': '...',
    >>>         'verify': False,
    >>>     }
    >>> )
    >>> namespaces = k8s.list_namespaces()
    ```
    """

    def __init__(
        self,
        config_file: Optional[str] = None,
        context: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Create a Kubernetes instance object.

        By default, will use the standard context in Kubernetes config
        file.

        # Optional parameters

        - config_file: a non-empty string or None (None by default)
        - context: a non-empty string or None (None by default)
        - config: a dictionary or None (None by default)

        If `config_file` or `context` are specified, `config` must
        be None.

        If neither `config_file` nor `config` are specified, the default
        Kubernetes config file will be used.

        If `context` is specified, the instance will use the
        specified Kubernetes context.  If not specified, the default
        context will be use instead.

        If `config` is specified, it must be a dictionary with the
        following entries:

        - url: a non-empty string
        - api_key: a non-empty string (a JWT)

        If may also contain the following entries:

        - verify: a boolean (True by default)
        - ssl_ca_cert: a string (a base64-encoded certificate)
        - ssl_ca_cert_file: a string (an existing file name)

        The `url` parameter is the top-level API point.  E.g.:

            https://FOOBARBAZ.example.com

        `verify` can be set to False if disabling certificate checks for
        Kubernetes communication is required.  Tons of warnings will
        occur if this is set to False.

        If both `ssl_ca_cert` and `ssl_ca_cert_file` are specified,
        `ssl_ca_cert_file` will be ignored.
        """
        # Imported lazily so the 'kubernetes' package is only required
        # when this wrapper is actually instantiated.
        from kubernetes import client as klient, config as konfig
        self._klient = klient
        if config is not None and (
            config_file is not None or context is not None
        ):
            raise ValueError(
                'config cannot be set if config_file or context are set'
            )
        _config = klient.Configuration()
        if config is not None:
            # Explicit configuration: bearer-token authentication.
            _config.api_key_prefix['authorization'] = 'Bearer'
            _config.api_key['authorization'] = config.get('api_key')
            _config.host = config.get('url')
            _config.verify_ssl = config.get('verify', True)
            if 'ssl_ca_cert' in config or 'ssl_ca_cert_file' in config:
                # FileOrData prefers the inline data ('ssl_ca_cert')
                # over the file name, which implements the documented
                # "ssl_ca_cert_file will be ignored" precedence.
                _config.ssl_ca_cert = konfig.kube_config.FileOrData(
                    config, 'ssl_ca_cert_file', 'ssl_ca_cert'
                ).as_file()
        else:
            # Fall back to kubeconfig (default file/context unless
            # overridden by the parameters).
            konfig.load_kube_config(
                config_file=config_file,
                context=context,
                client_configuration=_config,
            )
        self._config = _config
        # SDK clients are created lazily; see the _get_*_client helpers.
        self._apiclient = self._v1client = self._rbacv1_client = None

    def __str__(self) -> str:
        return f'{self.__class__.__name__}: {self._config.host}'

    def __repr__(self) -> str:
        return f'<{self.__class__.__name__}: {self._config.host!r}>'

    ####################################################################
    # create_namespace
    # delete_namespace
    # list_namespaces
    #
    # list_resourcequotas
    # patch_resourcequota
    #
    # ? create_role
    # ? create_role_binding
    # ? create_service_account

    @api_call
    def create_namespace(self, namespace: str) -> Dict[str, Any]:
        """Create namespace.

        # Required parameters

        - namespace: a non-empty string

        # Returned value

        A dictionary with the following entries:

        - api_version: a string
        - kind: a string
        - metadata: a dictionary
        - spec: a dictionary
        - status: a dictionary
        """
        ensure_nonemptystring('namespace')

        # The namespace also gets a 'name' label matching its name.
        metadata = self._klient.V1ObjectMeta()
        metadata.name = namespace
        metadata.labels = {'name': namespace}
        _namespace = self._klient.V1Namespace()
        _namespace.metadata = metadata
        return self._get_v1_client().create_namespace(_namespace).to_dict()  # type: ignore

    @api_call
    def delete_namespace(self, namespace: str) -> Dict[str, Any]:
        """Delete namespace.

        # Required parameters

        - namespace: a non-empty string

        # Returned value

        A dictionary with the following entries:

        - api_version: a string
        - code: ... or None
        - details: ... or None
        - kind: a string
        - message: ... or None
        - metadata: a dictionary
        - reason: ... or None
        - status: a dictionary
        """
        ensure_nonemptystring('namespace')

        return self._get_v1_client().delete_namespace(namespace).to_dict()  # type: ignore

    @api_call
    def list_namespaces(self) -> List[Dict[str, Any]]:
        """Return a list of namespaces.

        # Returned value

        A list of _namespaces_.  Each namespace in the returned list is
        a dictionary with the following entries:

        - api_version: a string
        - kind: a string
        - metadata: a dictionary
        - spec: a dictionary
        - status: a dictionary
        """
        return [
            item.to_dict()
            for item in self._get_v1_client().list_namespace().items
        ]

    @api_call
    def list_resourcequotas(
        self, *, namespace: str = DEFAULT_NAMESPACE
    ) -> List[Dict[str, Any]]:
        """Return a list of resource quota in namespace.

        # Optional parameters

        - namespace: a string

        # Returned value

        A list of _resource quota_.  Each resource quota is a dictionary
        with the following entries:

        - api_version: a string
        - kind: a string
        - metadata: a dictionary
        - spec: a dictionary
        - status: a dictionary
        """
        return [
            item.to_dict()
            for item in self._get_v1_client()
            .list_namespaced_resource_quota(namespace)
            .items
        ]

    @api_call
    def patch_resourcequota(
        self,
        name: str,
        body: Dict[str, Any],
        *,
        namespace: str = DEFAULT_NAMESPACE,
    ) -> Dict[str, Any]:
        """Patch a resource quota in a namespace.

        # Required parameters

        - name: a string
        - body: a dictionary

        # Optional parameters

        - namespace: a string

        # Returned value

        A dictionary.
        """
        # NOTE(review): unlike the other methods of this class, the
        # parameters are not validated with ensure_* helpers here --
        # consider adding ensure_nonemptystring('name') and
        # ensure_instance('body', dict) for consistency.
        result = (
            self._get_v1_client()
            .patch_namespaced_resource_quota(
                namespace=namespace, name=name, body=body
            )
            .to_dict()
        )
        return result  # type: ignore

    ####################################################################
    # kubernetes clients

    def _get_apiclient(self) -> Any:
        """Return the API client."""
        # Lazily created and cached on first use.
        if self._apiclient is None:
            self._apiclient = self._klient.ApiClient(self._config)
        return self._apiclient

    def _get_v1_client(self) -> Any:
        """Return Core v1 API client."""
        # Lazily created and cached on first use.
        if self._v1client is None:
            self._v1client = self._klient.CoreV1Api(self._get_apiclient())
        return self._v1client

    def _get_rbacv1_client(self) -> Any:
        """Return RbacAuthorizationV1Api client."""
        # Lazily created and cached on first use.
        if self._rbacv1_client is None:
            self._rbacv1_client = self._klient.RbacAuthorizationV1Api(
                self._get_apiclient()
            )
        return self._rbacv1_client
from typing import Any, Dict, List
import asyncio
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import ensure_nonemptystring, api_call
class OktaException(Exception):
    """Raised when an Okta API call reports an error or no result.

    The redundant ``__init__`` that merely forwarded to
    ``Exception.__init__`` has been removed; behavior is unchanged.
    """
class Okta:
    """Okta Base-Level Wrapper.

    # Reference url

    <https://developer.okta.com/docs/reference/api/groups/>
    """

    def __init__(
        self,
        url: str,
        token: str,
    ):
        """Create an Okta instance object.

        # Required parameters

        - url: a non-empty string (the organization URL)
        - token: a non-empty string (an Okta API token)
        """
        ensure_nonemptystring('url')
        ensure_nonemptystring('token')
        self.url = url
        # The underlying SDK client is created lazily (see #_client()).
        self.client = None
        self.token = token

    def _client(self) -> 'okta.OktaClient':
        """singleton instance, only if needed."""
        if self.client is None:
            # Imported lazily so the 'okta' package is only required
            # when the wrapper is actually used.
            from okta.client import Client as OktaClient

            self.client = OktaClient({'orgUrl': self.url, 'token': self.token})
        return self.client

    ####################################################################
    # users
    #
    # list_users
    # get_user_info
    # list_groups_by_user_id

    @api_call
    def list_users(self) -> List[Dict[str, Any]]:
        """Return users list.

        # Returned value

        A list of _users_.  Each user is a dictionary.  See
        #get_user_info() for its format.
        """

        async def list_users_async(self):
            users, response, error = await self._client().list_users()
            if error:
                raise ApiError(error)
            collected = users
            # Follow the SDK's pagination until exhausted.
            while response.has_next():
                users, error = await response.next()
                if error:
                    raise ApiError(error)
                collected += users
            return [user.as_dict() for user in collected]

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(list_users_async(self))

    @api_call
    def get_user_info(self, user: str) -> Dict[str, Any]:
        """Request the Okta user info.

        # Required parameters

        - user: a non-empty string

        # Returned value

        A dictionary with following entries:

        - id: a string
        - status: an enum
        - created: a timestamp
        - activated: a timestamp
        - statusChanged: a timestamp
        - lastLogin: a timestamp
        - lastUpdated: a timestamp
        - passwordChanged: a boolean
        - type: a dictionary
        - profile: a dictionary
        - credentials: a dictionary
        """
        ensure_nonemptystring('user')

        async def get_user_info_async(self, user: str):
            okta_user, resp, err = await self._client().get_user(user)
            if err:
                # TODO : check if err is itself an exception, no time
                # for this for now
                raise OktaException(err)
            if okta_user is not None:
                return okta_user.as_dict()
            else:
                raise OktaException(f"User {user} not found")

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(get_user_info_async(self, user))

    @api_call
    def list_groups_by_user_id(self, userId: str) -> List[Dict[str, Any]]:
        """Return the groups for an user.

        # Required parameters

        - userId: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if an error is thrown by Okta.

        # Returned value

        Return a list of groups.  Refer to #get_group_by_name() for
        more information.
        """
        ensure_nonemptystring('userId')

        async def list_groups_by_user_id_async(self, userId: str):
            groups, resp, err = await self._client().list_user_groups(userId)
            # Fix: the error was previously ignored, contradicting the
            # documented contract.
            if err:
                raise ApiError(err)
            return [group.as_dict() for group in groups]

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(
            list_groups_by_user_id_async(self, userId)
        )

    ####################################################################
    # groups
    #
    # get_group_by_name
    # add_user_to_group
    # remove_user_from_group
    # list_users_by_group_id

    @api_call
    def get_group_by_name(self, group_name: str) -> Dict[str, Any]:
        """Requet Okta group by his name.

        # Required parameters

        - group_name: a non-empty string

        # Returned value

        A dictionary with following entries:

        - id: a string
        - created: a timestamp
        - lastUpdated: a timestamp
        - lastMembershipUpdated: a timestamp
        - objectClass: an array
        - type: a string
        - profile: a dictionary
        - _links: a dictionary

        # Raised exceptions

        Raises an _ApiError_ exception if the Okta call fails or if
        zero or more than one group is returned by the Okta API.
        """
        ensure_nonemptystring('group_name')

        async def find_group_async(self, group_name):
            param = {'q': group_name}
            groups, resp, error = await self._client().list_groups(
                query_params=param
            )
            # Fix: check the error before using `groups` (which may be
            # None when the call failed).
            if error:
                raise ApiError(error)
            if len(groups) == 0:
                raise ApiError(f'The group {group_name} is not an Okta group')
            elif len(groups) > 1:
                raise ApiError(
                    f'More than one group with the name: {group_name}'
                )
            return groups[0].as_dict()

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(find_group_async(self, group_name))

    @api_call
    def add_user_to_group(self, group_id: str, user_id: str) -> None:
        """Add user to Okta group.

        # Required parameters

        - group_id: a non-empty string
        - user_id: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if an error is thrown by Okta
        during the add-user-to-group operation.
        """
        ensure_nonemptystring('group_id')
        ensure_nonemptystring('user_id')

        async def add_user_to_group_async(self, group_id, user_id):
            resp, error = await self._client().add_user_to_group(
                userId=user_id, groupId=group_id
            )
            if error:
                raise ApiError(error)

        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            add_user_to_group_async(self, group_id, user_id)
        )

    @api_call
    def remove_user_from_group(self, group_id: str, user_id: str) -> None:
        """Remove user from Okta group.

        # Required parameters

        - group_id: a non-empty string
        - user_id: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if an error is thrown by Okta
        during the remove-user-from-group operation.
        """
        ensure_nonemptystring('group_id')
        ensure_nonemptystring('user_id')

        async def remove_user_from_group_async(self, group_id, user_id):
            resp, error = await self._client().remove_user_from_group(
                userId=user_id, groupId=group_id
            )
            if error:
                raise ApiError(error)

        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            remove_user_from_group_async(self, group_id, user_id)
        )

    @api_call
    def list_users_by_group_id(self, group_id: str) -> List[Dict[str, Any]]:
        """List users in Okta group.

        # Required parameters

        - group_id: a non-empty string

        # Raised exceptions

        Raises an _ApiError_ exception if an error is thrown by Okta.

        # Returned value

        Return a list of users.  Refer to #get_user_info() for more
        information.
        """
        ensure_nonemptystring('group_id')

        async def list_users_by_group_id_async(self, group_id):
            users, response, error = await self._client().list_group_users(
                group_id
            )
            # Fix: the first call's error was previously unchecked, and
            # in the pagination loop `users` was appended before the
            # error check (mirroring #list_users() now).
            if error:
                raise ApiError(error)
            collected = users
            while response.has_next():
                users, error = await response.next()
                if error:
                    raise ApiError(error)
                collected += users
            return [user.as_dict() for user in collected]

        loop = asyncio.get_event_loop()
        return loop.run_until_complete(
            list_users_by_group_id_async(self, group_id)
        )
from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union
import json
import re
from urllib.parse import urlencode
import requests
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import (
add_if_specified,
api_call,
ensure_in,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
ensure_onlyone,
join_url,
)
########################################################################
########################################################################
# Reindex kinds accepted by the JIRA reindex operation.  Both the
# misspelled 'BACKGROUND_PREFFERED' and the corrected
# 'BACKGROUND_PREFERRED' are listed -- presumably kept for backward
# compatibility; TODO confirm before removing either.
REINDEX_KINDS = [
    'FOREGROUND',
    'BACKGROUND',
    'BACKGROUND_PREFFERED',
    'BACKGROUND_PREFERRED',
]

# Default 'expand' query values used by the getters below.
PERMISSIONSCHEME_EXPAND = 'permissions,user,group,projectRole,field,all'
NOTIFICATIONSCHEME_EXPAND = (
    'notificationSchemeEvents,user,group,projectRole,field,all'
)
PROJECT_EXPAND = 'description,lead,url,projectKeys'
USER_EXPAND = 'groups,applicationRoles'

# Page size used when paginating API results.
MAX_RESULTS = 1000
# Helpers
def _get_atl_token(html: str) -> str:
atl_token = html[html.find('"atl_token"') :]
atl_token = atl_token[atl_token.find('value="') + 7 :]
return atl_token[: atl_token.find('"')]
def _get_scheme_id(
name_or_id: Union[int, str], schemes: Iterable[Mapping[str, Any]]
) -> str:
if isinstance(name_or_id, str):
matches = [s['id'] for s in schemes if s['name'] == name_or_id]
if len(matches) != 1:
raise ApiError('Scheme %s not found.' % name_or_id)
return str(matches.pop())
if not any(str(s['id']) == str(name_or_id) for s in schemes):
raise ApiError('Scheme ID %s not found.' % str(name_or_id))
return str(name_or_id)
class BearerAuth(requests.auth.AuthBase):
    """Requests auth hook injecting a personal access token as a Bearer header."""

    def __init__(self, pat: str):
        self.pat = pat

    def __eq__(self, other):
        return getattr(other, 'pat', None) == self.pat

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        r.headers['Authorization'] = f'Bearer {self.pat}'
        return r
# JIRA low-level api
class Jira:
"""JIRA Low-Level Wrapper.
Reference URL:
<https://docs.atlassian.com/jira/REST/server/>
<https://docs.atlassian.com/software/jira/docs/api/REST/8.7.1>
<https://docs.atlassian.com/jira-servicedesk/REST/4.9.0/>
Agile reference:
<https://docs.atlassian.com/jira-software/REST/8.7.1/>
Using the python library:
<http://jira.readthedocs.io/en/latest/>
Plus the various WADLS, such as:
<https://jira.example.com/rest/greenhopper/1.0/application.wadl>
<https://jira.example.com/rest/bitbucket/1.0/application.wadl>
Implemented features:
- search
- groups
- permissionschemes
- projects
- users
- boards
- sprints
- issues
- servicedesk
- misc. features (reindexing, plugins, xray, & server info)
Works with basic authentication as well as OAuth authentication.
It is the responsibility of the user to be sure the provided
authentication has enough rights to perform the requested operation.
# Sample use
```python
>>> from zabel.elements.clients import Jira
>>>
>>> url = 'https://jira.example.com'
>>> jc = Jira(url, basic_auth=(user, token))
>>> jc.list_users()
```
!!! note
Reuse the JIRA library whenever possible, but always returns
'raw' values (dictionaries, ..., not classes).
"""
    def __init__(
        self,
        url: str,
        basic_auth: Optional[Tuple[str, str]] = None,
        oauth: Optional[Dict[str, str]] = None,
        bearer_auth: Optional[str] = None,
        verify: bool = True,
    ) -> None:
        """Create a Jira instance object.

        You can only specify one of `basic_auth`, `oauth`, and
        `bearer_auth`.

        # Required parameters

        - url: a string
        - basic_auth: a strings tuple (user, token)
        - oauth: a dictionary
        - bearer_auth: a string

        The `oauth` dictionary is expected to have the following
        entries:

        - access_token: a string
        - access_token_secret: a string
        - consumer_key: a string
        - key_cert: a string

        # Optional parameters

        - verify: a boolean (True by default)

        `verify` can be set to False if disabling certificate checks for
        Jira communication is required.  Tons of warnings will occur if
        this is set to False.
        """
        ensure_nonemptystring('url')
        # Exactly one of the three authentication methods must be given.
        ensure_onlyone('basic_auth', 'oauth', 'bearer_auth')
        ensure_noneorinstance('basic_auth', tuple)
        ensure_noneorinstance('oauth', dict)
        ensure_noneorinstance('bearer_auth', str)
        ensure_instance('verify', bool)
        self.url = url
        self.basic_auth = basic_auth
        self.oauth = oauth
        self.bearer_auth = bearer_auth
        # self.auth is what gets handed to requests; build it from
        # whichever authentication method was provided.
        if basic_auth is not None:
            self.auth = basic_auth
        if oauth is not None:
            # Imported lazily so the OAuth libraries are only required
            # when OAuth is actually used.
            from requests_oauthlib import OAuth1
            from oauthlib.oauth1 import SIGNATURE_RSA
            self.auth = OAuth1(
                oauth['consumer_key'],
                'dont_care',
                oauth['access_token'],
                oauth['access_token_secret'],
                signature_method=SIGNATURE_RSA,
                rsa_key=oauth['key_cert'],
                signature_type='auth_header',
            )
        if bearer_auth is not None:
            self.auth = BearerAuth(bearer_auth)
        # The jira.JIRA client is created lazily (see #_client()).
        self.client = None
        self.verify = verify
        # Base URLs for the various non-core REST APIs.
        self.UPM_BASE_URL = join_url(url, 'rest/plugins/1.0/')
        self.AGILE_BASE_URL = join_url(url, 'rest/agile/1.0/')
        self.GREENHOPPER_BASE_URL = join_url(url, 'rest/greenhopper/1.0/')
        self.SERVICEDESK_BASE_URL = join_url(url, 'rest/servicedeskapi/')
        self.SDBUNDLE_BASE_URL = join_url(url, 'rest/jsdbundled/1.0')
        self.XRAY_BASE_URL = join_url(url, 'rest/raven/1.0')
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.url}'
def __repr__(self) -> str:
if self.basic_auth:
rep = self.basic_auth[0]
elif self.oauth:
rep = self.oauth['consumer_key'] # type: ignore
else:
rep = f'Bearer {self.bearer_auth[:5]}...{self.bearer_auth[-2:]}'
return f'<{self.__class__.__name__}: {self.url!r}, {rep!r}>'
def _client(self) -> 'jira.JIRA':
"""singleton instance, only if needed."""
if self.client is None:
from jira import JIRA
options = {
'server': self.url,
'agile_rest_path': 'agile',
'verify': self.verify,
}
if self.bearer_auth:
options['headers'] = {
'Authorization': f'Bearer {self.bearer_auth}'
}
self.client = JIRA(
options=options,
basic_auth=self.basic_auth,
oauth=self.oauth,
)
return self.client
####################################################################
# JIRA search
#
# search
@api_call
def search(
self,
jql: str,
start_at: Optional[int] = None,
max_results: Optional[int] = None,
validate_query: bool = True,
fields: Optional[List[str]] = None,
expand: Optional[str] = None,
) -> Dict[str, Any]:
"""Return the result of a query.
# Required parameters
- jql: a string
# Optional parameters
- start_at: an integer or None (None by default)
- max_results: an integer or None (None by default)
- validate_query: a boolean (True by default)
- fields: a list of strings or None (None by default)
- expand: a string or None (None by default)
`max_results` is limited by the `jira.search.views.default.max`
property, and hence requesting a high number of results may
result in fewer returned results.
# Returned value
A dictionary with the following entries:
- expand: a string
- startAt: an integer
- maxResults: an integer
- total: an integer
- issues: a list of dictionaries
The entries in `issues` items depends on what was specified for
`expand`.
Assuming the default `expand` value, items in `issues` contain
the following entries:
- id: an integer
- expand: a string
- self: a string
- key: a string
- fields: a dictionary
The entries in `fields` depends on the issue type.
"""
ensure_instance('jql', str)
ensure_noneorinstance('start_at', int)
ensure_noneorinstance('max_results', int)
ensure_instance('validate_query', bool)
ensure_noneorinstance('fields', list)
ensure_noneorinstance('expand', str)
params = {'jql': jql, 'validateQuery': validate_query}
add_if_specified(params, 'startAt', start_at)
add_if_specified(params, 'maxResults', max_results)
add_if_specified(params, 'fields', fields)
add_if_specified(
params, 'expand', expand.split(',') if expand is not None else None
)
result = self._post('search', json=params)
return result # type: ignore
####################################################################
# JIRA groups
#
# list_groups
# delete_group
# create_group
# add_group_user
# list_group_users
# remove_group_user
@api_call
def list_groups(self) -> List[str]:
"""Return the list of groups.
# Returned value
A list of strings, each string being a group name.
"""
return self._client().groups()
@api_call
def create_group(self, group_name: str) -> bool:
"""Create a new group.
# Required parameters
- group_name: a non-empty string
# Returned value
A boolean. True if successful, False otherwise.
"""
ensure_nonemptystring('group_name')
return self._client().add_group(group_name)
@api_call
def delete_group(self, group_name: str) -> bool:
"""Delete group.
# Required parameters
- group_name: a non-empty string
# Returned value
A boolean. True if successful, False otherwise.
"""
ensure_nonemptystring('group_name')
return self._client().remove_group(group_name)
@api_call
def add_group_user(
self, group_name: str, user_name: str
) -> Union[bool, Dict[str, Any]]:
"""Add user to group.
# Required parameters
- group_name: a non-empty string
- user_name: a non-empty string
# Returned value
False if the operation failed, a dictionary otherwise.
"""
ensure_nonemptystring('group_name')
ensure_nonemptystring('user_name')
return self._client().add_user_to_group(user_name, group_name)
@api_call
def remove_group_user(self, group_name: str, user_name: str) -> bool:
"""Remove user from group.
# Required parameters
- group_name: a non-empty string
- username: a non-empty string
# Returned value
A boolean, True.
"""
ensure_nonemptystring('group_name')
ensure_nonemptystring('user_name')
return self._client().remove_user_from_group(user_name, group_name)
@api_call
def list_group_users(self, group_name: str) -> Dict[str, Any]:
"""Return the group users.
# Required parameters
- group_name: a non-empty string
# Returned value
A dictionary. Keys are the user names, and values are
dictionaries with the following entries:
- active: a boolean
- fullname: a string
- email: a string
"""
ensure_nonemptystring('group_name')
return self._client().group_members(group_name)
####################################################################
# JIRA permission scheme
#
# list_permissionschemes
# get_permissionscheme
# create_permissionscheme
# update_permissionscheme
# delete_permissionscheme
# list_permissionscheme_grants
@api_call
def list_permissionschemes(
self, expand: str = PERMISSIONSCHEME_EXPAND
) -> List[Dict[str, Any]]:
"""Return the list of permissionschemes.
# Optional parameters
- expand: a string (`PERMISSIONSCHEME_EXPAND` by default)
# Returned value
A list of _permissionschemes_. Each permissionscheme is a
dictionary with the following entries (assuming the default for
`expand`):
- id: an integer
- expand: a string
- name: a string
- self: a string
- description: a string
- permissions: a list of dictionaries
Each `permissions` dictionary has the following entries:
- permission: a string
- id: an integer
- holder: a dictionary
- self: a string
The `holder` dictionary has the following entries
- type: a string
- group: a dictionary
- expand: a string
- parameter: a string
The `group` dictionary has the following entries
- self: a string
- name: a string
"""
ensure_instance('expand', str)
result = self._get_json('permissionscheme', params={'expand': expand})
return result['permissionSchemes'] # type: ignore
@api_call
def get_permissionscheme(
self, scheme_id: int, expand: str = PERMISSIONSCHEME_EXPAND
) -> Dict[str, Any]:
"""Return permission scheme details.
# Required parameters
- scheme_id: an integer
# Optional parameters
- expand: a string (`PERMISSIONSCHEME_EXPAND` by default)
# Returned value
A dictionary. See #get_permissionschemes() for details
on its structure.
"""
ensure_instance('scheme_id', int)
result = self._get_json(
f'permissionscheme/{scheme_id}', params={'expand': expand}
)
return result # type: ignore
@api_call
def create_permissionscheme(
self,
name: str,
description: Optional[str] = None,
permissions: List[Dict[str, Any]] = [],
) -> Dict[str, Any]:
"""Create new permission scheme.
# Required parameters
- name: a non-empty string
# Optional parameters
- description: a string or None (None by default)
- permissions: a possibly empty list of dictionaries (`[]` by
default)
# Returned value
If successful, returns a dictionary containing:
- name
- id
- expand
- self
# Raised exceptions
Raises an _ApiError_ in case of problem (duplicate permission
scheme, invalid permissions, ...).
"""
ensure_nonemptystring('name')
ensure_noneorinstance('description', str)
ensure_instance('permissions', list)
scheme = {'name': name, 'permissions': permissions}
add_if_specified(scheme, 'description', description)
result = self.session().post(
self._get_url('permissionscheme'), data=json.dumps(scheme)
)
return result # type: ignore
@api_call
def update_permissionscheme(
self, scheme_id: int, scheme: Dict[str, Any]
) -> Dict[str, Any]:
"""Update permission scheme scheme_id.
# Required parameters
- scheme_id: an integer
- scheme: a dictionary
# Returned value
A dictionary. See #get_permissionschemes() for details on its
structure.
"""
ensure_instance('scheme_id', int)
ensure_instance('scheme', dict)
result = self.session().put(
self._get_url(f'permissionscheme/{scheme_id}'),
data=json.dumps(scheme),
)
return result # type: ignore
@api_call
def delete_permissionscheme(self, scheme_id: int) -> Dict[str, Any]:
"""Delete permission scheme scheme_id.
# Required parameters
- scheme_id: an integer
# Returned value
An empty dictionary if successful.
"""
ensure_instance('scheme_id', int)
result = self.session().delete(
self._get_url(f'permissionscheme/{scheme_id}')
)
return result # type: ignore
@api_call
def list_permissionscheme_grants(
self, scheme_id: int
) -> List[Dict[str, Any]]:
"""Return list of permission grants.
# Required parameters
- scheme_id: an integer
# Returned value
A list of _permission grants_. Each permission grant is a
dictionary with the following entries:
- id: an integer
- holder: a dictionary
- permission: a string
`holder` contains the following entries:
- parameter: a string
- type: a string
"""
ensure_instance('scheme_id', int)
result = self._get_json(f'permissionscheme/{scheme_id}/permission')
return result['permissions'] # type: ignore
####################################################################
# JIRA misc. schemes
#
# getters suffixed with a '+' add a `active` entry in their returned
# values.
#
# list_issuetypeschemes+
# delete_issuetypescheme (for pre-8 JIRA versions)
# list_issuetypescreenschemes+
# delete_issuetypescreenscheme
# list_notificationschemes
# list_inactivenotificationschemes
# delete_notificationscheme
# list_priorityschemes+
# delete_priorityscheme
# list_fieldconfigurationschemes+
# delete_fieldconfigurationscheme
# list_fieldconfigurations+
# delete_fieldconfiguration
# list_workflows
# delete_workflow
# list_workflowschemes+
# delete_workflowscheme
# list_screens+
# delete_screen
# list_screenschemes+
# delete_screenscheme
# issuetypeschemes
    @api_call
    def list_issuetypeschemes(self) -> List[Dict[str, Any]]:
        """Return the list of issue type schemes.

        This scrapes the admin web UI page -- presumably because JIRA
        Server exposes no REST API for issue type schemes; TODO confirm
        for the targeted JIRA version.

        # Returned value

        A list of _issuetypeschemes_.  Each issuetypescheme is a
        dictionary with the following entries:

        - name: a string
        - id: an integer or a string
        - active: a boolean
        """
        # Admin page listing all issue type schemes.
        uri = 'secure/admin/ManageIssueTypeSchemes!default.jspa'
        # Regexes extracting scheme names and IDs; `pat_inactive` is a
        # template ('%s' is the scheme ID) matching schemes with no
        # associated project.
        pat_name = r'data-scheme-field="name">([^<]+)<'
        pat_id = r'&schemeId=(\d+)">Edit</a>'
        pat_inactive = (
            r'<span class="errorText">No projects</span>\s+'
            r'</td>\s+<td class="cell-type-collapsed">\s+'
            r'<ul class="operations-list">\s+<li><a id="edit_%s"'
        )
        # _parse_data is defined elsewhere in this class -- presumably
        # it fetches `uri` and applies the three patterns.
        return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_issuetypescheme(
        self, scheme_id_or_name: Union[int, str]
    ) -> None:
        """Delete issuetypescheme.

        This drives the admin web UI form in two steps: fetch the
        confirmation form, then submit it with its CSRF token.

        # Required parameters

        - scheme_id_or_name: an integer or a non-empty string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if `scheme_id_or_name` is invalid or something wrong
        occurred.
        """
        ensure_instance('scheme_id_or_name', (int, str))

        # Resolve a scheme name to its ID (raises ApiError if unknown).
        scheme_id = _get_scheme_id(
            scheme_id_or_name, self.list_issuetypeschemes()
        )
        uri = (
            'secure/admin/DeleteOptionScheme!default.jspa?fieldId=&schemeId=%s'
        )
        # Step 1: fetch the confirmation form to obtain the 'atl_token'
        # CSRF token and the session cookies.
        form = self._get(uri % scheme_id)
        # Step 2: submit the deletion form.  _do_form_step is defined
        # elsewhere in this class -- presumably it POSTs the form data.
        self._do_form_step(
            'secure/admin/DeleteOptionScheme.jspa',
            data={
                'atl_token': _get_atl_token(form.text),
                'schemeId': scheme_id,
            },
            cookies=form.cookies,
        )
# issuetypescreenschemes
@api_call
def list_issuetypescreenschemes(self) -> List[Dict[str, Any]]:
"""Return the list of issuetypescreenschemes.
# Returned value
A list of _issuetypescreenschemes_. Each issuetypescreenscheme
is a dictionary with the following entries:
- id: an integer or a string
- name: a string
- active: a boolean
`active` is true if the scheme is associated with at least one
project.
"""
uri = 'secure/admin/ViewIssueTypeScreenSchemes.jspa'
pat_name = r'<strong\s+data-scheme-field="name">([^<]+)<'
pat_id = r'id=(\d+)[^<]*>\s*<strong\s+data-scheme-field'
pat_inactive = (
r'ViewDeleteIssueTypeScreenScheme.jspa\?[^>]+&id=%s"'
)
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_issuetypescreenscheme(
        self, scheme_id_or_name: Union[int, str]
    ) -> None:
        """Delete issuetypescreenscheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - scheme_id_or_name: an integer or a string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if `scheme_id_or_name` is invalid or the scheme is
        active.
        """
        ensure_instance('scheme_id_or_name', (int, str))
        # Resolve a possible scheme name to its numeric ID.
        scheme_id = _get_scheme_id(
            scheme_id_or_name, self.list_issuetypescreenschemes()
        )
        uri = 'secure/admin/ViewIssueTypeScreenSchemes.jspa'
        page = self._get(uri)
        # The delete link (with its XSRF token) is only rendered for
        # inactive schemes, so a missing match means the scheme is in
        # use (or the page layout changed).
        atl_token = re.search(
            r'ViewDeleteIssueTypeScreenScheme.jspa\?atl_token=([^&]+)&id=%s"'
            % scheme_id,
            page.text,
        )
        if not atl_token:
            raise ApiError('Scheme %s is active.' % str(scheme_id_or_name))
        self._do_form_step(
            'secure/admin/DeleteIssueTypeScreenScheme.jspa',
            data={
                'id': scheme_id,
                'confirm': 'true',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# screens
@api_call
def list_screens(self, expand: str = 'deletable') -> List[Dict[str, Any]]:
"""Return the list of screens.
# Optional parameters
- expand: a string (`deletable` by default)
# Returned value
A list of _screens_. Each screen is a dictionary with the
following entries:
- id: an integer or a string
- name: a string
- description: a string
- deletable: a boolean
- expand: a string
"""
ensure_instance('expand', str)
return self._get_json('screens', {'expand': expand})
    @api_call
    def delete_screen(self, screen_id_or_name: Union[int, str]) -> None:
        """Delete screen.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - screen_id_or_name: an integer or a non-empty string

        # Returned value

        None.
        """
        ensure_instance('screen_id_or_name', (int, str))
        # Resolve a possible screen name to its numeric ID.
        scheme_id = _get_scheme_id(screen_id_or_name, self.list_screens())
        uri = 'secure/admin/ViewDeleteFieldScreen.jspa?id=%s'
        # Fetch the confirmation form to obtain the XSRF token and
        # cookies, then submit the deletion step.
        form = self._get(uri % scheme_id)
        self._do_form_step(
            'secure/admin/DeleteFieldScreen.jspa',
            data={
                'id': scheme_id,
                'confirm': 'true',
                'atl_token': _get_atl_token(form.text),
            },
            cookies=form.cookies,
        )
# screenschemes
@api_call
def list_screenschemes(self) -> List[Dict[str, Any]]:
"""Return the list of screenschemes.
# Returned value
A list of _screenschemes_. Each screenscheme is a dictionary
with the following entries:
- id: an integer or a string
- name: a string
- active: a boolean
`active` is true if the screen scheme is used in an Issue Type
Screen Scheme.
"""
uri = 'secure/admin/ViewFieldScreenSchemes.jspa'
pat_name = r'class="field-screen-scheme-name">([^<]+)</strong>'
pat_id = r'ConfigureFieldScreenScheme.jspa\?id=(\d+)"'
pat_inactive = r'ViewDeleteFieldScreenScheme.jspa\?id=%s"'
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_screenscheme(self, scheme_id_or_name: Union[int, str]) -> None:
        """Delete screenscheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - scheme_id_or_name: an integer or a non-empty string

        # Returned value

        None.
        """
        ensure_instance('scheme_id_or_name', (int, str))
        # Resolve a possible scheme name to its numeric ID.
        scheme_id = _get_scheme_id(
            scheme_id_or_name, self.list_screenschemes()
        )
        uri = 'secure/admin/ViewDeleteFieldScreenScheme.jspa?id=%s'
        # Fetch the confirmation form to obtain the XSRF token and
        # cookies, then submit the deletion step.
        form = self._get(uri % scheme_id)
        self._do_form_step(
            'secure/admin/DeleteFieldScreenScheme.jspa',
            data={
                'id': scheme_id,
                'confirm': 'true',
                'atl_token': _get_atl_token(form.text),
            },
            cookies=form.cookies,
        )
# notificationschemes
@api_call
def list_notificationschemes(
self, expand: str = NOTIFICATIONSCHEME_EXPAND
) -> List[Dict[str, Any]]:
"""Return the list of notificationschemes.
# Optional parameters
- expand: a string (`NOTIFICATIONSCHEME_EXPAND` by default)
# Returned value
A list of _notificationschemes_. Each notificationscheme is a
dictionary with the following entries (assuming the default for
`expand`):
- id: an integer
- expand: a string
- name: a string
- self: a string
- description: a string
- notificationSchemeEvents: a list of dictionaries
Each `notificationSchemeEvents` dictionary has the following
entries:
- event: a dictionary
- notifications: a list of dictionaries
The `event` dictionaries have the following entries:
- id: an integer
- name: a string
- description: a string
The `notifications` dictionaries have the following entries:
- id: an integer
- notificationType: a string
They may have other entries depending on their
`notificationType`.
"""
ensure_instance('expand', str)
return self._collect_data(
'notificationscheme', params={'expand': expand}
)
@api_call
def list_inactivenotificationschemes(self) -> List[Dict[str, Any]]:
"""Returns the id of inactive notification schemes.
A notification scheme is said to be inactive if it is not used
by any project.
# Returned value
A list of inactive _notificationschemes_. Each
notificationschemes is a dictionary with the following entries:
- id: an integer
- name: a string
"""
uri = 'secure/admin/ViewNotificationSchemes.jspa'
pat_name = r'<a href="EditNotifications!default.jspa.*?&schemeId=\d+">([^<]+)<'
pat_id = (
r'<a href="EditNotifications!default.jspa.*?&schemeId=(\d+)">'
)
pat_inactive = (
r' \s+</td>\s+<td>\s+'
r'<ul class="operations-list">\s+<li><a id="%s_'
)
return [
{'id': scheme['id'], 'name': scheme['name']}
for scheme in self._parse_data(uri, pat_name, pat_id, pat_inactive)
if not scheme['active']
]
    @api_call
    def delete_notificationscheme(self, scheme_id: Union[int, str]) -> None:
        """Delete notification scheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - scheme_id: either an integer or a string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if the scheme does not exist.
        """
        # Normalize to a string so the regexp below matches either way.
        scheme_id = str(scheme_id)
        ensure_nonemptystring('scheme_id')
        uri = 'secure/admin/ViewNotificationSchemes.jspa'
        page = self._get(uri)
        # The edit link carries the XSRF token; its absence means the
        # scheme is not listed on the page.
        atl_token = re.search(
            r'<a href="EditNotifications!default.jspa\?atl_token=([^&]+)&schemeId=%s">'
            % scheme_id,
            page.text,
        )
        if not atl_token:
            raise ApiError(
                'Notification Scheme %s could not be found.' % scheme_id
            )
        self._do_form_step(
            'secure/admin/DeleteNotificationScheme.jspa',
            data={
                'schemeId': scheme_id,
                'Delete': 'Delete',
                'confirmed': 'true',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# priority schemes
@api_call
def list_priorityschemes(self) -> List[Dict[str, Any]]:
"""Return the list of priorityschemes.
# Returned value
A list of _priorityschemes_. Each priorityscheme is a
dictionary with the following entries:
- id: an integer
- name: a string
- active: a boolean
`active` is true if the priority scheme is used in any project.
"""
uri = 'secure/admin/ViewPrioritySchemes.jspa'
pat_name = r'<strong data-scheme-field="name">([^<]+)</strong>'
pat_id = r'<tr data-id="(\d+)"'
pat_inactive = (
r'<span class="errorText">No projects</span>'
r'</td><td class="cell-type-collapsed">'
r'<ul class="operations-list"><li><a id="\w+_%s"'
)
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_priorityscheme(self, scheme_id: Union[int, str]) -> None:
        """Delete priority scheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - scheme_id: either an integer or a string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if the scheme does not exist.
        """
        scheme_id = str(scheme_id)
        ensure_nonemptystring('scheme_id')
        uri = 'secure/admin/ViewPrioritySchemes.jspa'
        page = self._get(uri)
        # NOTE(review): the XSRF token is scraped from the logout link,
        # which is present on every admin page, so this check actually
        # detects a page-layout change rather than a missing scheme —
        # the error message below may therefore be misleading.
        atl_token = re.search(r'/logout\?atl_token=([^"]+)"', page.text)
        if not atl_token:
            raise ApiError(
                'Priority Scheme %s could not be found.' % scheme_id
            )
        self._do_form_step(
            'secure/admin/DeletePriorityScheme.jspa',
            data={
                'schemeId': scheme_id,
                'decorator': 'dialog',
                'inline': 'true',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# field configuration fields
@api_call
def list_fieldconfigurationschemes(self) -> List[Dict[str, Any]]:
"""Return the list of field configuration schemes.
# Returned value
A list of _fieldconfigurationschemes_. Each
fieldconfigurationschemes is a dictionary with the following
entries:
- id: an integer
- name: a string
- active: a boolean
`active` is true if the field configuration scheme is used in
any project.
"""
uri = 'secure/admin/ViewFieldLayoutSchemes.jspa'
pat_name = r'<strong data-scheme-field="name">([^<]+)</strong>'
pat_id = r'<a id="configure_(\d+)" data-operation="configure"'
pat_inactive = (
r' \s+</td>\s+<td>\s+'
r'<ul class="operations-list">\s+<li><a id="\w+_%s"'
)
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_fieldconfigurationscheme(
        self, scheme_id: Union[int, str]
    ) -> None:
        """Delete field configuration scheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - scheme_id: either an integer or a string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if the scheme does not exist.
        """
        # Normalize to a string so the regexp below matches either way.
        scheme_id = str(scheme_id)
        ensure_nonemptystring('scheme_id')
        uri = 'secure/admin/ViewFieldLayoutSchemes.jspa'
        page = self._get(uri)
        # The delete link carries the XSRF token; its absence means the
        # scheme is not listed on the page.
        atl_token = re.search(
            r'atl_token=([^&]+)&id=%s" title="Delete this scheme">'
            % scheme_id,
            page.text,
        )
        if not atl_token:
            raise ApiError(
                'Field Configuration Scheme %s could not be found.' % scheme_id
            )
        self._do_form_step(
            'secure/admin/DeleteFieldLayoutScheme.jspa',
            data={
                'id': scheme_id,
                'confirm': 'true',
                'Delete': 'Delete',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# field configurations
@api_call
def list_fieldconfigurations(self) -> List[Dict[str, Any]]:
"""Return the list of field configurations.
# Returned value
A list of _fieldconfigurations_. Each fieldconfigurations is a
dictionary with the following entries:
- id: an integer
- name: a string
- active: a boolean
`active` is true if the field configuration scheme is used in
any project.
"""
uri = 'secure/admin/ViewFieldLayouts.jspa'
pat_name = r'<span data-scheme-field="name" class="field-name">\s+.*?title="Edit field properties">([^<]+)'
pat_id = r';id=(\d+)" title="Create a copy of '
pat_inactive = (
r'<td>\s+</td>\s+<td>\s+<ul class="operations-list">'
r'\s+<li><a[^>]+?;id=%s"'
)
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_fieldconfiguration(self, conf_id: Union[int, str]) -> None:
        """Delete field configuration.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - conf_id: either an integer or a string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if the field configuration does not exist.
        """
        # Normalize to a string so the regexp below matches either way.
        conf_id = str(conf_id)
        ensure_nonemptystring('conf_id')
        uri = 'secure/admin/ViewFieldLayouts.jspa'
        page = self._get(uri)
        # The 'copy' link carries the XSRF token; its absence means the
        # configuration is not listed on the page.
        atl_token = re.search(
            r'atl_token=([^&]+)&id=%s" title="Create a copy ' % conf_id,
            page.text,
        )
        if not atl_token:
            raise ApiError(
                'Field Configuration %s could not be found.' % conf_id
            )
        self._do_form_step(
            'secure/admin/DeleteFieldLayout.jspa',
            data={
                'id': conf_id,
                'confirm': 'true',
                'Delete': 'Delete',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# workflows
@api_call
def list_workflows(self) -> List[Dict[str, Any]]:
"""Return the list of workflows.
# Returned value
A list of _workflows_. Each workflow is a dictionary with the
following entries:
- name: a string
- description: a string
- lastModifiedDate: a string (local format)
- lastModifiedUser: a string (display name)
- steps: an integer
- default: a boolean
"""
return self._get_json('workflow') # type: ignore
@api_call
def list_inactiveworkflows(self) -> List[str]:
"""Return the list of inactive workflows.
# Returned value
A list of _workflow names_.
"""
page = self._get('secure/admin/workflows/ListWorkflows.jspa')
inactives = page.text.split('<table id="inactive-workflows-table"')
if len(inactives) == 1:
return []
return re.findall(r'<tr data-workflow-name="([^"]+)">', inactives[1])
    @api_call
    def delete_workflow(self, workflow_name: str) -> None:
        """Delete workflow.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - workflow_name: a non-empty string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if the workflow does not exist or is attached to a
        project.
        """
        ensure_nonemptystring('workflow_name')
        # URL-encode the workflow name as it appears in the delete
        # link, escaping '+' so it is treated literally by re.search.
        what = urlencode({'workflowName': workflow_name}).replace('+', r'\+')
        uri = 'secure/admin/workflows/ListWorkflows.jspa'
        page = self._get(uri)
        # The delete link (with its XSRF token) is only rendered for
        # workflows that can be deleted (i.e., not attached to any
        # project).
        atl_token = re.search(
            r'DeleteWorkflow.jspa\?atl_token=([^&]+)&[^&]+&%s"' % what,
            page.text,
        )
        if not atl_token:
            raise ApiError(
                'Workflow %s not found or attached to project(s).'
                % workflow_name
            )
        self._do_form_step(
            'secure/admin/workflows/DeleteWorkflow.jspa',
            data={
                'workflowName': workflow_name,
                'workflowMode': 'live',
                'confirmedDelete': 'true',
                'atl_token': atl_token.group(1),
            },
            cookies=page.cookies,
        )
# workflowschemes
@api_call
def list_workflowschemes(self) -> List[Dict[str, Any]]:
"""Return list of workflow schemes.
# Returned value
A list of _workflowschemes_. Each workflowscheme is a
dictionary with the following entries:
- name: a string
- id: an integer
- active: a boolean
"""
uri = 'secure/admin/ViewWorkflowSchemes.jspa'
pat_name = r'class="workflow-scheme-name[^<]+<strong>([^<]+)</strong>'
pat_id = r'EditWorkflowScheme.jspa\?schemeId=(\d+)"'
pat_inactive = r'DeleteWorkflowScheme!default.jspa\?schemeId=%s"'
return self._parse_data(uri, pat_name, pat_id, pat_inactive)
    @api_call
    def delete_workflowscheme(
        self, scheme_id_or_name: Union[int, str]
    ) -> None:
        """Delete workflowscheme.

        # Required parameters

        - scheme_id_or_name: an integer or a non-empty string

        # Returned value

        None.

        # Raised exceptions

        _ApiError_ if `scheme_id_or_name` is invalid or something wrong
        occurred.
        """
        ensure_instance('scheme_id_or_name', (int, str))
        if not isinstance(scheme_id_or_name, int):
            # Resolve the scheme name to its numeric ID, then double-
            # check the resolved scheme really carries that name.
            scheme_id = _get_scheme_id(
                scheme_id_or_name, self.list_workflowschemes()
            )
            scheme = self._get_json(f'workflowscheme/{scheme_id}')
            if scheme['name'] != scheme_id_or_name:
                raise ApiError('Scheme %s not found.' % scheme_id_or_name)
        else:
            scheme_id = str(scheme_id_or_name)
        # NOTE(review): uses requests.delete directly (not the wrapped
        # session), so HTTP errors here may not be surfaced the same
        # way as in the other methods — confirm intended.
        requests.delete(
            self._get_url(f'workflowscheme/{scheme_id}'),
            auth=self.auth,
            verify=self.verify,
        )
####################################################################
# JIRA project
#
# list_projects
# get_project
# create_project
# delete_project
# update_project
# list_project_boards
#
# get_project_issuetypescheme
# set_project_issuetypescheme
# get_project_issuetypescreenscheme
# set_project_issuetypescreenscheme
# get_project_notificationscheme
# set_project_notificationscheme
# get_project_permissionscheme
# set_project_permissionscheme
# get_project_priorityscheme
# set_project_priorityscheme
# get_project_workflowscheme
# set_project_workflowscheme
#
# list_project_shortcuts
# add_project_shortcut
# create_project_board
#
# list_project_roles
# get_project_role
# add_project_role_actors
# remove_project_role_actor
@api_call
def list_projects(
self, expand: str = PROJECT_EXPAND
) -> List[Dict[str, Any]]:
"""Return list of expanded projects.
# Optional parameters
- expand: a string (`PROJECT_EXPAND` by default)
# Returned value
A list of _projects_. Each project is a dictionary with the
following entries (assuming the default for `expand`):
- projectKeys: a list of string
- id: a string
- projectTypeKey: a string
- name: a string
- expand: a string
- avatarUrls: a dictionary
- self: a string
- description: a string
- lead: a dictionary
- key: a string
The `avatarUrls` dictionary has string keys (of the form 'nnxnn'
for each avatar size) and string values (an URL referring the
avatar image).
The `lead` dictionary represents a user and has the following
entries:
- avatarUrls: a dictionary as described above
- name: a string
- active: a boolean
- self: a string
- displayName: a string
- key: a string
"""
ensure_instance('expand', str)
result = self._get_json('project', params={'expand': expand})
return result # type: ignore
    @api_call
    def list_projectoverviews(self):
        """Return list of project overviews.

        Scraped from the 'Browse Projects' page: the data is embedded
        in the page as an escaped JSON payload, which is extracted and
        unescaped here.

        # Returned value

        A list of _project overviews_.  Each project overview is a
        dictionary with the following entries:

        - admin: a boolean
        - hasDefaultAvatar: a boolean
        - id: an integer
        - issueCount: an integer or None
        - key: a string
        - lastUpdatedTimestamp: an integer (a timestamp) or None
        - lead: a string
        - leadProfileLink: a string
        - name: a string
        - projectAdmin: a boolean
        - projectCategoryId: ... or None
        - projectTypeKey: a string
        - projectTypeName: a string
        - recent: a boolean
        - url: a string or None
        """
        result = requests.get(
            join_url(self.url, '/secure/project/BrowseProjects.jspa'),
            auth=self.auth,
            verify=self.verify,
        )
        # Extract the single-line escaped JSON payload embedded in the
        # page (dropping the trailing '";' of the assignment).
        upd = result.text.split(
            'WRM._unparsedData["com.atlassian.jira.project.browse:projects"]="'
        )[1].split('\n')[0][:-2]
        # Undo the JavaScript string escaping before parsing as JSON.
        return json.loads(
            upd.replace('\\"', '"').replace('\\\\', '\\').replace("\\\'", "'")
        )
@api_call
def get_project(
self, project_id_or_key: Union[int, str], expand: str = PROJECT_EXPAND
) -> Dict[str, Any]:
"""Returned expanded project details.
# Required parameters
- project_id_or_key: an integer or a non-empty string
# Optional parameters
- expand: a string (`PROJECT_EXPAND` by default)
# Returned value
A dictionary. See #list_projects() for details on its
structure.
"""
ensure_instance('project_id_or_key', (str, int))
ensure_instance('expand', str)
result = self._get_json(
f'project/{project_id_or_key}', params={'expand': expand}
)
return result # type: ignore
@api_call
def create_project(
self,
key: str,
project_type: str,
lead: str,
name: Optional[str] = None,
project_template: Optional[str] = None,
description: Optional[str] = None,
url: Optional[str] = None,
assignee_type: Optional[str] = None,
avatar_id: Optional[int] = None,
issue_security_scheme: Optional[int] = None,
permission_scheme: Optional[int] = None,
notification_scheme: Optional[int] = None,
category_id: Optional[int] = None,
) -> Dict[str, Any]:
"""Create new project.
# Required parameters
- key: a string
- project_type: a string
- lead: a string
# Optional parameters
- name: a string or None (None by default)
- project_template: a string or None (None by default)
- description: a string or None (None by default)
- url: a string or None (None by default)
- assignee_type: one of `'PROJECT_LEAD'`, `'UNASSIGNED'`
- avatar_id: an integer or None (None by default)
- issue_security_scheme: an integer or None (None by default)
- permission_scheme: an integer or None (None by default)
- notification_scheme: an integer or None (None by default)
- category_id: an integer or None (None by default)
# Returned value
A dictionary describing the project if successful.
# Raised exceptions
Raises an _ApiError_ if not successful.
"""
ensure_noneorinstance('avatar_id', int)
ensure_noneorinstance('issue_security_scheme', int)
ensure_noneorinstance('permission_scheme', int)
ensure_noneorinstance('notification_scheme', int)
ensure_noneorinstance('category_id', int)
project = {'key': key}
add_if_specified(project, 'name', name)
add_if_specified(project, 'projectTypeKey', project_type)
add_if_specified(project, 'projectTemplateKey', project_template)
add_if_specified(project, 'description', description)
add_if_specified(project, 'lead', lead)
add_if_specified(project, 'url', url)
add_if_specified(project, 'assigneeType', assignee_type)
add_if_specified(project, 'avatarId', avatar_id)
add_if_specified(project, 'issueSecurityScheme', issue_security_scheme)
add_if_specified(project, 'permissionScheme', permission_scheme)
add_if_specified(project, 'notificationScheme', notification_scheme)
add_if_specified(project, 'categoryId', category_id)
result = self.session().post(
self._get_url('project'), data=json.dumps(project)
)
return result # type: ignore
@api_call
def update_project(
self, project_id_or_key: Union[int, str], project: Dict[str, Any]
) -> Dict[str, Any]:
"""Update project project.
# Required parameters
- project_id_or_key: an integer or a non-empty string
- project: a dictionary
`project` is dictionary with the following optional entries:
- name
- projectTypeKey
- projectTemplateKey
- description
- lead
- url
- assigneeType
- avatarId
- issueSecurityScheme
- permissionScheme
- notificationScheme
- categoryId
This dictionary respects the format returned by
#list_projects().
If an entry is not specified or is None, its corresponding
value in the project will remain unchanged.
# Returned value
A dictionary. See #list_projects() for details on its
structure.
"""
ensure_instance('project_id_or_key', (str, int))
result = self.session().put(
self._get_url(f'project/{project_id_or_key}'),
data=json.dumps(project),
)
return result # type: ignore
@api_call
def delete_project(
self, project_id_or_key: Union[int, str]
) -> Dict[str, Any]:
"""Delete project project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
An empty dictionary if the deletion is successful.
# Raised exceptions
Raises an _ApiError_ if not successful.
"""
ensure_instance('project_id_or_key', (str, int))
result = self.session().delete(
self._get_url(f'project/{project_id_or_key}')
)
return result # type: ignore
@api_call
def list_project_boards(
self, project_id_or_key: Union[int, str]
) -> List[Dict[str, Any]]:
"""Returns the list of boards attached to project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A list of _boards_. Each board is a dictionary with the
following entries:
- type: a string
- id: an integer
- name: a string
- self: a string
# Raised exceptions
Browse project permission required (will raise an _ApiError_
otherwise).
"""
ensure_instance('project_id_or_key', (str, int))
return self.list_boards(params={'projectKeyOrId': project_id_or_key})
@api_call
def get_project_notificationscheme(
self, project_id_or_key: Union[int, str]
) -> Optional[Dict[str, Any]]:
"""Get notificationscheme assigned to project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entries:
- id: an integer
- self: a string
- name: a string
- description: a string
- notificationSchemeEvents: a list of dictionaries
Returns None if no notificationscheme assigned.
"""
ensure_instance('project_id_or_key', (str, int))
try:
return self._get_json(
f'project/{project_id_or_key}/notificationscheme'
)
except:
return None
@api_call
def set_project_notificationscheme(
self,
project_id_or_key: Union[int, str],
scheme_id_or_name: Union[int, str],
) -> Dict[str, Any]:
"""Set notificationscheme associated to project.
# Required parameters
- project_id_or_key: an integer or a string
- scheme_id_or_name! an integer or a string
`scheme_id_or_name` is either the scheme ID or the scheme name.
# Returned value.
A dictionary. See #list_projects() for details on its
structure.
"""
ensure_instance('project_id_or_key', (str, int))
ensure_instance('scheme_id_or_name', (str, int))
if isinstance(scheme_id_or_name, int):
scheme_id = scheme_id_or_name
else:
nss = [
ns['id']
for ns in self.list_notificationschemes()
if ns['name'] == scheme_id_or_name
]
if len(nss) > 1:
raise ApiError(
'More than one notificationscheme with name %s.'
% scheme_id_or_name
)
if not nss:
raise ApiError(
'No notificationscheme with name %s.' % scheme_id_or_name
)
scheme_id = nss[0]
return self.update_project(
project_id_or_key, {'notificationScheme': scheme_id}
)
@api_call
def get_project_permissionscheme(
self, project_id_or_key: Union[int, str]
) -> Dict[str, Any]:
"""Get permissionscheme assigned to project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entries:
- id: an integer
- self: a string
- name: a string
- description: a string
"""
ensure_instance('project_id_or_key', (str, int))
result = self._get_json(
f'project/{project_id_or_key}/permissionscheme'
)
return result # type: ignore
@api_call
def set_project_permissionscheme(
self,
project_id_or_key: Union[int, str],
scheme_id_or_name: Union[int, str],
) -> Dict[str, Any]:
"""Set permissionscheme associated to project.
# Required parameters
- project_id_or_key: an integer or a string
- scheme_id_or_name: an integer or a string
`scheme_id_or_name` is either the scheme ID or the scheme name.
# Returned value
A dictionary with the following entries:
- id: an integer
- self: a string
- name: a string
- description: a string
# Raised exceptions
Raises an _ApiError_ if `scheme_id_or_name` is not known or
ambiguous.
"""
ensure_instance('project_id_or_key', (str, int))
ensure_instance('scheme_id_or_name', (str, int))
if isinstance(scheme_id_or_name, int):
data = {'id': scheme_id_or_name}
else:
pss = [
ps['id']
for ps in self.list_permissionschemes()
if ps['name'] == scheme_id_or_name
]
if len(pss) > 1:
raise ApiError(
'More than one permissionscheme with name %s.'
% scheme_id_or_name
)
if not pss:
raise ApiError(
'No permissionscheme with name %s.' % scheme_id_or_name
)
data = {'id': pss[0]}
result = self.session().put(
self._get_url(f'project/{project_id_or_key}/permissionscheme'),
data=json.dumps(data),
)
return result # type: ignore
@api_call
def get_project_priorityscheme(
self, project_id_or_key: Union[int, str]
) -> Dict[str, Any]:
"""Get priorityscheme associated to project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entries:
- id: an integer
- self: a string
- name: a string
- description: a string
- ...
"""
ensure_instance('project_id_or_key', (str, int))
result = self._get_json(f'project/{project_id_or_key}/priorityscheme')
return result # type: ignore
@api_call
def set_project_priorityscheme(
self,
project_id_or_key: Union[int, str],
scheme_id_or_name: Union[int, str],
) -> Dict[str, Any]:
"""Set priorityscheme associated to project.
# Required parameters
- project_id_or_key: an integer or a string
- scheme_id_or_name: an integer or a string
`scheme_id_or_name` is either the scheme ID or the scheme name.
# Returned value
A dictionary with the following entries:
- expand: a string
- self: a string
- id: an integer
- name: a string
- description: a string
- defaultOptionId: a string
- optionIds: a list of strings
- defaultScheme: a boolean
- projectKeys: a list of strings
# Raised exceptions
Raises an _ApiError_ if `scheme_id_or_name` is not known or
ambiguous.
"""
ensure_instance('project_id_or_key', (str, int))
ensure_instance('scheme_id_or_name', (str, int))
if isinstance(scheme_id_or_name, int):
data = {'id': scheme_id_or_name}
else:
pss = [
ps['id']
for ps in requests.get(
self._get_url('priorityschemes'),
auth=self.auth,
verify=self.verify,
).json()['schemes']
if ps['name'] == scheme_id_or_name
]
if len(pss) > 1:
raise ApiError(
'More than one priorityscheme with name %s.'
% scheme_id_or_name
)
if not pss:
raise ApiError(
'No priorityscheme with name %s.' % scheme_id_or_name
)
data = {'id': pss[0]}
result = self.session().put(
self._get_url(f'project/{project_id_or_key}/priorityscheme'),
data=json.dumps(data),
)
return result # type: ignore
@api_call
def get_project_workflowscheme(
self, project_id_or_key: Union[int, str]
) -> Dict[str, Any]:
"""Get workflowscheme assigned to project.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entries:
- name: a string
- description: a string
- id: an integer
- shared: a dictionary
- ...
"""
ensure_instance('project_id_or_key', (str, int))
# projectconfig requires a project key
project = self.get_project(project_id_or_key)
api_uri = f'rest/projectconfig/1/workflowscheme/{project["key"]}'
return self._get(api_uri) # type: ignore
    @api_call
    def set_project_workflowscheme(
        self, project_id_or_key: Union[int, str], workflowscheme: str
    ) -> None:
        """Set project workflowscheme.

        # Required parameters

        - project_id_or_key: an integer or a string
        - workflowscheme: a non-empty string (the scheme name)

        # Returned value

        None.
        """
        # No API for that, using forms...
        #
        # !!! note
        #     The last request returns a 401 error, but it
        #     works.  No idea why (and skipping it does NOT work).
        #     Maybe due to a redirect?
        ensure_instance('project_id_or_key', (str, int))
        ensure_nonemptystring('workflowscheme')
        project = self.get_project(project_id_or_key)
        # Fetch the scheme-selection form; this resolves the scheme
        # name to its ID and yields the XSRF token and cookies.
        form, workflowscheme_id = self._get_projectconfig_option(
            'secure/project/SelectProjectWorkflowScheme!default.jspa',
            project['id'],
            workflowscheme,
        )
        atl_token = _get_atl_token(form.text)
        # Two-step association wizard: step 2 'default' first, then the
        # final submission (with the migration parameters).
        step1 = self._do_form_step(
            'secure/project/SelectProjectWorkflowSchemeStep2!default.jspa',
            data={
                'Associate': 'Associate',
                'atl_token': atl_token,
                'projectId': project['id'],
                'schemeId': workflowscheme_id,
            },
            cookies=form.cookies,
        )
        self._do_form_step(
            'secure/project/SelectProjectWorkflowSchemeStep2.jspa',
            data={
                'Associate': 'Associate',
                'atl_token': atl_token,
                'projectId': project['id'],
                'schemeId': workflowscheme_id,
                'draftMigration': False,
                'projectIdsParameter': project['id'],
            },
            cookies=step1.cookies,
        )
@api_call
def get_project_issuetypescheme(
self, project_id_or_key: Union[int, str]
) -> Dict[str, str]:
"""Return the current issuetypescheme name.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entry:
- name: a string
# Raised exceptions
Raises an _ApiError_ if the project does not exist.
"""
return {
'name': self._get_projectconfig_scheme(
project_id_or_key, 'issuetypes'
)
}
    @api_call
    def set_project_issuetypescheme(
        self, project_id_or_key: Union[int, str], scheme: str
    ) -> None:
        """Set project issuetypescheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - project_id_or_key: an integer or a string
        - scheme: a non-empty string (the scheme name)

        # Returned value

        None.

        # Raised exceptions

        Raises an _ApiError_ if the scheme does not exist.
        """
        ensure_instance('project_id_or_key', (str, int))
        ensure_nonemptystring('scheme')
        project = self.get_project(project_id_or_key)
        # Fetch the scheme-selection form; this resolves the scheme
        # name to its option ID and yields the XSRF token and cookies.
        page, option = self._get_projectconfig_option(
            'secure/admin/SelectIssueTypeSchemeForProject!default.jspa',
            project['id'],
            scheme,
        )
        self._do_form_step(
            'secure/admin/SelectIssueTypeSchemeForProject.jspa',
            data={
                'OK': 'OK',
                'atl_token': _get_atl_token(page.text),
                'projectId': project['id'],
                'schemeId': option,
                'createType': 'chooseScheme',
            },
            cookies=page.cookies,
        )
@api_call
def get_project_issuetypescreenscheme(
self, project_id_or_key: Union[int, str]
) -> Dict[str, str]:
"""Return the current issuetypescreenscheme name.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A dictionary with the following entry:
- name: a string
# Raised exceptions
If the project does not exist, raises an _ApiError_.
"""
return {
'name': self._get_projectconfig_scheme(
project_id_or_key, 'screens'
)
}
    @api_call
    def set_project_issuetypescreenscheme(
        self, project_id_or_key: Union[int, str], scheme: str
    ) -> None:
        """Set project issuetypescreenscheme.

        Implemented by driving the admin web forms (no REST API).

        # Required parameters

        - project_id_or_key: an integer or a string
        - scheme: a non-empty string (the scheme name)

        # Returned value

        None.

        # Raised exceptions

        Raises an _ApiError_ if the scheme does not exist.
        """
        ensure_instance('project_id_or_key', (str, int))
        ensure_nonemptystring('scheme')
        project = self.get_project(project_id_or_key)
        # Fetch the scheme-selection form; this resolves the scheme
        # name to its option ID and yields the XSRF token and cookies.
        page, option = self._get_projectconfig_option(
            'secure/project/SelectIssueTypeScreenScheme!default.jspa',
            project['id'],
            scheme,
        )
        self._do_form_step(
            'secure/project/SelectIssueTypeScreenScheme.jspa',
            data={
                'Associate': 'Associate',
                'atl_token': _get_atl_token(page.text),
                'projectId': project['id'],
                'schemeId': option,
            },
            cookies=page.cookies,
        )
@api_call
def list_project_shortcuts(
self, project_id_or_key: Union[int, str]
) -> List[Dict[str, str]]:
"""Return the list of project shortcuts.
# Required parameters
- project_id_or_key: an integer or a string
# Returned value
A list of project _shortcuts_. Each shortcut is a dictionary
with the following entries:
- name: a string
- url: a string
- id: a string
- icon: a string
The list may be empty.
"""
ensure_instance('project_id_or_key', (str, int))
api_uri = f'rest/projects/1.0/project/{project_id_or_key}/shortcut'
return self._get(api_uri) # type: ignore
@api_call
def add_project_shortcut(
    self, project_id_or_key: Union[int, str], url: str, description: str
) -> Dict[str, str]:
    """Add a shortcut to project.

    !!! note
        It is not an error to create identical shortcuts.

    # Required parameters

    - project_id_or_key: an integer or a string
    - url: a non-empty string (the shortcut target)
    - description: a non-empty string (the shortcut display name)

    # Returned value

    A dictionary with the following entries:

    - icon: a string
    - id: a string
    - name: a string
    - url: a string
    """
    ensure_instance('project_id_or_key', (str, int))
    ensure_nonemptystring('url')
    ensure_nonemptystring('description')
    # The shortcut endpoint wants the project key, so resolve the
    # project first (also validates the project exists).
    project = self.get_project(project_id_or_key)
    result = requests.post(
        join_url(
            self.url,
            f'rest/projects/1.0/project/{project["key"]}/shortcut',
        ),
        json={'url': url, 'name': description, 'icon': ''},
        auth=self.auth,
        verify=self.verify,
    )
    # NOTE(review): a raw Response is returned here; presumably the
    # @api_call decorator converts it — confirm against the decorator.
    return result  # type: ignore
@api_call
def create_project_board(
    self, project_id_or_key: Union[int, str], name: str, preset: str
) -> Dict[str, Any]:
    """Create a new board associated to project.

    # Required parameters

    - project_id_or_key: an integer or a string
    - name: a non-empty string
    - preset: one of 'kanban', 'scrum'

    # Returned value

    A dictionary with the following entries:

    - id: an integer
    - name: a string

    KLUDGE we shouldn't switch to greenhopper
    """
    # Accept both IDs (ints) and keys (strings), matching the declared
    # signature and the validation style of the other project methods.
    ensure_instance('project_id_or_key', (str, int))
    ensure_nonemptystring('name')
    ensure_in('preset', ['kanban', 'scrum'])
    # Board creation requires the legacy 'greenhopper' REST path;
    # restore the default 'agile' path afterwards.
    self._client()._options['agile_rest_path'] = 'greenhopper'
    result = self._client().create_board(name, project_id_or_key, preset)
    self._client()._options['agile_rest_path'] = 'agile'
    return result.raw
@api_call
def list_project_roles(
    self, project_id_or_key: Union[int, str]
) -> Dict[str, Any]:
    """Return the project roles.

    # Required parameters

    - project_id_or_key: an integer or a string

    # Returned value

    A dictionary.  Keys are role names, and values are URIs
    containing details for the role.
    """
    ensure_instance('project_id_or_key', (str, int))
    return self._get_json(  # type: ignore
        f'project/{project_id_or_key}/role'
    )
@api_call
def get_project_role(
    self, project_id_or_key: Union[int, str], role_id: Union[int, str]
) -> Dict[str, Any]:
    """Return the project role details.

    # Required parameters

    - project_id_or_key: an integer or a string
    - role_id: an integer or a string

    # Returned value

    A dictionary with the following entries:

    - self: a string (an URL)
    - name: a string
    - id: an integer
    - actors: a list of dictionaries

    `actors` entries have the following entries:

    - id: an integer
    - displayName: a string
    - type: a string
    - name: a string
    - avatarUrl: a string
    """
    ensure_instance('project_id_or_key', (str, int))
    ensure_instance('role_id', (str, int))
    uri = f'project/{project_id_or_key}/role/{role_id}'
    return self._get_json(uri)  # type: ignore
@api_call
def add_project_role_actors(
    self,
    project_id_or_key: Union[int, str],
    role_id: Union[int, str],
    groups: Optional[List[str]] = None,
    users: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Add an actor (group or user) to a project role.

    You can only specify either `groups` or `users`.

    # Required parameters

    - project_id_or_key: an integer or a string
    - role_id: an integer or a string
    - groups: a list of strings
    - users: a list of strings

    # Returned value

    A project role.  Refer to #get_project_role() for details.
    """
    ensure_instance('project_id_or_key', (str, int))
    ensure_instance('role_id', (str, int))
    ensure_onlyone('groups', 'users')
    ensure_noneorinstance('groups', list)
    ensure_noneorinstance('users', list)
    # Exactly one of groups/users is provided (checked above).
    if groups is None:
        payload = {'user': users}
    else:
        payload = {'group': groups}  # type: ignore
    return self.session().post(  # type: ignore
        self._get_url(f'project/{project_id_or_key}/role/{role_id}'),
        data=json.dumps(payload),
    )
@api_call
def remove_project_role_actor(
    self,
    project_id_or_key: Union[int, str],
    role_id: Union[int, str],
    group: Optional[str] = None,
    user: Optional[str] = None,
) -> None:
    """Remove an actor from project role.

    You can only specify either `group` or `user`.

    # Required parameters

    - project_id_or_key: an integer or a string
    - role_id: an integer or a string
    - group: a string
    - user: a string
    """
    ensure_instance('project_id_or_key', (str, int))
    ensure_instance('role_id', (str, int))
    ensure_onlyone('group', 'user')
    ensure_noneorinstance('group', str)
    ensure_noneorinstance('user', str)
    # Exactly one of group/user is provided (checked above).
    if group is None:
        query = {'user': user}
    else:
        query = {'group': group}  # type: ignore
    self.session().delete(
        self._get_url(f'project/{project_id_or_key}/role/{role_id}'),
        params=query,
    )
####################################################################
# JIRA roles
#
# list_roles
@api_call
def list_roles(self) -> List[Dict[str, Any]]:
    """Return list of roles available in JIRA.

    # Returned value

    A list of _roles_.  Each role is a dictionary with the
    following entries:

    - self: a string (an URL)
    - name: a string
    - id: an integer
    - actors: a list of dictionaries

    `actors` entries have the following entries:

    - id: an integer
    - displayName: a string
    - type: a string
    - name: a string
    - avatarUrl: a string

    The `actors` entry may be missing.
    """
    return self._get_json('role')  # type: ignore
####################################################################
# JIRA users
#
# list_users
# get_user
# get_currentuser
# create_user
# update_user
# delete_user
# search_user
@api_call
def list_users(self, include_inactive: bool = True) -> List[str]:
    """Return users list.

    All known users are returned, including inactive ones if
    `include_inactive` is true.

    # Optional parameters

    - include_inactive: a boolean (True by default)

    # Returned value

    A list of _user names_ (strings).
    """
    # The user-search API requires a query string, so sweep the
    # alphabet and deduplicate.  A dict is used as an ordered set.
    seen: Dict[str, bool] = {}
    for letter in 'abcdefghijklmnopqrstuvwxyz':
        start = 0
        while True:
            batch = self._client().search_users(
                letter,
                includeInactive=include_inactive,
                maxResults=MAX_RESULTS,
                startAt=start,
            )
            for match in batch:
                seen[match.name] = True
            # A short page means this letter is exhausted.
            if len(batch) != MAX_RESULTS:
                break
            start += MAX_RESULTS
    return list(seen)
@api_call
def get_user(
    self, user_name: str, expand: Optional[str] = None
) -> Dict[str, Any]:
    """Return user details.

    # Required parameters

    - user_name: a non-empty string

    # Optional parameters

    - expand: a string

    If not specified, `expand` defaults to
    `'groups,applicationRoles'` and lists what to return for each
    user.

    # Returned value

    A dictionary with the following entries (assuming the default
    for `expand`):

    - active: a boolean
    - applicationRoles: a dictionary
    - avatarUrls: a dictionary
    - displayName: a string
    - emailAddress: a string
    - expand: a string
    - groups: a dictionary
    - key: a string
    - locale: a string
    - name: a string
    - self: a string
    - timeZone: a string

    The `applicationRoles` has two entries, `size` and `items`.
    `size` is the number of entries in item, `items` is a list of
    dictionaries.

    Each entry (if any) in the items list has the following entries:

    - key: a string
    - name: a string

    # Raised exceptions

    If `user_name` does not exist, an _ApiError_ is raised.
    """
    ensure_nonemptystring('user_name')
    ensure_noneorinstance('expand', str)
    query = {
        'username': user_name,
        'expand': USER_EXPAND if expand is None else expand,
    }
    return self._get_json('user', params=query)  # type: ignore
@api_call
def get_currentuser(self, expand: Optional[str] = None) -> Dict[str, Any]:
    """Return currently logged user details.

    # Optional parameters

    - expand: a string

    # Returned value

    A dictionary.  Refer to #get_user() for details.
    """
    ensure_noneorinstance('expand', str)
    query = {'expand': expand} if expand else {}
    return self._get_json('myself', params=query)  # type: ignore
@api_call
def search_users(self, name: str) -> List[Dict[str, Any]]:
    """Return list of user details for users matching name.

    Return at most 1000 entries.

    # Required parameters

    - name: a non-empty string

    `name` will be searched in `name` and `displayName` fields, and
    is case-insensitive.

    # Returned value

    A list of _user details_.  Each user details is a dictionary.
    Refer to #get_user() for its structure.
    """
    ensure_nonemptystring('name')
    matches = self._client().search_users(
        name, includeInactive=True, maxResults=1000
    )
    details = []
    for match in matches:
        details.append(self.get_user(match.name))
    return details
@api_call
def create_user(
    self,
    name: str,
    password: Optional[str],
    email_address: str,
    display_name: str,
) -> bool:
    """Create a new user.

    # Required parameters

    - name: a non-empty string
    - password: a non-empty string or None
    - email_address: a non-empty string
    - display_name: a string

    # Returned value

    True if successful.
    """
    ensure_nonemptystring('name')
    ensure_nonemptystring('email_address')
    ensure_noneorinstance('password', str)
    ensure_instance('display_name', str)
    client = self._client()
    return client.add_user(
        name, email_address, password=password, fullname=display_name
    )
@api_call
def update_user(
    self, user_name: str, user: Dict[str, Any]
) -> Dict[str, Any]:
    """Update user.

    !!! note
        JSON support only.

    # Required parameters

    - user_name: a non-empty string
    - user: a dictionary of fields to update

    # Returned value

    The updated _user details_.  Refer to #get_user() for more
    information.
    """
    ensure_nonemptystring('user_name')
    ensure_instance('user', dict)
    response = self.session().put(
        self._get_url('user'),
        params={'username': user_name},
        data=json.dumps(user),
    )
    return response  # type: ignore
@api_call
def delete_user(self, user_name: str) -> bool:
    """Delete user.

    # Required parameters

    - user_name: a non-empty string

    # Returned value

    True if successful, False otherwise.
    """
    ensure_nonemptystring('user_name')
    client = self._client()
    return client.delete_user(user_name)
####################################################################
# JIRA agile
#
# list_boards
# get_board
# get_board_configuration
# list_board_sprints
# list_board_projects
# list_board_epics
# create_board
# delete_board
# get_board_editmodel
# set_board_admins
# set_board_columns
# set_board_daysincolumn
@api_call
def list_boards(
    self, params: Optional[Dict[str, Any]] = None
) -> List[Dict[str, Any]]:
    """Return the list of boards.

    # Optional parameters

    - params: a dictionary or None (None by default)

    `params`, if provided, is a dictionary with at least one of the
    following entries:

    - expand: a string
    - includePrivate: a boolean
    - maxResults: an integer
    - name: a string
    - orderBy: a string
    - projectKeyOrId: a string
    - projectLocation: a string
    - startAt: an integer
    - type: a string
    - userkeyLocation: a string
    - usernameLocation: a string

    # Returned value

    A list of _boards_.  Each board is a dictionary with the
    following entries:

    - name: a string
    - type: a string (`'scrum'` or `'kanban'`)
    - id: an integer
    - self: a string (URL)
    """
    ensure_noneorinstance('params', dict)
    # _collect_agile_data handles pagination over the agile API.
    return self._collect_agile_data('board', params=params)
@api_call
def get_board(self, board_id: int) -> Dict[str, Any]:
    """Return board details.

    # Required parameters

    - board_id: an integer

    # Returned value

    A dictionary with the following entries:

    - id: an integer
    - type: a string
    - name: a string
    - self: a string
    """
    ensure_instance('board_id', int)
    client = self._client()
    return client._get_json(  # type: ignore
        f'board/{board_id}', base=client.AGILE_BASE_URL
    )
@api_call
def get_board_configuration(self, board_id: int) -> Dict[str, Any]:
    """Return board configuration details.

    # Required parameters

    - board_id: an integer

    # Returned value

    A dictionary with the following entries:

    - filter: a dictionary
    - ranking: a dictionary
    - columnConfig: a dictionary
    - name: a string
    - subQuery: a dictionary
    - self: a string (an URL)
    - type: a string
    - id: an integer
    """
    ensure_instance('board_id', int)
    client = self._client()
    return client._get_json(  # type: ignore
        f'board/{board_id}/configuration',
        base=client.AGILE_BASE_URL,
    )
@api_call
def list_board_sprints(self, board_id: int) -> List[Dict[str, Any]]:
    """Return the list of sprints attached to board.

    Sprints will be ordered first by state (i.e. closed, active,
    future) then by their position in the backlog.

    # Required parameters

    - board_id: an integer

    # Returned value

    A list of _sprints_.  Each sprint is a dictionary with the
    following entries:

    - id: an integer
    - self: a string
    - state: a string
    - name: a string
    - startDate: a string (an ISO8601 timestamp)
    - endDate: a string (an ISO8601 timestamp)
    - originBoardId: an integer
    - goal: a string

    Depending on the sprint state, some entries may be missing.
    """
    ensure_instance('board_id', int)
    endpoint = f'board/{board_id}/sprint'
    return self._collect_agile_data(endpoint)
@api_call
def list_board_projects(self, board_id: int) -> List[Dict[str, Any]]:
    """Return the list of projects attached to board.

    # Required parameters

    - board_id: an integer

    # Returned value

    A list of _projects_.  Each project is a dictionary with the
    following entries:

    - key: a string
    - id: a string (or an int)
    - avatarUrls: a dictionary
    - name: a string
    - self: a string
    """
    ensure_instance('board_id', int)
    endpoint = f'board/{board_id}/project'
    return self._collect_agile_data(endpoint)
@api_call
def list_board_epics(self, board_id: int) -> List[Dict[str, Any]]:
    """Return the list of epics attached to board.

    # Required parameters

    - board_id: an integer

    # Returned value

    A list of _epics_.  Each epic is a dictionary with the following
    entries:

    - id: an integer
    - self: a string
    - name: a string
    - summary: a string
    - color: a dictionary
    - done: a boolean

    The `color` dictionary has one key, `'key'`, with its value
    being a string (the epic color, for example `'color_1'`).
    """
    ensure_instance('board_id', int)
    endpoint = f'board/{board_id}/epic'
    return self._collect_agile_data(endpoint)
@api_call
def create_board(
    self, board_name: str, board_type: str, filter_id: int
) -> Dict[str, Any]:
    """Create board.

    # Required parameters

    - board_name: a non-empty string
    - board_type: a non-empty string, either `'scrum'` or `'kanban'`
    - filter_id: an integer (the ID of the filter backing the board)

    # Returned value

    A dictionary with the following entries:

    - id: an integer
    - self: a string (an URL)
    - name: a string
    - type: a string
    """
    ensure_nonemptystring('board_name')
    ensure_in('board_type', ['scrum', 'kanban'])
    ensure_instance('filter_id', int)
    data = {'name': board_name, 'type': board_type, 'filterId': filter_id}
    result = requests.post(
        join_url(self.AGILE_BASE_URL, 'board'),
        json=data,
        auth=self.auth,
        verify=self.verify,
    )
    # NOTE(review): a raw Response is returned here; presumably the
    # @api_call decorator converts it — confirm against the decorator.
    return result  # type: ignore
@api_call
def delete_board(self, board_id: int) -> None:
    """Delete board.

    # Required parameters

    - board_id: an integer

    # Returned value

    None if successful.

    # Raised exceptions

    An _ApiError_ is raised if the board does not exist or if
    the deletion was not successful.
    """
    ensure_instance('board_id', int)
    result = requests.delete(
        join_url(self.AGILE_BASE_URL, f'board/{board_id}'),
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def get_board_editmodel(self, board_id: int) -> Dict[str, Any]:
    """Return board editmodel.

    The editmodel is exposed by the (internal) greenhopper API, not
    the public agile API.

    # Required parameters

    - board_id: an integer

    # Returned value

    A dictionary with the following entries:

    - boardAdmins: a dictionary
    - canEdit: a boolean
    - canUseBoardAdminsPicker: a boolean
    - cardColorConfig: a dictionary
    - cardLayoutConfig: a dictionary
    - detailViewFieldConfig: a dictionary
    - estimationStatisticConfig: a dictionary
    - filterConfig: a dictionary
    - globalConfig: a dictionary
    - id: an integer
    - isKanPlanEnabled: a boolean
    - isOldDoneIssuesCutoffConfigurable: a boolean
    - isSprintSupportEnabled: a boolean
    - JQLAutoComplete: a dictionary
    - name: a string
    - oldDoneIssuesCutoff: a string
    - oldDoneIssuesCutoffOptions: a list of dictionaries
    - quickFilterConfig: a dictionary
    - rapidListConfig: a dictionary
    - showDaysInColumn: a boolean
    - showEpicAsPanel: a boolean
    - subqueryConfig: a dictionary
    - swimlanesConfig: a dictionary
    - warnBeforeEditingOwner: a boolean
    - workingDaysConfig: a dictionary
    """
    ensure_instance('board_id', int)
    # Greenhopper calls boards 'rapid views', hence 'rapidViewId'.
    result = requests.get(
        join_url(self.GREENHOPPER_BASE_URL, 'rapidviewconfig/editmodel'),
        params={'rapidViewId': board_id},
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def set_board_admins(
    self, board_id: int, board_admins: Dict[str, List[str]]
) -> Dict[str, List[Dict[str, str]]]:
    """Set the board administrators.

    # Required parameters

    - board_id: an integer
    - board_admins: a dictionary

    The `board_admins` dictionary has the following two entries:

    - groupKeys: a list of strings
    - userKeys: a list of strings

    The lists can be empty.  Their items must be valid group keys
    or user keys, respectively.

    # Returned value

    A dictionary with the following entries:

    - groupKeys: a list of dictionaries
    - userKeys: a list of dictionaries

    The list items are dictionaries with the following two entries:

    - key: a string
    - displayName: a string

    This returned value has the same format as the `boardAdmins`
    entry in #get_board_editmodel().

    # Raised exceptions

    Raises an _ApiError_ if a provided key is invalid.
    """
    ensure_instance('board_id', int)
    ensure_instance('board_admins', dict)
    # Uses the internal greenhopper API (no public equivalent).
    result = requests.put(
        join_url(self.GREENHOPPER_BASE_URL, 'rapidviewconfig/boardadmins'),
        json={'id': board_id, 'boardAdmins': board_admins},
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def set_board_columns(
    self,
    board_id: int,
    columns_template: List[Dict[str, Any]],
    statistics_field: str = 'none_',
) -> Dict[str, Any]:
    """Set the board columns.

    # Required parameters

    - board_id: an integer
    - columns_template: a list of dictionaries

    Each item in the `columns_template` list has the following
    entries:

    - name: a non-empty string
    - mappedStatuses: a list of string (possibly empty)
    - isKanPlanColumn: a boolean
    - min: a string,
    - max: a string,
    - id: an integer or None

    `mappedStatuses` entries must be names of existing statuses in
    the associated project(s) workflow(s).  A given status cannot
    be mapped to more than one column (but it's fine to have a
    status not mapped to a column).

    If `id` is None, a new column is created.  If it is not None,
    the column must already exist, and will be updated if needed.

    # Optional parameters

    - statistics_field: a non-empty string (`'none_'` by default)

    If `statistics_field` is specified, it must be the ID of a
    valid statistic field.

    # Returned value

    A dictionary.

    # Raised exceptions

    Raises an _ApiError_ if the provided columns definition is
    invalid.
    """
    ensure_instance('board_id', int)
    ensure_instance('columns_template', list)
    ensure_nonemptystring('statistics_field')
    # The editmodel gives us the valid statistics fields and the
    # statuses currently known to the board.
    model = self.get_board_editmodel(board_id)
    if statistics_field not in [
        sf['id'] for sf in model['rapidListConfig']['statisticsFields']
    ]:
        raise ApiError('Unknown statistics_field %s.' % statistics_field)
    # collecting known statuses (unmapped + already mapped)
    statuses = list(model['rapidListConfig']['unmappedStatuses'])
    for col in model['rapidListConfig']['mappedColumns']:
        statuses += col['mappedStatuses']
    statuses_names = {status['name']: status['id'] for status in statuses}
    # Translate status names to status IDs, rejecting duplicates and
    # unknown names along the way.
    mapped_names: List[str] = []
    columns_definitions = []
    for col in columns_template:
        col_statuses = []
        for name in col['mappedStatuses']:
            if name in mapped_names:
                raise ApiError('Status %s mapped more than once.' % name)
            if name not in statuses_names:
                raise ApiError('Unknown status %s.' % name)
            mapped_names.append(name)
            col_statuses.append(name)
        column_definition = col.copy()
        column_definition['mappedStatuses'] = [
            {'id': statuses_names[n]} for n in col_statuses
        ]
        columns_definitions.append(column_definition)
    # Uses the internal greenhopper API (no public equivalent).
    result = requests.put(
        join_url(self.GREENHOPPER_BASE_URL, 'rapidviewconfig/columns'),
        json={
            'currentStatisticsField': {'id': statistics_field},
            'rapidViewId': board_id,
            'mappedColumns': columns_definitions,
        },
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def set_board_daysincolumn(
    self, board_id: int, days_in_column: bool
) -> None:
    """Enable or disable the time spent indicator on cards.

    # Required parameters

    - board_id: an integer
    - days_in_column: a boolean

    # Returned value

    None if successful.

    # Raised exceptions

    An _ApiError_ is raised if something went wrong while setting
    the time spent indicator.
    """
    ensure_instance('board_id', int)
    ensure_instance('days_in_column', bool)
    # Uses the internal greenhopper API (no public equivalent).
    result = requests.put(
        join_url(
            self.GREENHOPPER_BASE_URL, 'rapidviewconfig/showDaysInColumn'
        ),
        json={'rapidViewId': board_id, 'showDaysInColumn': days_in_column},
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
####################################################################
# JIRA issues
#
# get_issue
# list_issue_transitions
# list_issue_comments
# add_issue_comment
# add_issue_link
# transition_issue
# get_issue_fields
# create_issue
# create_issues
# TODO delete_issue
# update_issue
# assign_issue
# TEST add_issue_attachment
@api_call
def get_issue(
    self, issue_id_or_key: str, expand: Optional[str] = None
) -> Dict[str, Any]:
    """Return issue details.

    # Required parameters

    - issue_id_or_key: a non-empty string

    # Optional parameters

    - expand: a string or None (None by default)

    # Returned value

    A dictionary with the following entries:

    - fields: a dictionary
    - self: a string
    - id: a string
    - key: a string
    - expand: a string

    `fields` contains one entry per field associated with the issue.
    The key is the field name (`resolution`, `customfield_11038`,
    ...).  The value is field-dependent (it may be None, a list,
    a string, a dictionary, ...).
    """
    ensure_nonemptystring('issue_id_or_key')
    ensure_noneorinstance('expand', str)
    # The underlying client wants a list of expand items, or None.
    expand_items = None if expand is None else expand.split(',')
    issue = self._client().issue(issue_id_or_key, expand=expand_items)
    return issue.raw
@api_call
def list_issue_comments(
    self, issue_id_or_key: str
) -> List[Dict[str, Any]]:
    """Return the available comments for issue.

    # Required parameters

    - issue_id_or_key: a non-empty string

    # Returned value

    A list of _comments_.  Each comment is a dictionary with the
    following entries:

    - self: a string (an URL)
    - id: a string
    - author: a dictionary
    - body: a string
    - updateAuthor
    - created: a string (a timestamp)
    - updated: a string (a timestamp)
    """
    # Validate input, as the other issue methods do.
    ensure_nonemptystring('issue_id_or_key')
    return [c.raw for c in self._client().comments(issue_id_or_key)]
@api_call
def add_issue_comment(
    self, issue_id_or_key: str, fields: Dict[str, Any]
) -> Dict[str, Any]:
    """Add a comment.

    # Required parameters

    - issue_id_or_key: a non-empty string
    - fields: a dictionary (typically with at least a `body` entry)

    # Returned value

    A _comment_.  Comments are dictionaries.  Refer to
    #list_issue_comments() for more information.
    """
    # Validate inputs, as the other issue methods do.
    ensure_nonemptystring('issue_id_or_key')
    ensure_instance('fields', dict)
    url = self._get_url(f'issue/{issue_id_or_key}/comment')
    result = self.session().post(url, data=json.dumps(fields))
    return result  # type: ignore
@api_call
def add_issue_link(
    self,
    inward_issue_id_or_key: str,
    type_: str,
    outward_issue_id_or_key: str,
) -> Dict[str, Any]:
    """Add an issue link between two issues.

    The `type_` value must be a valid _issue link type_ name.  Refer
    to #list_issuelinktypes() for details.

    # Required parameters

    - inward_issue_id_or_key: a non-empty string
    - type_: a non-empty string
    - outward_issue_id_or_key: a non-empty string

    # Returned value

    A dictionary.
    """
    # Validate inputs, as the other issue methods do.
    ensure_nonemptystring('inward_issue_id_or_key')
    ensure_nonemptystring('type_')
    ensure_nonemptystring('outward_issue_id_or_key')
    url = self._get_url('issueLink')
    data = {
        'type': {'name': type_},
        'inwardIssue': {'key': inward_issue_id_or_key},
        'outwardIssue': {'key': outward_issue_id_or_key},
    }
    result = self.session().post(url, data=json.dumps(data))
    return result  # type: ignore
@api_call
def list_issue_transitions(
    self, issue_id_or_key: str
) -> List[Dict[str, Any]]:
    """Return the available transitions for issue.

    # Required parameters

    - issue_id_or_key: a non-empty string

    # Returned value

    A list of _transitions_.  Each transition is a dictionary with
    the following entries:

    - to: a dictionary
    - id: a string
    - name: a string

    It returns the available transitions, depending on issue current
    state.
    """
    ensure_nonemptystring('issue_id_or_key')
    client = self._client()
    issue = client.issue(issue_id_or_key)
    return client.transitions(issue)
@api_call
def transition_issue(self, issue_id_or_key: str, path: List[str]) -> None:
    """Transition an issue to a new state, following provided path.

    `path` is an ordered list of transition names.  Each step must be
    available from the state reached by the previous step, otherwise
    an _ApiError_ is raised.

    # Required parameters

    - issue_id_or_key: a non-empty string
    - path: a list of strings (transition names, applied in order)

    # Returned value

    None.
    """
    ensure_nonemptystring('issue_id_or_key')
    ensure_instance('path', list)
    for name in path:
        # Resolve the transition name to its ID in the issue's
        # *current* state (the available set changes at each step).
        transitions = [
            t['id']
            for t in self.list_issue_transitions(issue_id_or_key)
            if t['name'] == name
        ]
        if len(transitions) != 1:
            raise ApiError(
                'Got %d transitions to %s, was expecting one.'
                % (len(transitions), name)
            )
        self._client().transition_issue(issue_id_or_key, transitions[0])
@api_call
def get_issue_fields(self, issue_id_or_key: str) -> Dict[str, Any]:
    """Return the available fields for issue.

    # Required parameters

    - issue_id_or_key: a non-empty string

    # Returned value

    A dictionary of _fields_.  Keys are fields internal names
    and values are dictionaries describing the corresponding fields.
    """
    ensure_nonemptystring('issue_id_or_key')
    editmeta = self._get_json(
        f'issue/{issue_id_or_key}/editmeta', params=None
    )
    return editmeta['fields']  # type: ignore
@api_call
def create_issue(self, fields: Dict[str, Any]) -> Dict[str, Any]:
    """Create a new issue.

    # Required parameters

    - fields: a dictionary

    `fields` is a dictionary with at least the following entries:

    - project: a dictionary
    - summary: a string
    - description: a string
    - issuetype: a dictionary

    `project` is a dictionary with either an `id` entry or a `key`
    entry.

    `issuetype` is a dictionary with a `name` entry.

    # Returned value

    A dictionary representing the issue.  Refer to #get_issue() for
    more details on its content.
    """
    return self._client().create_issue(fields=fields).raw
@api_call
def create_issues(
    self, issue_list: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
    """Create multiple issues.

    # Required parameters

    - issue_list: a list of dictionaries (one per issue to create;
      refer to #create_issue() for the expected entries)

    # Returned value

    A list of _issues_.  Each issue is a dictionary with the
    following entries:

    - status: a string (`'Success'` or `'Error'`)
    - error: a string or None (in case of success)
    - issue: a dictionary or None
    - input_fields: a dictionary, the corresponding entry in
      `issue_list`
    """
    ensure_instance('issue_list', list)
    return self._client().create_issues(field_list=issue_list)
@api_call
def assign_issue(self, issue_id_or_key: str, assignee: str) -> bool:
    """Assign or reassign an issue.

    !!! important
        Requires issue assign permission, which is different from
        issue editing permission.

    # Required parameter

    - issue_id_or_key: a non-empty string
    - assignee: a non-empty string

    # Returned value

    True if successful.
    """
    ensure_nonemptystring('issue_id_or_key')
    ensure_nonemptystring('assignee')
    client = self._client()
    return client.assign_issue(issue_id_or_key, assignee)
@api_call
def update_issue(
    self, issue_id_or_key: str, fields: Dict[str, Any]
) -> None:
    """Update issue.

    # Required parameters

    - issue_id_or_key: a non-empty string
    - fields: a dictionary

    `fields` is a dictionary with one entry per issue field to
    update.  The key is the field name, and the value is the new
    field value.

    # Returned value

    None.
    """
    ensure_nonemptystring('issue_id_or_key')
    ensure_instance('fields', dict)
    return self._client().issue(issue_id_or_key).update(fields)
@api_call
def add_issue_attachment(
    self,
    issue_id_or_key: str,
    filename: str,
    rename_to: Optional[str] = None,
) -> Dict[str, Any]:
    """Add attachment to issue.

    !!! note
        If rename_to contains non-ASCII symbols, this may
        fail with an HTTP error (code 500).  Some (?) Jira versions
        fail to handle that properly.

    # Required parameters

    - issue_id_or_key: a non-empty string
    - filename: a non-empty string (path of the file to attach)

    # Optional parameters

    - rename_to: a non-empty string or None (None by default).  If
      provided, the attachment is stored under this name instead of
      `filename`.

    # Returned value

    A dictionary.
    """
    ensure_nonemptystring('issue_id_or_key')
    ensure_nonemptystring('filename')
    ensure_noneorinstance('rename_to', str)
    return (
        self._client()
        .add_attachment(
            issue=issue_id_or_key, attachment=filename, filename=rename_to
        )
        .raw
    )
####################################################################
# JIRA issue linktypes
#
# list_issuelinktypes
@api_call
def list_issuelinktypes(self) -> List[Dict[str, str]]:
    """List issue link types.

    # Returned value

    A list of _issuelinktypes_.  Each issuelinktype is a dictionary
    with the following entries:

    - id: a string
    - name: a string
    - inward: a string
    - outward: a string
    - self: a string (an URL)

    The `name` entry can be used to create a link between two
    issues.
    """
    response = self._get_json('issueLinkType')
    return response['issueLinkTypes']
####################################################################
# JIRA sprints
#
# create_sprint
# TODO delete_sprint
# update_sprint
# TODO get_sprint
# add_sprint_issues
# TODO get_sprint_issues
@api_call
def create_sprint(
    self,
    name: str,
    board_id: int,
    start_date: Optional[Any] = None,
    end_date: Optional[Any] = None,
) -> Dict[str, Any]:
    """Create a new sprint.

    # Required parameters

    - name: a non-empty string
    - board_id: an integer

    # Optional parameters

    - start_date
    - end_date

    # Returned value

    A dictionary.
    """
    # Validate inputs, as the other sprint operations do.
    ensure_nonemptystring('name')
    ensure_instance('board_id', int)
    return (
        self._client()
        .create_sprint(name, board_id, start_date, end_date)
        .raw
    )
@api_call
def update_sprint(
    self,
    sprint_id: int,
    name: Optional[str] = None,
    state: Optional[str] = None,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    complete_date: Optional[str] = None,
    origin_board_id: Optional[int] = None,
    goal: Optional[str] = None,
) -> None:
    """Update existing sprint.

    Only the provided fields are updated; the others keep their
    current value.

    # Required parameters

    - sprint_id: an integer

    # Optional parameters

    - name: a string or None (None by default)
    - state: a string or None (None by default)
    - start_date: a string or None (None by default)
    - end_date: a string or None (None by default)
    - complete_date: a string or None (None by default)
    - origin_board_id: an integer or None (None by default)
    - goal: a string or None (None by default)

    # Returned value

    None.
    """
    ensure_instance('sprint_id', int)
    ensure_noneorinstance('name', str)
    ensure_noneorinstance('state', str)
    ensure_noneorinstance('start_date', str)
    ensure_noneorinstance('end_date', str)
    ensure_noneorinstance('complete_date', str)
    ensure_noneorinstance('origin_board_id', int)
    ensure_noneorinstance('goal', str)
    # Build the partial-update payload, skipping unspecified fields.
    scheme = {'id': sprint_id}
    add_if_specified(scheme, 'name', name)
    add_if_specified(scheme, 'state', state)
    add_if_specified(scheme, 'startDate', start_date)
    add_if_specified(scheme, 'endDate', end_date)
    add_if_specified(scheme, 'completeDate', complete_date)
    add_if_specified(scheme, 'originBoardId', origin_board_id)
    add_if_specified(scheme, 'goal', goal)
    self.session().post(
        join_url(self.AGILE_BASE_URL, f'sprint/{sprint_id}'), json=scheme
    )
@api_call
def add_sprint_issues(self, sprint_id: int, issue_keys: List[str]) -> None:
    """Add issues to sprint.

    # Required parameters

    - sprint_id: an integer
    - issue_keys: a list of strings

    # Returned value

    None.
    """
    # Validate inputs, as the other sprint operations do.
    ensure_instance('sprint_id', int)
    ensure_instance('issue_keys', list)
    return self._client().add_issues_to_sprint(sprint_id, issue_keys)
####################################################################
# Xray for JIRA
#
# list_xray_projects
@api_call
def list_xray_projects(self) -> List[Dict[str, Any]]:
    """Return the requirement projects.

    A _requirement_ project is a project on which Xray is enabled.

    # Returned value

    A list of dictionaries with the following entries:

    - icon: a string
    - name: a string (the project name)
    - alias: a string (the project key)
    - pid: an integer
    - avatarId: an integer
    - type: a string (the project type)

    TODO: support more than 100 projects.
    """
    params = {'iDisplayStart': 0, 'iDisplayLength': 100}
    result = requests.get(
        join_url(self.XRAY_BASE_URL, 'preferences/requirementProjects'),
        params=params,
        auth=self.auth,
        # Was missing: honor the TLS verification setting, as every
        # other direct requests call in this class does.
        verify=self.verify,
    ).json()
    return result['entries']
####################################################################
# JIRA Service Desk
#
# create_request
# get_request
# add_request_comment
# get_bundledfield_definition
# list_queues
# list_queue_issues
# list_requesttypes
@api_call
def create_request(
    self,
    servicedesk_id: str,
    requesttype_id: str,
    fields: List[Dict[str, Any]],
) -> Dict[str, Any]:
    """Create a new customer request on specified service desk.

    # Required parameters:

    - servicedesk_id: a non-empty string
    - requesttype_id: a non-empty string
    - fields: the request field values

    NOTE(review): the type hint says `fields` is a list of
    dictionaries while the original docstring said a dictionary; the
    value is passed through as `requestFieldValues` unchecked —
    confirm the expected shape against the Service Desk API.

    The `fields` content depends on the request type (as specified
    by `requesttype_id`).  It typically has at least the following
    two entries:

    - summary: a string
    - description: a string

    # Returned value

    The created _request_ details.  Please refer to #get_request()
    for more information.
    """
    ensure_nonemptystring('servicedesk_id')
    ensure_nonemptystring('requesttype_id')
    # ensure_instance('fields', list)
    result = requests.post(
        join_url(self.SERVICEDESK_BASE_URL, 'request'),
        json={
            'serviceDeskId': servicedesk_id,
            'requestTypeId': requesttype_id,
            'requestFieldValues': fields,
        },
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def get_request(
    self, request_id_or_key: str, expand: Optional['str'] = None
) -> Dict[str, Any]:
    """Return the details of a customer request.

    # Required parameters:

    - request_id_or_key: a non-empty string

    # Optional parameters

    - expand: a string or None (None by default)

    # Returned value:

    The _request_ details, a dictionary, with the following entries:

    - issueId: a string
    - issueKey: a string
    - requestTypeId: a string
    - serviceDeskId: a string
    - createDate: a dictionary
    - reporter: a dictionary
    - active: a boolean
    - timeZone: a string
    - currentStatus: a dictionary
    - requestFieldValues: a dictionary

    Additional entries may be present, depending on the specified
    `expand` parameter.
    """
    ensure_nonemptystring('request_id_or_key')
    ensure_noneorinstance('expand', str)

    params: Optional[Dict[str, str]] = (
        None if expand is None else {'expand': expand}
    )
    response = requests.get(
        join_url(
            self.SERVICEDESK_BASE_URL, f'request/{request_id_or_key}'
        ),
        params=params,
        auth=self.auth,
        verify=self.verify,
    )
    return response  # type: ignore
@api_call
def add_request_comment(
    self, request_id_or_key: str, body: str, public: bool = False
) -> Dict[str, Any]:
    """Attach a public or internal comment to a customer request.

    # Required parameters

    - request_id_or_key: a non-empty string
    - body: a string

    # Optional parameters

    - public: a boolean (False by default)

    # Returned value

    A dictionary with the following entries:

    - id: a string
    - body: a string
    - public: a boolean
    - author: a dictionary
    - created: a dictionary
    - _links: a dictionary

    The `author` dictionary contains `name`, `key`, `emailAddress`,
    `displayName`, `active`, `timeZone`, and `_links` entries.

    The `created` dictionary contains `iso8601` and `jira` entries
    (both ISO8601 timestamps), a `friendly` string, and an
    `epochMillis` integer.
    """
    ensure_nonemptystring('request_id_or_key')
    ensure_instance('body', str)
    ensure_instance('public', bool)

    payload = {'body': body, 'public': public}
    result = requests.post(
        join_url(
            self.SERVICEDESK_BASE_URL,
            f'request/{request_id_or_key}/comment',
        ),
        json=payload,
        auth=self.auth,
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def get_bundledfield_definition(
    self, context_id: str, customfield_id: str
) -> Dict[str, Any]:
    """Return a bundled field definition.

    # Required parameters

    - context_id: a non-empty string
    - customfield_id: a non-empty string

    # Returned value

    A dictionary.
    """
    ensure_nonemptystring('context_id')
    ensure_nonemptystring('customfield_id')

    result = requests.get(
        join_url(self.SDBUNDLE_BASE_URL, 'jsdbundled/getBundledFields'),
        params={'contextId': context_id, 'customFieldId': customfield_id},
        auth=self.auth,
        # Honor the instance TLS setting, as done by the other
        # Service Desk helpers (was previously omitted).
        verify=self.verify,
    )
    return result  # type: ignore
@api_call
def list_queues(self, servicedesk_id: str) -> List[Dict[str, Any]]:
    """Return the queues of a given service desk.

    # Required parameters

    - servicedesk_id: a non-empty string

    # Returned value

    A list of dictionaries.
    """
    ensure_nonemptystring('servicedesk_id')

    # Queue endpoints are experimental and require an opt-in header.
    return self._collect_sd_data(
        f'servicedesk/{servicedesk_id}/queue',
        headers={'X-ExperimentalApi': 'opt-in'},
    )
@api_call
def list_queue_issues(
    self, servicedesk_id: str, queue_id: str
) -> List[Dict[str, Any]]:
    """Return the issues currently sitting in a given queue.

    # Required parameters

    - servicedesk_id: a non-empty string
    - queue_id: a non-empty string

    # Returned value

    A list of dictionaries.
    """
    ensure_nonemptystring('servicedesk_id')
    ensure_nonemptystring('queue_id')

    # Queue endpoints are experimental and require an opt-in header.
    api = f'servicedesk/{servicedesk_id}/queue/{queue_id}/issue'
    return self._collect_sd_data(
        api, headers={'X-ExperimentalApi': 'opt-in'}
    )
@api_call
def list_requesttypes(self, servicedesk_id: str) -> List[Dict[str, Any]]:
    """Return the request types of a service desk.

    # Required parameters

    - servicedesk_id: a non-empty string

    # Returned value

    A list of dictionaries.
    """
    ensure_nonemptystring('servicedesk_id')

    api = f'servicedesk/{servicedesk_id}/requesttype'
    return self._collect_sd_data(api)
####################################################################
# JIRA misc. operation
#
# list_plugins
# get_server_info
# reindex

# NOTE: a stray `@api_call` used to precede this section header,
# silently double-decorating `list_plugins`; it has been removed.
@api_call
def list_plugins(self) -> List[Dict[str, Any]]:
    """Return a list of installed plugins.

    # Returned value

    A list of _plugins_. A plugin is represented by a dictionary
    with the following entries:

    - applicationKey: a string
    - applicationPluginType: a string (one of `'APPLICATION'`,
      `'PRIMARY'`, `'UTILITY'`)
    - description: a string
    - enabled: a boolean
    - key: a string
    - links: a dictionary
    - name: a string
    - optional: a boolean
    - remotable: a boolean
    - static: a boolean
    - unloadable: a boolean
    - userInstalled: a boolean
    - usesLicensing: a boolean
    - vendor: a dictionary
    - version: a string

    The `links` dictionary may contain the following entries:
    `manage`, `modify`, `plugin-icon`, `plugin-logo`,
    `plugin-summary`, and `self`.

    The `vendor` dictionary may contain the following entries:
    `link`, `marketplaceLink`, and `name`.

    Not all entries are present for all plugins.
    """
    return requests.get(
        self.UPM_BASE_URL, auth=self.auth, verify=self.verify
    ).json()['plugins']
@api_call
def get_server_info(self, do_health_check: bool = False) -> Dict[str, Any]:
    """Return server information.

    # Optional parameters

    - do_health_check: a boolean (False by default)

    # Returned value

    A dictionary with the following entries:

    - versionNumbers: a list of integers
    - serverTitle: a string
    - buildNumber: an integer
    - deploymentType: a string
    - version: a string
    - baseUrl: a string
    - scmInfo: a string
    - buildDate: a datetime as a string
    - serverTime: a datetime as a string

    For example:

    ```python
    {
        'versionNumbers': [7, 3, 8],
        'serverTitle': 'JIRA Dev',
        'buildNumber': 73019,
        'deploymentType': 'Server',
        'version': '7.3.8',
        'baseUrl': 'https://jira.example.com',
        'scmInfo': '94e8771b8094eef96c119ec22b8e8868d286fa88',
        'buildDate': '2017-06-12T00:00:00.000+0000',
        'serverTime': '2018-01-15T11:07:40.690+0000'
    }
    ```
    """
    ensure_instance('do_health_check', bool)

    # The endpoint expects the flag as a string ('True'/'False').
    params = {'doHealthCheck': str(do_health_check)}
    result = self._get_json('serverInfo', params=params)
    return result  # type: ignore
@api_call
def reindex(
    self,
    kind: str,
    index_comments: bool = False,
    index_change_history: bool = False,
    index_worklogs: bool = False,
) -> Dict[str, Any]:
    """Kick off a reindex.

    !!! note
        Not using the Python API `reindex` method, which does not
        use the API but simulate a page click.

    Foreground reindexing rebuilds all indexes, so the three
    optional parameters are irrelevant in that case.

    # Required parameters

    - kind: one of 'FOREGROUND', 'BACKGROUND',
      'BACKGROUND_PREFFERED', or 'BACKGROUND_PREFERRED'.

    # Optional parameters

    - index_comments: a boolean (False by default, background
      reindexing only)
    - index_change_history: a boolean (False by default, background
      reindexing only)
    - index_worklogs: a boolean (False by default, background
      reindexing only)

    # Returned value

    A dictionary with the following entries:

    - progressUrl: a string
    - currentProgress: an integer
    - currentSubTask: a string
    - submittedTime: a string (an ISO timestamp)
    - startTime: a string (an ISO timestamp)
    - finishTime: a string (an ISO timestamp)
    - success: a boolean
    """
    ensure_instance('index_comments', bool)
    ensure_instance('index_change_history', bool)
    ensure_instance('index_worklogs', bool)
    ensure_in('kind', REINDEX_KINDS)

    payload = {
        'type': kind,
        'indexComments': index_comments,
        'indexChangeHistory': index_change_history,
        'indexWorklogs': index_worklogs,
    }
    result = self._post('reindex', json=payload)
    return result  # type: ignore
####################################################################
# JIRA helpers

def session(self) -> requests.Session:
    """Return the requests session of the wrapped python-jira client."""
    client = self._client()
    return client._session
def _get(
    self,
    uri: str,
    params: Optional[
        Mapping[str, Union[str, Iterable[str], int, bool]]
    ] = None,
) -> requests.Response:
    # Low-level GET on a URI relative to the instance base URL.
    target = join_url(self.url, uri)
    return requests.get(
        target, params=params, auth=self.auth, verify=self.verify
    )
def _post(
    self, api: str, json: Optional[Mapping[str, Any]] = None
) -> requests.Response:
    # Low-level POST on an API endpoint resolved via _get_url().
    return requests.post(
        self._get_url(api), json=json, auth=self.auth, verify=self.verify
    )
def _collect_data(
    self,
    api: str,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
    base: Optional[str] = None,
    headers: Optional[Mapping[str, str]] = None,
    start_at: str = 'startAt',
    is_last: str = 'isLast',
) -> List[Any]:
    """Collect all pages of a paginated endpoint.

    Repeatedly GETs `api`, accumulating each page's `values` entry,
    until the page whose `is_last` entry is true.  `start_at` and
    `is_last` name the pagination fields used by the endpoint
    (Service Desk endpoints use different names -- see
    _collect_sd_data()).

    Raises an _ApiError_ on a non-2xx response or on a payload
    lacking the expected pagination structure.
    """
    api_url = self._get_url(api) if base is None else join_url(base, api)
    collected: List[Any] = []
    # Work on a mutable copy: the start offset is updated per page.
    _params = dict(params or {})
    more = True
    with requests.Session() as session:
        session.auth = self.auth
        session.headers = headers  # type: ignore
        session.verify = self.verify
        while more:
            response = session.get(api_url, params=_params)
            if response.status_code // 100 != 2:
                raise ApiError(response.text)
            try:
                workload = response.json()
                values = workload['values']
                collected += values
            except Exception as exception:
                raise ApiError(exception)
            # `workload` and `values` are guaranteed bound here: any
            # failure above raised ApiError before reaching this point.
            more = not workload[is_last]
            if more:
                # Next page starts right after the values received.
                _params[start_at] = workload[start_at] + len(values)
    return collected
def _collect_sd_data(
    self,
    api: str,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
    headers: Optional[Mapping[str, str]] = None,
) -> List[Any]:
    # Service Desk endpoints paginate with 'start'/'isLastPage'
    # instead of the default 'startAt'/'isLast'.
    return self._collect_data(
        api,
        params=params,
        base=self.SERVICEDESK_BASE_URL,
        headers=headers,
        start_at='start',
        is_last='isLastPage',
    )
def _collect_agile_data(
    self,
    api: str,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> List[Any]:
    # Agile endpoints use the default 'startAt'/'isLast' pagination.
    return self._collect_data(
        api, params=params, base=self.AGILE_BASE_URL
    )
def _get_url(self, api: str) -> str:
    # Delegate URL construction to the wrapped python-jira client.
    return self._client()._get_url(api)  # type: ignore
def _get_json(
    self,
    api: str,
    params: Optional[Mapping[str, Union[str, List[str], None]]] = None,
) -> Any:
    # Delegate the JSON GET to the wrapped python-jira client.
    return self._client()._get_json(api, params=params)
# forms helpers

def _parse_data(
    self, uri: str, pat_name: str, pat_id: str, pat_inactive: str
) -> List[Dict[str, Any]]:
    """Scrape `name`/`id`/`active` triples from an admin HTML page.

    `pat_name` and `pat_id` are regexes matched over the page text;
    their matches are paired positionally, so both patterns must
    yield the same number of matches in the same order.
    `pat_inactive` is a %-template receiving the scraped id; a match
    on the page marks the entry as inactive.
    """
    page = self._get(uri)
    return [
        {
            'name': name,
            'id': int(sid),
            'active': not re.search(pat_inactive % sid, page.text),
        }
        for name, sid in zip(
            re.findall(pat_name, page.text), re.findall(pat_id, page.text)
        )
    ]
def _do_form_step(
    self, api: str, data: Dict[str, Any], cookies
) -> requests.Response:
    """Perform a project-config step (simulated form submission)."""
    form_headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'X-Atlassian-Token': 'no-check',
    }
    return requests.post(
        join_url(self.url, api),
        data=data,
        headers=form_headers,
        cookies=cookies,
        auth=self.auth,
        verify=self.verify,
    )
def _get_projectconfig_scheme(
    self, project_id_or_key: Union[str, int], scheme: str
) -> str:
    """Return the name of `scheme` for the specified project.

    Scraped from the project-config servlet page.
    """
    ensure_instance('project_id_or_key', (str, int))

    project = self.get_project(project_id_or_key)
    page = self._get(
        f'plugins/servlet/project-config/{project["key"]}/{scheme}'
    )
    found = re.search(
        r'class="project-config-scheme-name"[^>]+>([^<]+)<', page.text
    )
    if found is not None:
        return found.group(1)
    raise ApiError('Scheme %s not found' % scheme)
def _get_projectconfig_option(
    self, api: str, project_id: str, scheme: str
) -> Tuple[requests.Response, str]:
    """Return the config page and the option value matching `scheme`."""
    page = self._get(f'{api}?projectId={project_id}')
    pattern = r'<option value="(\d+)"[^>]*>\s*%s\s*</option>' % scheme
    option = re.search(pattern, page.text)
    if option is not None:
        return page, option.group(1)
    raise ApiError('Scheme %s not found.' % scheme)
# Public wrapper classes re-exported by this module.
__all__ = [
    'Artifactory',
    'CloudBeesJenkins',
    'Confluence',
    'GitHub',
    'Kubernetes',
    'Jira',
    'SonarQube',
    'SquashTM',
    'Okta',
]
from typing import Any, Dict, Iterable, Optional
import json
import os
from zabel.commons.utils import api_call
from zabel.commons.interfaces import ManagedService, Utility
from zabel.elements import clients
########################################################################
# Helpers
def _get_credential(key: str) -> str:
    """Return the value of environment variable `key`, failing if unset."""
    if key not in os.environ:
        raise ValueError(f'Environment variable {key} not defined.')
    return os.environ[key]
def _maybe_get_credential(key: str) -> Optional[str]:
    """Return the value of environment variable `key`, or None if unset."""
    try:
        return os.environ[key]
    except KeyError:
        return None
def _has_credentials(*keys: str) -> bool:
    # True only if every listed environment variable is defined AND
    # non-empty (an empty string counts as missing).
    return all(os.environ.get(key) for key in keys)
########################################################################
# Wrappers around low-level APIs
class Artifactory(clients.Artifactory, ManagedService):
    """Abstract base _Artifactory_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variables must exist:

    - ARTIFACTORY_URL: a string
    - ARTIFACTORY_USER: a string
    - ARTIFACTORY_TOKEN: a string

    The `ARTIFACTORY_URL` entry refers to the API entry point:

        https://artifactory.example.com/artifactory/api/

    Implementations are expected to extend this class with their
    platform specifics (canonical user IDs, ...).
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        super().__init__(
            _get_credential('ARTIFACTORY_URL'),
            _get_credential('ARTIFACTORY_USER'),
            _get_credential('ARTIFACTORY_TOKEN'),
        )

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members: Dict[str, Dict[str, Any]] = {}
        for user in self.list_users_details():
            members[self.get_canonical_member_id(user)] = user
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(self.get_internal_member_id(member_id))
class CloudBeesJenkins(clients.CloudBeesJenkins, ManagedService):
    """Abstract base _CloudBeesJenkins_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variables must exist:

    - JENKINS_URL: a string
    - JENKINS_USER: a string
    - JENKINS_TOKEN: a string

    The environment may also contain a `JENKINS_COOKIES` entry (a
    JSON object serialized as a string).

    The `JENKINS_URL` entry refers to the API entry point:

        https://cbj.example.com
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        url = _get_credential('JENKINS_URL')
        user = _get_credential('JENKINS_USER')
        token = _get_credential('JENKINS_TOKEN')
        cookies = (
            json.loads(_get_credential('JENKINS_COOKIES'))
            if _has_credentials('JENKINS_COOKIES')
            else None
        )
        super().__init__(url, user, token, cookies)

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members: Dict[str, Dict[str, Any]] = {}
        for user in self.list_oc_users():
            members[self.get_canonical_member_id(user)] = user
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.list_members()[member_id]
class Confluence(clients.Confluence, ManagedService):
    """Abstract base _Confluence_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variable must exist:

    - CONFLUENCE_URL: a string

    The environment also must have either the two following entries
    (basic auth):

    - CONFLUENCE_USER: a string
    - CONFLUENCE_TOKEN: a string

    Or the four following entries (oauth):

    - CONFLUENCE_KEYCERT: a string
    - CONFLUENCE_CONSUMERKEY: a string
    - CONFLUENCE_ACCESSTOKEN: a string
    - CONFLUENCE_ACCESSSECRET: a string

    The `CONFLUENCE_URL` entry refers to the API entry point:

        https://confluence.example.com

    A _ValueError_ is raised if either none or both the basic and
    oauth credentials are provided.
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        url = _get_credential('CONFLUENCE_URL')
        basic_auth = oauth = None
        if _has_credentials('CONFLUENCE_USER', 'CONFLUENCE_TOKEN'):
            basic_auth = (
                _get_credential('CONFLUENCE_USER'),
                _get_credential('CONFLUENCE_TOKEN'),
            )
        # Map oauth dictionary entries to their environment variables.
        oauth_vars = {
            'key_cert': 'CONFLUENCE_KEYCERT',
            'consumer_key': 'CONFLUENCE_CONSUMERKEY',
            'access_token': 'CONFLUENCE_ACCESSTOKEN',
            'access_token_secret': 'CONFLUENCE_ACCESSSECRET',
        }
        if _has_credentials(*oauth_vars.values()):
            oauth = {
                name: _get_credential(var)
                for name, var in oauth_vars.items()
            }
        super().__init__(url, basic_auth=basic_auth, oauth=oauth)

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members = {}
        for username in self.list_users():
            members[username] = self.get_user(username)
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(member_id)
class GitHub(clients.GitHub, ManagedService):
    """Abstract base _GitHub_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variables must exist:

    - GITHUB_URL: a string
    - GITHUB_USER: a string
    - GITHUB_TOKEN: a string

    The environment may also have a `GITHUB_MNGT` entry (a string).

    The `GITHUB_URL` entry refers to the API entry point:

        https://github.example.com/api/v3/

    The `GITHUB_MNGT` entry is the management entry point:

        https://github.example.com/
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        url = _get_credential('GITHUB_URL')
        user = _get_credential('GITHUB_USER')
        token = _get_credential('GITHUB_TOKEN')
        mngt = (
            _get_credential('GITHUB_MNGT')
            if _has_credentials('GITHUB_MNGT')
            else None
        )
        super().__init__(url, user, token, mngt)

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members: Dict[str, Dict[str, Any]] = {}
        for user in self.list_users():
            members[self.get_canonical_member_id(user)] = user
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(self.get_internal_member_id(member_id))
class Kubernetes(clients.Kubernetes, Utility):
    """Abstract base _Kubernetes_ class.

    Provides a default implementation for the following #::Utility
    method:

    - `__init__()`

    The environment may contain none of the following `KUBERNETES_xxx`
    entries, in which case the current user's `~/.kube/config` config
    file with its default context will be used.

    Alternatively, it may contain some of the following entries:

    - KUBERNETES_CONFIGFILE: a string (a fully qualified file name)
    - KUBERNETES_CONTEXT: a string
    - KUBERNETES_URL: a string (an URL)
    - KUBERNETES_API_KEY: a string
    - KUBERNETES_VERIFY: a string
    - KUBERNETES_SSL_CA_CERT: a string (a base64-encoded certificate)

    # Reusing an existing config file

    If `KUBERNETES_CONFIGFILE` and/or `KUBERNETES_CONTEXT` are present,
    the explicit `KUBERNETES_URL`/`KUBERNETES_API_KEY` entries are
    ignored.

    If `KUBERNETES_CONFIGFILE` is present, the specified config file
    will be used.  If not present, the default Kubernetes config file
    will be used (`~/.kube/config`, usually).

    If `KUBERNETES_CONTEXT` is present, the instance will use the
    specified Kubernetes context.  If not present, the default context
    will be used instead.

    # Specifying an explicit configuration (no config file needed)

    If neither `KUBERNETES_CONFIGFILE` nor `KUBERNETES_CONTEXT` is
    present, `KUBERNETES_URL` and `KUBERNETES_API_KEY` provide an
    explicit configuration.  The possibly existing `~/.kube/config`
    config file will be ignored.

    In this case, `KUBERNETES_URL` is mandatory.  It is the top-level
    API point.  E.g.:

        https://FOOBARBAZ.example.com

    `KUBERNETES_API_KEY` is also mandatory.  It will typically be a
    JWT token.

    The following two additional entries may be present:

    `KUBERNETES_VERIFY` can be set to 'false' (case insensitive) if
    disabling certificate checks for Kubernetes communication is
    required.  Tons of warnings will occur if this is set to 'false'.

    `KUBERNETES_SSL_CA_CERT` is a base64-encoded certificate.
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        config_file = _maybe_get_credential('KUBERNETES_CONFIGFILE')
        context = _maybe_get_credential('KUBERNETES_CONTEXT')
        url = _maybe_get_credential('KUBERNETES_URL')
        api_key = _maybe_get_credential('KUBERNETES_API_KEY')
        ssl_ca_cert = _maybe_get_credential('KUBERNETES_SSL_CA_CERT')
        verify = _maybe_get_credential('KUBERNETES_VERIFY')
        config: Optional[Dict[str, Any]] = None
        # Explicit configuration is only considered when no config
        # file or context was requested.
        if config_file is None and context is None:
            if url and api_key:
                config = {'url': url, 'api_key': api_key}
                if ssl_ca_cert:
                    config['ssl_ca_cert'] = ssl_ca_cert
                if verify and verify.upper() == 'FALSE':
                    config['verify'] = False
            elif url:
                raise ValueError('URL defined but no API_KEY specified.')
            elif api_key:
                raise ValueError('API_KEY defined but no URL specified.')
        super().__init__(config_file, context, config)
class Jira(clients.Jira, ManagedService):
    """Abstract base _Jira_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variable must exist:

    - JIRA_URL: a string

    The environment also must have either the two following entries
    (basic auth):

    - JIRA_USER: a string
    - JIRA_TOKEN: a string

    Or the four following entries (oauth):

    - JIRA_KEYCERT: a string
    - JIRA_CONSUMERKEY: a string
    - JIRA_ACCESSTOKEN: a string
    - JIRA_ACCESSSECRET: a string

    The `JIRA_URL` entry refers to the API entry point:

        https://jira.example.com

    A _ValueError_ is raised if either none or both the basic and
    oauth credentials are provided.
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        url = _get_credential('JIRA_URL')
        basic_auth = oauth = None
        if _has_credentials('JIRA_USER', 'JIRA_TOKEN'):
            basic_auth = (
                _get_credential('JIRA_USER'),
                _get_credential('JIRA_TOKEN'),
            )
        # Map oauth dictionary entries to their environment variables.
        oauth_vars = {
            'key_cert': 'JIRA_KEYCERT',
            'consumer_key': 'JIRA_CONSUMERKEY',
            'access_token': 'JIRA_ACCESSTOKEN',
            'access_token_secret': 'JIRA_ACCESSSECRET',
        }
        if _has_credentials(*oauth_vars.values()):
            oauth = {
                name: _get_credential(var)
                for name, var in oauth_vars.items()
            }
        super().__init__(url, basic_auth=basic_auth, oauth=oauth)

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members = {}
        for username in self.list_users():
            members[username] = self.get_user(username)
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(self.get_internal_member_id(member_id))
class SonarQube(clients.SonarQube, ManagedService):
    """Abstract base _SonarQube_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variables must exist:

    - SONARQUBE_URL: a string
    - SONARQUBE_TOKEN: a string

    The `SONARQUBE_URL` entry refers to the API entry point:

        https://sonar.example.com/sonar/api/
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        super().__init__(
            _get_credential('SONARQUBE_URL'),
            _get_credential('SONARQUBE_TOKEN'),
        )

    def get_internal_member_id(self, member_id: str) -> str:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members: Dict[str, Dict[str, Any]] = {}
        for user in self.search_users():
            members[self.get_canonical_member_id(user)] = user
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(self.get_internal_member_id(member_id))
class SquashTM(clients.SquashTM, ManagedService):
    """Abstract base _SquashTM_ class.

    Default implementations are provided for the following three
    #::ManagedService methods:

    - `__init__()`
    - `list_members`
    - `get_member`

    The following environment variables must exist:

    - SQUASHTM_URL: a string
    - SQUASHTM_USER: a string
    - SQUASHTM_TOKEN: a string

    The `SQUASHTM_URL` entry refers to the API entry point:

        https://squash-tm.example.com/squash/api/rest/latest/
    """

    # pylint: disable=abstract-method

    def __init__(self) -> None:
        super().__init__(
            _get_credential('SQUASHTM_URL'),
            _get_credential('SQUASHTM_USER'),
            _get_credential('SQUASHTM_TOKEN'),
        )

    def get_internal_member_id(self, member_id: str) -> int:
        raise NotImplementedError

    @api_call
    def list_members(self) -> Dict[str, Dict[str, Any]]:
        """Return the members on the service.

        # Returned values

        A dictionary mapping canonical member IDs to the
        service-specific user representations.
        """
        members = {}
        for user in self.list_users():
            members[self.get_canonical_member_id(user)] = self.get_user(
                user['id']
            )
        return members

    @api_call
    def get_member(self, member_id: str) -> Dict[str, Any]:
        """Return details on user.

        # Required parameters

        - member_id: a string (a canonical member ID)

        # Returned value

        The service-specific representation of the user.
        """
        return self.get_user(self.get_internal_member_id(member_id))
class Okta(clients.Okta, Utility):
    """Abstract base _Okta_ class.

    Provides a default implementation for the following #::Utility
    method:

    - `__init__()`

    The following environment variables must exist:

    - OKTA_URL: a string
    - OKTA_TOKEN: a string

    The `OKTA_URL` entry refers to the API entry point:

        https://okta.example.com
    """

    def __init__(self) -> None:
        super().__init__(
            _get_credential('OKTA_URL'), _get_credential('OKTA_TOKEN')
        )
import yaml
# Skeleton of the generated OpenAPI document, pre-filled with the two
# core discovery endpoints ('/api/' and '/api/v1/').  Group, resource,
# and subresource paths are added to 'paths' by the _emit_* helpers.
# NOTE(review): both 'swagger: 2.0' and 'openapi: 3.0.3' are declared;
# a document normally carries only one of the two -- confirm intent.
BOILERPLATE = {
    "swagger": "2.0",
    "openapi": "3.0.3",
    "info": {"title": "Zabel", "version": "0.10.0"},
    "paths": {
        "/api/": {
            "get": {
                "description": "get available API versions",
                "consumes": ["application/json",],
                "produces": ["application/json",],
                "schemes": ["https"],
                "tags": ["core"],
                "operationId": "getCoreAPIVersions",
                "responses": {
                    "200": {"description": "OK",},
                    "401": {"description": "Unauthorized"},
                },
            }
        },
        "/api/v1/": {
            "get": {
                "description": "get available resources",
                "consumes": ["application/json",],
                "produces": ["application/json",],
                "schemes": ["https"],
                "tags": ["core_v1"],
                "operationId": "getCoreV1APIResources",
                "responses": {
                    "200": {"description": "OK",},
                    "401": {"description": "Unauthorized"},
                },
            }
        },
    },
}
# Template for a group's '/apis/<group>/' discovery operation.  The
# '{group}'/'{Group}' placeholders are never formatted here:
# _ensure_emit_group copies this dict and overwrites 'tags' and
# 'operationId'.
BASE_GROUP = {
    "description": "get information of a group",
    "consumes": ["application/json",],
    "produces": ["application/json",],
    "schemes": ["https"],
    "tags": ["{group}"],
    "operationId": "get{Group}APIGroup",
    "responses": {
        "200": {"description": "OK",},
        "401": {"description": "Unauthorized"},
    },
}
# Template for a group/version's resource-discovery operation.
# NOTE(review): the '{group}'/'{Group}'/'{Version}' placeholders are
# not substituted anywhere in this chunk -- confirm callers format or
# overwrite them.
BASE_RESOURCES = {
    "description": "get available resources",
    "consumes": ["application/json",],
    "produces": ["application/json",],
    "schemes": ["https"],
    "tags": ["{group}_{version}"],
    "operationId": "get{Group}{Version}APIResources",
    "responses": {
        "200": {"description": "OK",},
        "401": {"description": "Unauthorized"},
    },
}
# Common parameters for 'create' operations: the request body plus the
# standard Kubernetes 'dryRun' and 'fieldManager' query parameters.
BASE_CREATE_PARAMETERS = [
    {"name": "body", "in": "body", "required": True,},
    {
        "uniqueItems": True,
        "type": "string",
        "description": "When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed",
        "name": "dryRun",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "string",
        "description": "fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint.",
        "name": "fieldManager",
        "in": "query",
    },
]
# Common query parameters for 'list'/'watch' operations (continue,
# fieldSelector, labelSelector, limit, resourceVersion,
# timeoutSeconds, watch), with the standard Kubernetes descriptions.
BASE_LIST_PARAMETERS = [
    {
        "uniqueItems": True,
        "type": "string",
        "description": "The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\".\n\nThis field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.",
        "name": "continue",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "string",
        "description": "A selector to restrict the list of returned objects by their fields. Defaults to everything.",
        "name": "fieldSelector",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "string",
        "description": "A selector to restrict the list of returned objects by their labels. Defaults to everything.",
        "name": "labelSelector",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "integer",
        "description": "limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true.\n\nThe server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.",
        "name": "limit",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "string",
        "description": "When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.",
        "name": "resourceVersion",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "integer",
        "description": "Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.",
        "name": "timeoutSeconds",
        "in": "query",
    },
    {
        "uniqueItems": True,
        "type": "boolean",
        "description": "Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.",
        "name": "watch",
        "in": "query",
    },
]
# Kubernetes API verb -> HTTP method used in the generated paths.
VERB_OPERATION = {
    'list': 'get',
    'create': 'post',
    'deletecollection': 'delete',
    'delete': 'delete',
    'get': 'get',
    'patch': 'patch',
    'update': 'put',
}
# Verbs operating on whole collections (no '{name}' path segment).
GENERIC_VERBS = {'list', 'create', 'deletecollection'}
# Verbs operating on a single named object ('{name}' path segment).
SPECIFIC_VERBS = {'delete', 'get', 'patch', 'update'}
# Registry key prefix under which APIService objects are stored.
PREFIX = b'/registry/apiregistration.k8s.io/apiservices/'
def _emit_generics(resource, context):
    """Build the swagger operations for a resource's collection verbs."""
    verbs = GENERIC_VERBS.intersection(resource['verbs'])
    operations = {}
    for verb in verbs:
        operations[VERB_OPERATION[verb]] = _emit_operation(
            verb, resource, context
        )
    return operations
def _emit_specifics(resource, context):
    """Build the swagger operations for a resource's per-object verbs."""
    verbs = SPECIFIC_VERBS.intersection(resource['verbs'])
    operations = {}
    for verb in verbs:
        operations[VERB_OPERATION[verb]] = _emit_operation(
            verb, resource, context
        )
    return operations
def _emit_operation(verb, resource, context):
namespaced = 'Namespaced' if resource.get('namespaced') else ''
return {
'description': f'{verb} objects of kind {resource["kind"]}',
'consumes': ['*/*'],
'produces': ['application/json'],
'tags': [context['tag']],
'operationId': f'{verb}{context["baseoperation"]}{namespaced}{resource["kind"]}',
'responses': {
'200': {'description': 'OK'},
'401': {'description': 'Unauthorized'},
},
}
def _emit_subresource(root, resource, context):
    """Register the path of a subresource (a name containing a '/').

    Subresources only support object-level verbs; a collection verb in
    the declaration is an error.
    """
    parent, sub = resource['name'].split('/')
    if resource.get('namespaced'):
        path = f'{root}/namespaces/{{namespace}}/{parent}/{{name}}/{sub}'
    else:
        path = f'{root}/{parent}/{{name}}/{sub}'
    if GENERIC_VERBS & set(resource['verbs']):
        raise ValueError('oops, got generic verb for subresource')
    BOILERPLATE['paths'][path] = _emit_specifics(resource, context)
def _emit_resource(root, resource, context):
    """Register the collection and per-object paths of a resource."""
    name = resource['name']
    if resource.get('namespaced'):
        path = f'{root}/namespaces/{{namespace}}/{name}'
    else:
        path = f'{root}/{name}'
    verbs = set(resource['verbs'])
    if verbs & GENERIC_VERBS:
        BOILERPLATE['paths'][path] = _emit_generics(resource, context)
    if verbs & SPECIFIC_VERBS:
        BOILERPLATE['paths'][f'{path}/{{name}}'] = _emit_specifics(
            resource, context
        )
def _ensure_emit_group(groupversion, context):
    """Register the group discovery path, once per API group."""
    group, _ = groupversion.split('/')
    key = f'/apis/{group}/'
    if key in BOILERPLATE['paths']:
        return
    # '.k8s.io' groups are tagged by their first segment only.
    if group.endswith('.k8s.io'):
        name = group.split('.')[0]
    else:
        name = group
    entry = BASE_GROUP.copy()
    entry['tags'] = [name.lower()]
    entry['operationId'] = f'get{context["baseoperation"]}ApiGroup'
    BOILERPLATE['paths'][key] = {'get': entry}
def _emit_group_version(root, context):
    """Register the resource-discovery path for one group/version."""
    listing = BASE_RESOURCES.copy()
    listing['tags'] = [context['tag']]
    listing['operationId'] = f'get{context["baseoperation"]}APIResources'
    BOILERPLATE['paths'][f'{root}/'] = {'get': listing}
def _make_tag(groupversion: str) -> str:
"""Make tag corresponding to groupversion."""
if '/' in groupversion:
group, version = groupversion.split('/')
splits = group.split('.')
lhs = splits[0].lower() + ''.join(s.title() for s in splits[1:])
return f'{lhs}_{version.lower()}'
return f'core_{groupversion.lower()}'
def _make_operationname(groupversion: str) -> str:
"""Make CamelCase base operation name."""
if '/' in groupversion:
group, version = groupversion.split('/')
if group.endswith('.k8s.io'):
group = group[:-7]
return ''.join(s.title() for s in group.split('.')) + version.title()
return f'Core{groupversion.title()}'
def get_openapi(cluster):
    """Generate a JSON swagger for the specified cluster.

    Entries will be generated for each declared apiservices.

    # Required parameters

    - cluster: an object whose `etcd` attribute gives access to the
      cluster store

    # Returned value

    A dictionary, the swagger document: the module-level BOILERPLATE
    object with one path entry per exposed group, group/version, and
    resource.

    NOTE(review): BOILERPLATE is module-level state, so repeated calls
    accumulate paths across calls — confirm this is intended.
    """
    # Each value stored under PREFIX is an APIResourceList manifest.
    apis = [yaml.safe_load(r) for r, _ in cluster.etcd.get_prefix(PREFIX)]
    for api in apis:
        groupversion = api['groupVersion']
        context = {
            'tag': _make_tag(groupversion),
            'baseoperation': _make_operationname(groupversion),
        }
        # Named groups live under /apis/{group}/{version}; the legacy
        # core group ('v1', no slash) lives under /api/{version}.
        if '/' in groupversion:
            root = f'/apis/{groupversion}'
            _ensure_emit_group(groupversion, context)
            _emit_group_version(root, context)
        else:
            root = f'/api/{groupversion}'
        for resource in api['resources']:
            # A '/' in the resource name denotes a subresource
            # (e.g. 'pods/status').
            if '/' in resource['name']:
                _emit_subresource(root, resource, context)
            else:
                _emit_resource(root, resource, context)
    return BOILERPLATE
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Union
from datetime import datetime
import os
import pickle
import queue
import threading
########################################################################
########################################################################
# VData: data (bytes) with an attached revision number (int)
# HVData: when a key is deleted, its successive values (list of VData)
# with an attached revision number (int)
# KVData: the 'life' of a key, with the list of changes of its current
# incarnation (list of VData), as well as a list of its previous
# incarnations (list of HVData).
# Finding the value of a key at a given time (revision) is as follows:
#
# If the revision is equal or higher than the revision of the initial
# current incarnation value, the value is the value of the change that
# has a revision that is lesser or equal to the desired revision.
#
# If the revision predates the revision of the initial current
# incarnation value, iterate over the previous incarnations, in
# decreasing order. If the revision is higher or equal than the
# revision attached to the previous incarnation, the key had no value
# at the revision. Otherwise proceed as in the current incarnation
# step.
#
# If no value is found, the key had no value at this revision.
VData = Tuple[bytes, int]  # one value of a key: (data, revision)
HVData = Tuple[List[VData], int]  # a past incarnation: (changes, deletion revision)
KVData = Tuple[List[VData], List[HVData]]  # a key's life: (current changes, history)
# Indexes into a VData pair, for readability.
VALUE = 0
REVISION = 1
def _current(kvd: KVData, field: int) -> Any:
return kvd[0][-1][field]
def _initial(kvd: KVData, field: int) -> Any:
return kvd[0][0][field]
def _increment_last_byte(byte_string: bytes) -> bytes:
array = bytearray(byte_string)
array[-1] = array[-1] + 1
return bytes(array)
def _to_bytes(maybe_bytestring: Union[str, bytes]) -> bytes:
"""Encode string to bytes.
Convenience function to do a simple encode('utf-8') if the input is not
already bytes. Returns the data unmodified if the input is bytes.
"""
if isinstance(maybe_bytestring, bytes):
return maybe_bytestring
return maybe_bytestring.encode('utf-8')
# Events classes and helpers
class Event:
    """An event class for watchers.

    Do not use this class directly, use one of its subclass, #PutEvent
    and #DeleteEvent.
    """

    def __init__(
        self, key: bytes, value: Optional[bytes], prev_value: Optional[bytes]
    ) -> None:
        # key: the affected key; value: its new value (None on delete);
        # prev_value: its previous value (None on first creation).
        self.key = key
        self.value = value
        self.prev_value = prev_value

    def __str__(self) -> str:
        return f'{type(self).__name__}: {self.key!r}, {self.value!r}'

    def __repr__(self) -> str:
        return f'<{type(self).__name__}: {self.key!r}, {self.value!r}>'
class PutEvent(Event):
    """Event emitted when a key is created or its value is modified."""
class DeleteEvent(Event):
    """Event emitted when a key is deleted or expires."""
def _events_notifier(
    event_queue: queue.Queue,
    watchers: List[Tuple[bytes, bytes, Callable[..., None]]],
) -> None:
    """Dispatch queued events to their matching watchers, forever.

    Runs as a daemon thread.  Cancelled watchers are `None` entries in
    `watchers` and are skipped.  A watcher with no range end matches a
    single key; otherwise it matches the half-open key range.
    """
    while True:
        event = event_queue.get()
        key = _to_bytes(event.key)
        for entry in watchers:
            if not entry:
                continue
            range_start, range_end, callback = entry
            matched = (
                key == range_start
                if range_end is None
                else range_start <= key < range_end
            )
            if matched:
                callback(event)
# Metadata class
class KVMetadata:
    """A container for key metadata."""

    def __init__(self, key: bytes, kv: 'KVData') -> None:
        self.key = key
        changes = kv[0]
        # Each change is a (value, revision) pair; index 1 is the
        # revision number.
        self.create_revision = changes[0][1]
        self.mod_revision = changes[-1][1]
        self.version = len(changes)
        # Leases are not implemented: always None.
        self.lease_id = None

    def __str__(self) -> str:
        return f'{type(self).__name__}: {self.key!r}, version={self.version}'

    def __repr__(self) -> str:
        return f'<{type(self).__name__}: {self.key!r}, version={self.version}>'
# EtcdClient class
WatcherEntry = Tuple[bytes, Optional[bytes], Callable[..., None]]
class EtcdClient:
    """An etcd-compatible implementation.

    The public API it provides is a strict subset of the API offered by
    the **python3-etcd** library. Switching to it should be seamless.
    (The opposite is not true, as not all features are implemented.)

    The store is persisted as a pickle file named `'{host}_{port}.pkl'`,
    loaded at creation time and written back by #snapshot() / #close().

    `sort_order` is one of `'ascend'`, `'descend'` or None.
    `sort_target` is one of `'key'`, `'version'`, `'create'`, `'mod'`,
    or `'value'`.

    NOTE(review): `sort_order` and `sort_target` are accepted but not
    yet honored by #get_prefix() and #get_range() — confirm before
    relying on result ordering.

    Instances of this class can act as context managers:

    ```python
    with EtcdClient(...) as etcd:
        ...
    ```
    """

    # key -> (current incarnation changes, previous incarnations)
    store: Dict[bytes, KVData]
    # revision number -> timestamp of the write that created it
    revisions: Dict[int, datetime]
    # last allocated revision number
    revision: int

    def __init__(self, host: str = 'localhost', port: int = 2379):
        """Load (or initialize) the store persisted for host/port."""
        self._filename = f'{host}_{port}.pkl'
        if os.path.isfile(self._filename):
            with open(self._filename, 'rb') as f:
                self.revision, self.store, self.revisions = pickle.load(f)
        else:
            self.revision, self.store, self.revisions = 1, {}, {}
        # Cancelled watchers are replaced by None so that the indexes
        # returned by add_watch_callback() remain stable.
        self.watchers: List[Optional[WatcherEntry]] = []
        self.event_queue = queue.Queue()
        self.event_thread = threading.Thread(
            target=_events_notifier,
            args=[self.event_queue, self.watchers],
            daemon=True,
        )
        self.event_thread.start()

    def __str__(self) -> str:
        return f'{self.__class__.__name__}: {self._filename}'

    def __repr__(self) -> str:
        return f'<{self.__class__.__name__}: {self._filename!r}>'

    ## Context manager helpers

    def __enter__(self) -> 'EtcdClient':
        return self

    def __exit__(self, *args: Any) -> None:
        self.close()

    ## Revision helpers

    def _new_revision(self) -> int:
        """Allocate, timestamp, and return a new revision number."""
        self.revision += 1
        self.revisions[self.revision] = datetime.now()
        return self.revision

    ## Public API

    def close(self) -> None:
        """Snapshot and close the database."""
        self.snapshot()

    def snapshot(self, filename: Optional[str] = None) -> None:
        """Snapshot the database.

        # Optional parameters

        - filename: a non-empty string or None (None by default)

        If `filename` is None, the file the store was loaded from is
        overwritten.
        """
        with open(filename or self._filename, 'wb') as f:
            pickle.dump(
                (self.revision, self.store, self.revisions),
                f,
                pickle.HIGHEST_PROTOCOL,
            )

    def get(self, key: bytes) -> Tuple[Optional[bytes], Optional[KVMetadata]]:
        """Get the value of a key.

        # Required parameters

        - key: a non-empty bytes string

        # Returned value

        A (value, metadata) tuple. If `key` is not present (or is
        currently deleted), returns (None, None).
        """
        key = _to_bytes(key)
        # An empty current-changes list means the key was deleted.
        if key not in self.store or not self.store[key][0]:
            return None, None
        kvb = self.store[key]
        return _current(kvb, VALUE), KVMetadata(key, kvb)

    def get_prefix(
        self,
        key_prefix: bytes,
        sort_order: Optional[str] = None,
        sort_target: str = 'key',
    ) -> Iterator[Tuple[bytes, KVMetadata]]:
        """Get a range of keys with a prefix.

        # Required parameters

        - key_prefix: a non-empty bytes string

        # Optional parameters

        - sort_order: a non-empty string or None (None by default)
        - sort_target: a non-empty string (`'key'` by default)

        # Returned value

        A sequence of (value, metadata) tuples.
        """
        key_prefix = _to_bytes(key_prefix)
        return self.get_range(
            key_prefix,
            _increment_last_byte(key_prefix),
            sort_order,
            sort_target,
        )

    def get_range(
        self,
        range_start: bytes,
        range_end: bytes,
        sort_order: Optional[str] = None,
        sort_target: str = 'key',
    ) -> Iterator[Tuple[bytes, KVMetadata]]:
        """Get a range of keys.

        # Required parameters

        - range_start: a non-empty bytes string
        - range_end: a non-empty bytes string

        # Optional parameters

        - sort_order: a non-empty string or None (None by default,
          currently ignored)
        - sort_target: a non-empty string (`'key'` by default,
          currently ignored)

        # Returned value

        A sequence of (value, metadata) tuples for the live keys in
        [range_start, range_end).
        """
        keys = [
            k
            for k in self.store
            if range_start <= k < range_end and self.store[k][0]
        ]
        for k in keys:
            kvb = self.store[k]
            yield _current(kvb, VALUE), KVMetadata(k, kvb)

    def put(
        self, key: bytes, value: bytes, lease: int = 0, prev_kv: bool = False
    ) -> Optional[bytes]:
        """Save a value.

        # Required parameters

        - key: a non-empty bytes string
        - value: a bytes string

        # Optional parameters

        - lease: an integer (0 by default, ignored: leases are not
          implemented)
        - prev_kv: a boolean (False by default)

        # Returned value

        The previous value of `key` if `prev_kv` is True, None
        otherwise.
        """
        key = _to_bytes(key)
        value = _to_bytes(value)
        pair = (value, self._new_revision())
        kvb = self.store.get(key, ([], []))
        prev_value = _current(kvb, VALUE) if kvb[0] else None
        kvb[0].append(pair)
        self.store[key] = kvb
        event = PutEvent(key, value, prev_value)
        self.event_queue.put(event)
        return prev_value if prev_kv else None

    def delete(self, key: bytes, prev_kv: bool = False) -> bool:
        """Delete a single key.

        The key's changes are archived in its history, so earlier
        revisions remain accessible.

        # Required parameters

        - key: a non-empty bytes string

        # Optional parameters

        - prev_kv: a boolean (False by default)

        # Returned values

        A boolean.  True if the deletion was successful, False
        otherwise.
        """
        key = _to_bytes(key)
        if key not in self.store or not self.store[key][0]:
            return False
        kvb = self.store[key]
        # Archive the current incarnation at the deletion revision.
        pair = (kvb[0], self._new_revision())
        self.store[key] = ([], kvb[1] + [pair])
        event = DeleteEvent(key, None, _current(kvb, VALUE))
        self.event_queue.put(event)
        return True

    def delete_prefix(self, prefix: bytes) -> int:
        """Delete a range of keys with a prefix.

        The operation is atomic, in the sense that all deleted keys are
        deleted at the same revision.

        # Required parameters

        - prefix: a non-empty bytes string

        # Returned value

        An integer, the number of deleted keys.
        """
        prefix = _to_bytes(prefix)
        keys = [
            k for k in self.store if k.startswith(prefix) and self.store[k][0]
        ]
        if not keys:
            return 0
        _revision = self._new_revision()
        values = []
        for k in keys:
            kvb = self.store[k]
            values.append(_current(kvb, VALUE))
            self.store[k] = ([], kvb[1] + [(kvb[0], _revision)])
        # Notify watchers only after all keys have been deleted.
        for k, value in zip(keys, values):
            self.event_queue.put(DeleteEvent(k, None, value))
        return len(keys)

    def replace(
        self, key: bytes, initial_value: bytes, new_value: bytes
    ) -> bool:
        """Replace the value of a key with a new value.

        NOTE(review): this compare-and-swap is not protected by a lock,
        so it is only atomic with respect to single-threaded use —
        confirm callers do not rely on cross-thread atomicity.

        # Required parameters

        - key: a non-empty bytes string
        - initial_value: a bytes string
        - new_value: a bytes string

        # Returned value

        A boolean.  True if the replace operation was successful, False
        otherwise.
        """
        if self.get(key)[0] == initial_value:
            self.put(key, new_value)
            return True
        return False

    def add_watch_callback(
        self, key: bytes, callback: Callable[..., None], **kwargs: Any
    ) -> int:
        """Watch a key or range of keys and call a callback on every event.

        # Required parameters

        - key: a non-empty bytes string
        - callback: a function

        # Optional parameters

        - range_end: a bytes string (as a keyword argument); when
          provided, the watch covers the half-open range [key,
          range_end) instead of the single key

        # Returned value

        An integer.  It can be used to cancel the watch.  Refer to
        #cancel_watch() for more information.
        """
        self.watchers.append(
            (_to_bytes(key), kwargs.get('range_end', None), callback)
        )
        # Return the index of the entry just appended: cancel_watch()
        # indexes self.watchers directly.  (Returning len(self.watchers)
        # was off by one, making cancel_watch() cancel the wrong watcher
        # or raise IndexError.)
        return len(self.watchers) - 1

    def add_watch_prefix_callback(
        self, key_prefix: bytes, callback: Callable[..., None], **kwargs: Any
    ) -> int:
        """Watch a prefix and call a callback on every event.

        # Required parameters

        - key_prefix: a non-empty bytes string
        - callback: a function

        # Returned value

        An integer.  It can be used to cancel the watch.  Refer to
        #cancel_watch() for more information.
        """
        kwargs['range_end'] = _increment_last_byte(_to_bytes(key_prefix))
        return self.add_watch_callback(key_prefix, callback, **kwargs)

    def cancel_watch(self, watch_id: int) -> None:
        """Stop watching a key or range of keys.

        # Required parameters

        - watch_id: an integer, as returned by #add_watch_callback()
          or #add_watch_prefix_callback()

        # Returned value

        None.
        """
        # Watchers are overwritten, not removed, so other watch ids
        # remain valid.
        self.watchers[watch_id] = None

    def compact(self, revision: int) -> None:
        """Compact the event history up to a given revision.

        All superseded keys with a revision less than the compaction
        revision will be removed.

        Not implemented.
        """
        raise NotImplementedError
def client(host: str = 'localhost', port: int = 2379) -> EtcdClient:
    """Return a new #EtcdClient bound to (host, port)."""
    return EtcdClient(host=host, port=port)
from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union
import base64
import datetime
import hashlib
import json
import os
import pkgutil
import re
import uuid
import sys
import tempfile
import threading
import jwt
import yaml
import kubernetes
from bottle import default_app, request, response
from zabel.commons.interfaces import ApiService
from zabel.commons.servers import (
DEFAULT_HEADERS,
entrypoint,
make_status as status,
make_items,
)
from zabel.commons.utils import patch as patchdict
from .environ import EnvironLocal
from .openapi import get_openapi
from .etcd3 import client, Event, PutEvent
from .resolver import (
add as add_resolver,
get as get_resolver,
remove as remove_resolver,
)
########################################################################
## Settings
# Files holding the public key(s) used to verify service-account JWTs.
SA_KEY_FILES = ['/etc/zabel/zabel-publickey']
########################################################################
## Constants
# A Kubernetes-style object manifest (deserialized JSON/YAML).
Object = Dict[str, Any]
# Kubernetes naming rules: generic names, label keys/values, DNS labels.
NAME_PATTERN = r'^[0-9a-zA-Z]+([0-9A-Za-z-_.]*[0-9a-zA-Z])?$'
LABEL_PATTERN = r'^([^/]+/)?([0-9A-Za-z-_.]{1,63})$'
DNS_LABEL_PATTERN = r'^(?![0-9]+$)(?!-)[a-z0-9-]{1,63}(?<!-)$'
# Building blocks of label/field selector expressions.  Each _EXPR
# matches one leading expression, up to a separating comma or the end
# of the string.
KEY = r'[a-z0-9A-Z-_./]+'
VALUE = r'[a-z0-9A-Z-_.]+'
EQUAL_EXPR = rf'^({KEY})\s*([=!]?=)\s*({VALUE})(?:,|$)'
SET_EXPR = rf'^({KEY})\s+(in|notin)\s+\(({VALUE}(\s*,\s*{VALUE})*)\)(?:,|$)'
EXISTS_EXPR = rf'^{KEY}(?:,|$)'
NEXISTS_EXPR = rf'^!{KEY}(?:,|$)'
########################################################################
## Routes
API_ROUTE = '/api'
APIGROUP_ROUTE = '/apis'
# Version discovery roots: the legacy core group and named API groups.
APISERVICE_ROUTES = [
    f'{API_ROUTE}/v1',
    f'{APIGROUP_ROUTE}/{{group}}/{{version}}',
]
# Collection routes, cluster-scoped and namespace-scoped.
CLUSTER_ROUTES = [f'{root}/{{kind}}' for root in APISERVICE_ROUTES]
NAMESPACED_ROUTES = [
    f'{root}/namespaces/{{namespace}}/{{kind}}' for root in APISERVICE_ROUTES
]
CREATE_ROUTES = CLUSTER_ROUTES + NAMESPACED_ROUTES
# Routes addressing one named object, and its status subresource.
DIRECT_ROUTES = [f'{root}/{{name}}' for root in CREATE_ROUTES]
STATUS_ROUTES = [f'{root}/status' for root in DIRECT_ROUTES]
########################################################################
## Keys templates
APISERVICE_PREFIX = b'/registry/apiregistration.k8s.io/apiservices/'
INGRESS_PREFIX = b'/registry/ingresses/'
DEPLOY_PREFIX = b'/registry/deployments/'
POD_PREFIX = b'/registry/pods/'
SVC_PREFIX = b'/registry/services/'
CRD_PREFIX = b'/registry/customresourcedefinitions/'
DEFAULT_NAMESPACE_KEY = b'/registry/namespaces/default'
NAMESPACED_KEY_TEMPLATE = '/registry/{resource}/{namespace}/{name}'
NAMESPACED_PREFIX_TEMPLATE = '/registry/{resource}/{namespace}/'
CLUSTER_KEY_TEMPLATE = '/registry/{resource}/{name}'
CLUSTER_PREFIX_TEMPLATE = '/registry/{resource}/'
########################################################################
## Bootstrap
APISERVICE_KEY_TEMPLATE = (
'/registry/apiregistration.k8s.io/apiservices/{version}.{group}'
)
APISERVICE_TEMPLATE = '''{{
"kind": "APIResourceList",
"apiVersion": "v1",
"groupVersion": "{group}/{version}",
"resources": []
}}'''
DEFAULT_NAMESPACE_NAME = 'default'
DEFAULT_NAMESPACE = {
'apiVersion': 'v1',
'kind': 'Namespace',
'metadata': {'name': DEFAULT_NAMESPACE_NAME},
}
########################################################################
## Helpers
## Validity checking
def _generate_hash(value: Any) -> str:
manifest = hashlib.sha256()
manifest.update(bytes(json.dumps(value), 'utf-8'))
return manifest.hexdigest()[:10]
def _is_dns_label(value: Any) -> bool:
    """Return True if `value` is a string matching DNS_LABEL_PATTERN."""
    if not isinstance(value, str):
        return False
    return re.match(DNS_LABEL_PATTERN, value) is not None
def _is_dns_domain(value: Any) -> bool:
    """Return True if `value` is a dot-separated list of DNS labels.

    The whole domain is limited to 253 characters.
    """
    if not isinstance(value, str) or len(value) > 253:
        return False
    return all(map(_is_dns_label, value.split('.')))
def _is_label_key(value: Any) -> bool:
    """Return True if `value` is a valid label key (`[prefix/]name`)."""
    if not isinstance(value, str):
        return False
    match = re.match(LABEL_PATTERN, value)
    if match is None:
        return False
    prefix, name = match.group(1), match.group(2)
    # The optional prefix (its trailing '/' removed) must be a DNS
    # domain; the name part must satisfy the generic name rule.
    if prefix is not None and not _is_dns_domain(prefix[:-1]):
        return False
    return re.match(NAME_PATTERN, name) is not None
def _is_label_value(value: Any) -> bool:
    """Return True if `value` is a valid label value (63 chars max)."""
    if not isinstance(value, str):
        return False
    if len(value) > 63:
        return False
    return re.match(NAME_PATTERN, value) is not None
## Selectors helpers
def _split_exprs(exprs: str) -> List[str]:
    """Split a comma-separated list of expressions.

    # Required parameters

    - exprs: a string

    # Returned value

    A (possibly empty) list of _expressions_.  An expression is a
    string, stripped of surrounding whitespace and of its separating
    comma.

    # Raised exceptions

    A _ValueError_ exception is raised if `exprs` does not start with
    a valid expression.
    """
    result = []
    while exprs:
        # Try the forms from most to least specific: set expressions
        # first ('key in (v, ...)'), then equality, then bare and
        # negated existence ('key', '!key').
        match = re.match(SET_EXPR, exprs)
        if not match:
            match = re.match(EQUAL_EXPR, exprs)
        if not match:
            match = re.match(EXISTS_EXPR, exprs)
        if not match:
            match = re.match(NEXISTS_EXPR, exprs)
        if not match:
            raise ValueError(f'Invalid expression {exprs}')
        # The matched span includes the separating comma (if any).
        # Drop it, so that existence requirements ('key', '!key') are
        # not later evaluated against the literal key 'key,'.
        result.append(exprs[: match.end()].strip().rstrip(','))
        exprs = exprs[match.end() :].strip()
    return result
def _resolve_path(path: str, obj: Object) -> Tuple[bool, Optional[str]]:
def inner(items, obj) -> Tuple[bool, Optional[str]]:
head, rest = items[0], items[1:]
if head in obj:
return (True, obj[head]) if not rest else inner(rest, obj[head])
return False, None
return inner(path.split('.'), obj)
def _evaluate_fields(req: str, obj: Object) -> bool:
    """Evaluate whether field requirement `req` matches object `obj`.

    `req` is a single expression ('key', '!key', 'key op value', or a
    set expression) where keys are dotted paths inside `obj`.

    # Raised exceptions

    A _ValueError_ exception is raised if `req` is not a valid
    expression.
    """
    # An empty requirement always matches.
    if req == '':
        return True
    # Bare existence: the dotted path must resolve.
    if re.match(EXISTS_EXPR, req):
        return _resolve_path(req, obj)[0]
    # Negated existence: the dotted path (without '!') must not resolve.
    if re.match(NEXISTS_EXPR, req):
        return not _resolve_path(req[1:], obj)[0]
    expr = re.match(SET_EXPR, req)
    if expr:
        key, ope, list_, _ = expr.groups()
        found, value = _resolve_path(key, obj)
        if found:
            values = [v.strip() for v in list_.split(',')]
            if ope == 'in':
                return value in values
            return value not in values
        # A missing key satisfies 'notin' only.
        return ope == 'notin'
    expr = re.match(EQUAL_EXPR, req)
    if expr is None:
        raise ValueError(f'Invalid expression {req}.')
    key, ope, expected = expr.groups()
    found, value = _resolve_path(key, obj)
    if found:
        if ope in ('=', '=='):
            return value == expected
        return value != expected
    # A missing key satisfies '!=' only.
    return ope == '!='
def _evaluate(req: str, labels: Mapping[str, str]) -> bool:
    """Evaluate whether req matches labels.

    `req` is a single selector expression ('key', '!key',
    'key op value', 'key in (...)', or 'key notin (...)').

    # Required parameters

    - req: a string
    - labels: a dictionary

    # Returned value

    A boolean.  True if `req` is satisfied by `labels`, False otherwise.

    # Raised exceptions

    A _ValueError_ exception is raised if `req` is not a valid
    expression.
    """
    # An empty requirement always matches.
    if req == '':
        return True
    # Bare existence / negated existence of the label key.
    if re.match(EXISTS_EXPR, req):
        return req in labels
    if re.match(NEXISTS_EXPR, req):
        return req[1:] not in labels
    expr = re.match(SET_EXPR, req)
    if expr:
        key, ope, list_, _ = expr.groups()
        if key in labels:
            values = [v.strip() for v in list_.split(',')]
            if ope == 'in':
                return labels[key] in values
            return labels[key] not in values
        # A missing key satisfies 'notin' only.
        return ope == 'notin'
    expr = re.match(EQUAL_EXPR, req)
    if expr is None:
        raise ValueError(f'Invalid expression {req}.')
    key, ope, value = expr.groups()
    if key in labels:
        if ope in ('=', '=='):
            return labels[key] == value
        return labels[key] != value
    # A missing key satisfies '!=' only.
    return ope == '!='
def _match_field_selector(obj: 'Object', selector: str) -> bool:
    """Return True if `obj` satisfies every field-selector expression."""
    requirements = _split_exprs(selector)
    return all(_evaluate_fields(req, obj) for req in requirements)
def _match_label_selector(obj: 'Object', selector: str) -> bool:
    """Return True when `obj` satisfies a Kubernetes label selector.

    An empty selector always matches.  Supported forms, comma
    separated: 'key', '!key', 'key = value', 'key == value',
    'key != value', 'key in (v1, v2)', and 'key notin (v1, v2)'.

    # Required parameters

    - obj: a dictionary (NOTE(review): `_evaluate` treats it as the
      labels mapping itself — confirm callers pass labels, not the
      whole object definition)
    - selector: a string

    # Returned value

    A boolean.
    """
    for requirement in _split_exprs(selector):
        if not _evaluate(requirement, obj):
            return False
    return True
def _read_key_files(files: Iterable[str]) -> List[str]:
keys = []
for keyfile in files:
with open(keyfile) as key:
keys.append(key.read())
return keys
def _patch_kubernetes_incluster_config():
    """Monkey-patch kubernetes.config.load_incluster_config.

    Writes a throw-away kubeconfig pointing at the local API server
    (http://localhost:8080) and redirects `load_incluster_config()` to
    load it, so code written for a real cluster runs unchanged against
    this in-process cluster.
    """
    # Unique temporary file; it is intentionally left on disk, as the
    # patched loader reads it lazily, on each call.
    tmpfile = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    with open(tmpfile, 'w') as f:
        f.write(
            '''apiVersion: v1
kind: Config
clusters:
- cluster:
    insecure-skip-tls-verify: true
    server: http://localhost:8080
  name: local
current-context: local
contexts:
- context:
    cluster: local
    namespace: default
    user: ""
  name: local
users: []
'''
        )
    kubernetes.config.load_incluster_config = lambda *args: kubernetes.config.load_kube_config(
        config_file=tmpfile
    )
class Cluster(ApiService):
"""Clusters.
Clusters are collections of _objects_.
Each object has a name, a definition and a status. Most object are
attached to a _namespace_.
Namespaces are objects too, but they are attached to a cluster, not
to another namespace. In all other aspects, they are objects.
All objects are stored in an etcd database as JSON strings.
"""
    def __init__(
        self,
        host: str = 'localhost',
        port: int = 8080,
        service_account_key_file: Iterable[str] = SA_KEY_FILES,
    ) -> None:
        """Create a new cluster.

        # Optional parameters

        - host: the address the API server binds to ('localhost' by
          default)
        - port: the port the API server listens on (8080 by default)
        - service_account_key_file: the files containing the public
          keys used to verify service-account tokens

        The cluster will be bootstrapped if applicable.  A bootstrapped
        cluster has a default namespace (`'default'`).

        Clusters persist their state (via the embedded etcd store).

        After initialization, services are started and ready.

        # TODO

        Should not start services but instead have a `run()` method that
        starts the cluster and serves its declared APIs.
        """
        # Redirect in-cluster kubernetes clients to this API server.
        _patch_kubernetes_incluster_config()
        # Make environment variables thread-local, so that each pod
        # (which runs as a thread of this process) sees its own copy.
        if not isinstance(os.environ, EnvironLocal):
            os.environ = EnvironLocal()
        self.api_server = None
        self.host = host
        self.port = port
        self._publickeys = _read_key_files(service_account_key_file)
        # Runtime registries: deployment fullname -> pod-template hash,
        # pod fullname -> pod object, (service name, port) -> target.
        self.deployments = {}
        self.pods = {}
        self.services = {}
        self.start()
    def start(self):
        """Start cluster.

        If the cluster has never been initialized (no default
        namespace in the store), it will be bootstrapped.

        Services, watchers, and APIs are initialized and started.

        The API server runs in a separate, non-daemon thread, which
        keeps the process alive after this method returns.

        NOTE(review): the etcd store is opened in a `with` block whose
        body completes after `start_deployments()`, triggering a
        snapshot-on-exit while the server thread keeps using
        `self.etcd` — confirm this is intended.
        """
        with client() as self.etcd:
            self.start_apiserver()
            self.register_apiservice(self)
            self.start_watchers()
            # (None, None) means the default namespace key is absent,
            # i.e. the store was never bootstrapped.
            if self.etcd.get(DEFAULT_NAMESPACE_KEY) == (None, None):
                self.bootstrap()
            self.start_deployments()
            # self.start_services()
            # self.start_pods()
            # self.start_ingress()
def bootstrap(self) -> None:
"""Bootstrap cluster.
Creates the initial resources, as defined in `coreapis.yaml`.
Also creates a `'default'` namespace.
If run on an already-initialized cluster, it will reset the
default resources definitions.
The `etcd` service is expected to be up and running.
"""
for apiservice in yaml.safe_load_all(
pkgutil.get_data('zabel.fabric', 'standalone/coreapis.yaml')
):
groupversion = apiservice['groupVersion']
if '/' in groupversion:
group, version = groupversion.split('/')
else:
group, version = '', groupversion
self.etcd.put(
APISERVICE_KEY_TEMPLATE.format(group=group, version=version),
json.dumps(apiservice),
)
try:
self.create('namespaces', DEFAULT_NAMESPACE)
except ValueError:
pass
# DNS proxy
def dns_proxy(
self, host: str, port: int, namespace: str
) -> Tuple[str, int]:
"""Resolve (host, port) in namespace.
Services expose a name and a port and redirect the requests they
receive to another address (and possibly another port).
A service name is either a short name, a qualified name, or a
fully qualified name:
{my-service}
{my-service}.{my-ns}
{my-service}.{my-ns}.svc.cluster.local
"""
if _is_dns_label(host):
if (f'{host}.{namespace}', port) in self.services:
return self.services[(f'{host}.{namespace}', port)]
elif (host, port) in self.services:
return self.services[(host, port)]
return (host, port)
# Controllers
def start_apiserver(self) -> None:
"""Start the API server.
The API server is started in a separate thread, running in
normal mode (i.e., not in daemon mode).
There is only one API server per cluster.
"""
self.api_server = threading.Thread(
target=default_app().run,
kwargs={'host': self.host, 'port': self.port},
)
self.api_server.start()
    def register_apiservice(self, srv: ApiService) -> None:
        """Register an API service.

        The API service is connected to the API server, and receives
        the requests it can process: every method of `srv` exposing an
        `'entrypoint routes'` attribute (set by the `@entrypoint`
        decorator) is published as one or more bottle routes.

        Any number of API services can be connected to the API server.

        In order to 'deregister' an API service, rebuild the router:

            app = bottle.app[0]
            # app.routes contains the routes list
            # app.router contains the router
            # must rebuild routes (and then router), in order to minimize
            # downtime (simple switch)
            for all apiservices:
                routes += ...
            router = bottle.Router()
            for route in routes:
                router.add(route.rule, route.method, route, name=route.name)
            app.router = router
            app.routes = routes
        """

        def wrap(handler, rbac: bool):
            # Adapt an entrypoint method into a bottle callback:
            # default headers, optional authn/authz, admission, query
            # parameter and body forwarding, and ValueError-to-HTTP
            # status translation.
            def inner(*args, **kwargs):
                for header, value in DEFAULT_HEADERS.items():
                    response.headers[header] = value
                if rbac:
                    try:
                        user = self._ensure_authn()
                        self._ensure_authz(user)
                    except ValueError as err:
                        # The exception carries a status object whose
                        # 'code' is the HTTP status to return.
                        resp = err.args[0]
                        response.status = resp['code']
                        return resp
                self._ensure_admission()
                # Forward the supported query parameters, when present,
                # as keyword arguments.
                if request.query.limit:
                    kwargs['limit'] = request.query.limit
                if request.query.labelSelector:
                    kwargs['labelselector'] = request.query.labelSelector
                if request.query.fieldSelector:
                    kwargs['fieldselector'] = request.query.fieldSelector
                # Prefer the parsed JSON body; fall back to YAML.
                if request.json:
                    kwargs['body'] = request.json
                elif request.body:
                    body = request.body.read()
                    if body:
                        kwargs['body'] = yaml.safe_load(body)
                try:
                    result = json.dumps(handler(*args, **kwargs))
                    return result
                except ValueError as err:
                    resp = err.args[0]
                    response.status = resp['code']
                    return resp

            return inner

        for name in dir(srv):
            method = getattr(srv, name)
            for endpoint in getattr(method, 'entrypoint routes', []):
                default_app().route(
                    # bottle expects '<name>' where routes use '{name}'.
                    path=endpoint['path'].replace('{', '<').replace('}', '>'),
                    method=endpoint['methods'],
                    callback=wrap(method, endpoint['rbac']),
                )
def start_watchers(self) -> None:
"""Start watches of interest."""
self.etcd.add_watch_prefix_callback(DEPLOY_PREFIX, self.handle_deploy)
self.etcd.add_watch_prefix_callback(CRD_PREFIX, self.handle_crd)
self.etcd.add_watch_prefix_callback(POD_PREFIX, self.handle_pod)
self.etcd.add_watch_prefix_callback(
INGRESS_PREFIX, self.handle_ingress
)
self.etcd.add_watch_prefix_callback(SVC_PREFIX, self.handle_svc)
    def start_deployments(self) -> None:
        """Start deployments.

        Ensure deployments are up.  Used at cluster startup.

        For every persisted deployment not yet tracked in
        self.deployments, a pod named
        '{deployment}-{pod-template-hash}' is (re)created from the
        deployment's pod template: any stale persisted pod with that
        name is deleted first.
        """
        for namespace in self.list_allnamespaces('namespaces')['items']:
            _ns = namespace['metadata']['name']
            _deployments = self.list_namespaced('deployments', namespace=_ns)[
                'items'
            ]
            for deployment in _deployments:
                name = deployment['metadata']['name']
                fullname = f'{name}.{_ns}'
                if fullname not in self.deployments:
                    manifest = deployment['spec']['template']
                    pod_template_hash = _generate_hash(manifest)
                    pod_name = f'{name}-{pod_template_hash}'
                    try:
                        self.delete('pods', pod_name, namespace=_ns)
                    except ValueError:
                        # No stale pod to remove.
                        pass
                    # Promote the template to a complete Pod manifest.
                    manifest['kind'] = 'Pod'
                    manifest['apiVersion'] = 'v1'
                    metadata = manifest['metadata']
                    metadata['name'] = pod_name
                    metadata['namespace'] = _ns
                    metadata['labels']['pod-template-hash'] = pod_template_hash
                    self.create('pods', manifest, namespace=_ns)
                    self.deployments[fullname] = pod_template_hash
def handle_svc(self, event: Event) -> None:
"""Handle watch events for Services."""
if isinstance(event, PutEvent):
manifest = json.loads(event.value)
namespace = manifest['metadata']['namespace']
name = manifest['metadata']['name']
for port in manifest['spec']['ports']:
self.services[(f'{name}.{namespace}', port['port'])] = (
'localhost',
port.get('targetPort', port['port']),
)
else:
manifest = json.loads(event.prev_value)
namespace = manifest['metadata']['namespace']
name = manifest['metadata']['name']
for port in manifest['spec']['ports']:
del self.services[(f'{name}.{namespace}', port['port'])]
    def handle_ingress(self, event: Event) -> None:
        """Handle watch events for Ingress.

        On creation, resolves the first rule's first path to its
        backing service, then to the first pod matching the service's
        selector, and mounts that pod's bottle application under the
        ingress path.  Only the first rule and first path are honored;
        deletion events are ignored.

        NOTE(review): the `...` statements below are placeholders — if
        the service or pod does not exist yet, the following `[0]`
        lookup raises IndexError instead of waiting.  Confirm intended.
        """
        if isinstance(event, PutEvent):
            manifest = json.loads(event.value)
            _ns = manifest['metadata']['namespace']
            rules = manifest['spec']['rules']
            http_paths = rules[0]['http']['paths']
            srvname = http_paths[0]['backend']['serviceName']
            path = http_paths[0]['path']
            # bottle mount points must end with '/'.
            if not path.endswith('/'):
                path += '/'
            # find Service
            _services = self.list_namespaced(
                'services',
                namespace=_ns,
                fieldselector=f'metadata.name=={srvname}',
            )['items']
            if not _services:
                ...  # wait for service to exist
            _service = _services[0]
            # find Pods matching the service's label selector
            selector = ','.join(
                f'{k}={v}' for k, v in _service['spec']['selector'].items()
            )
            _pods = self.list_namespaced(
                'pods', namespace=_ns, labelselector=selector
            )['items']
            if not _pods:
                ...  # wait for pod to exist
            pod = self.pods[f'{_pods[0]["metadata"]["name"]}.{_ns}']
            # mount the pod's application under the ingress path
            default_app().mount(path, pod.app)
    def handle_crd(self, event: Event) -> None:
        """Handle watch events for CustomResourceDefinition.

        On creation or update, registers (or refreshes) the custom
        resource in the APIService object of each declared version,
        creating the APIService entry from APISERVICE_TEMPLATE if it
        does not exist yet.

        Deletion events are not handled (placeholder `...` branch).
        """
        if isinstance(event, PutEvent):
            manifest = json.loads(event.value)
            spec, grp = manifest['spec'], manifest['spec']['group']
            for ver in spec['versions']:
                key = APISERVICE_KEY_TEMPLATE.format(
                    group=grp, version=ver['name']
                )
                val, _ = self.etcd.get(key)
                if val is None:
                    # First resource of this group/version: start from
                    # an empty APIResourceList.
                    val = APISERVICE_TEMPLATE.format(
                        group=grp, version=ver['name']
                    )
                apiservice = json.loads(val)
                # Refresh the existing resource entry if present,
                # append a new one otherwise.
                for resource in apiservice['resources']:
                    if resource['name'] == spec['names']['plural']:
                        resource['singularName'] = spec['names']['singular']
                        resource['kind'] = spec['names']['kind']
                        # resource['shortNames'] = spec['names']['shortNames']
                        resource['namespaced'] = spec['scope'] == 'Namespaced'
                        break
                else:
                    apiservice['resources'].append(
                        {
                            'name': spec['names']['plural'],
                            'singularName': spec['names']['singular'],
                            'kind': spec['names']['kind'],
                            # 'shortNames': spec['names']['shortNames'],
                            'namespaced': spec['scope'] == 'Namespaced',
                            'verbs': [
                                'create',
                                'delete',
                                'deletecollection',
                                'get',
                                'list',
                                'patch',
                                'update',
                                'watch',
                            ],
                        }
                    )
                self.etcd.put(key, json.dumps(apiservice))
        else:
            ...
    def handle_pod(self, event: Event) -> None:
        """Handle Pod creation events.

        A new pod runs in its own thread: its environment variables
        are injected into the (thread-local) environment, its class is
        instantiated, and its `run()` method invoked.  The pod's
        status moves through Pending -> Running -> Succeeded/Failed.

        Events carrying a previous value (updates to existing pods)
        are ignored.
        """

        def runner(image, env, args):
            # Executed in the pod's thread.  `name` and `_ns` come from
            # the enclosing scope, bound before the thread starts.
            ident = threading.current_thread().ident
            for var, value in env.items():
                os.environ[var] = value
            os.environ['HOSTNAME'] = name
            try:
                pod = image()
                self.pods[f'{name}.{_ns}'] = pod
                # Route DNS-style lookups made by this thread through
                # the cluster's in-process service registry.
                add_resolver(
                    ident, lambda host, port: self.dns_proxy(host, port, _ns)
                )
                self.update_status('pods', name, {'phase': 'Running'}, _ns)
                pod.run(*args)
                self.update_status('pods', name, {'phase': 'Succeeded'}, _ns)
            except Exception as err:
                self.update_status('pods', name, {'phase': 'Failed'}, _ns)
                print(err)
            finally:
                if get_resolver(ident):
                    remove_resolver(ident)
                del self.pods[f'{name}.{_ns}']

        if isinstance(event, PutEvent):
            if event.prev_value:
                return
            manifest = json.loads(event.value)
            name = manifest['metadata']['name']
            _ns = manifest['metadata']['namespace']
            self.update_status('pods', name, {'phase': 'Pending'}, _ns)
            try:
                # _make_pod presumably returns the (image, env, args)
                # triple consumed by runner() — defined elsewhere in
                # this class.
                threading.Thread(
                    target=runner, args=self._make_pod(manifest)
                ).start()
            except Exception as err:
                self.update_status('pods', name, {'phase': 'Failed'}, _ns)
                print(
                    f'Oops, something went wrong while starting pod {name} in namespace {_ns}: {err}'
                )
def handle_deploy(self, event: Event) -> None:
"""Handle Deployment events.
"""
if isinstance(event, PutEvent):
deployment = json.loads(event.value)
name = deployment['metadata']['name']
_ns = deployment['metadata']['namespace']
if event.prev_value:
return
pod = deployment['spec']['template']
pod_template_hash = _generate_hash(pod)
pod['kind'] = 'Pod'
pod['apiVersion'] = 'v1'
metadata = pod['metadata']
metadata['name'] = f'{name}-{pod_template_hash}'
metadata['namespace'] = _ns
metadata['labels']['pod-template-hash'] = pod_template_hash
self.create('pods', pod, namespace=_ns)
self.deployments[f'{name}.{_ns}'] = pod_template_hash
# Helpers
def _get_env_value(
self, definition: Dict[str, Any], namespace: str
) -> Optional[Union[str, Dict[str, Any]]]:
if 'valueFrom' in definition:
ref = definition['valueFrom']['secretKeyRef']
secret = self.get('secrets', ref['name'], namespace=namespace)
return str(base64.b64decode(secret['data'][ref['key']]), 'utf-8')
return definition['value']
    def _make_pod(self, pod: Object) -> Any:
        """Resolve a pod manifest into an (image, env, args) triple.

        The first container's `image` is interpreted as a dotted path:
        everything before the last '/' is the module path and the last
        segment is the class name within that module.

        # Returned value
        A tuple: the image class, a dictionary of resolved environment
        variables, and the container's argument list.
        """
        _namespace = pod['metadata']['namespace']
        # Only the first container of the spec is considered.
        _container = pod['spec']['containers'][0]
        _modulename, _classname = _container['image'].rsplit('/', 1)
        _modulename = _modulename.replace('/', '.')
        # __import__ returns the top-level package; walk the dotted
        # path to reach the leaf module.
        _module = __import__(_modulename)
        for _name in _modulename.split('.')[1:]:
            _module = getattr(_module, _name)
        _image = getattr(_module, _classname)
        _env = {
            definition['name']: self._get_env_value(definition, _namespace)
            for definition in _container.get('env', [])
        }
        return _image, _env, _container.get('args', [])
def _ensure_isnamespace(self, name: str) -> None:
"""Ensure the specified name is a known namespace.
# Returned value
None
# Raised exception
A _ValueError_ exception is raised if `name` is not a known
namespace name.
"""
namespace, _ = self.etcd.get(
CLUSTER_KEY_TEMPLATE.format(resource='namespaces', name=name)
)
if namespace is None:
raise ValueError(status('NotFound', f'Namespace {name} not found'))
def _ensure_isplurals(self, name: str) -> str:
"""Ensure the specified name is known.
# Returned value
A string, the corresponding kind.
# Raised exceptions
A _ValueError_ exception is raised if `name` is not a known
plurals name.
"""
for resources, _ in self.etcd.get_prefix(APISERVICE_PREFIX):
definitions = json.loads(resources)['resources']
for resource in definitions:
if name == resource['name']:
return resource['kind']
raise ValueError(status('NotFound', f'{name} not found'))
def _get_key(self, kind: str, name: str, namespace: str) -> str:
if namespace is None:
return CLUSTER_KEY_TEMPLATE.format(resource=kind, name=name)
self._ensure_isnamespace(namespace)
return NAMESPACED_KEY_TEMPLATE.format(
resource=kind, namespace=namespace, name=name
)
def _ensure_authn(self) -> str:
"""Ensure the incoming request is authenticated.
If from localhost, assume the `'localhost'` identity.
If from somewhere else, use the subject value in the provided
token.
Raises a _ValueError_ exception if the token is missing or
invalid, with the 'Unauthorized' flag set.
"""
if request.remote_addr == '127.0.0.1':
return 'localhost'
authz = request.headers.get('Authorization')
if authz is None:
raise ValueError(status('Unauthorized', 'No Bearer token'))
parts = authz.split()
if parts[0].lower() != 'bearer' or len(parts) != 2:
raise ValueError(
status('Unauthorized', 'Invalid Authorization header')
)
try:
return jwt.decode(parts[1], self._publickeys[0])['sub']
except:
raise ValueError(status('Unauthorized', 'Invalid token'))
def _ensure_authz(self, user: str) -> None:
"""Ensure the incoming request is authorized.
Raises a _ValueError_ exception if the request is not
authorized, with the 'Forbidden' flag set.
"""
if user in ('servicedesk', 'localhost') or ':' in user:
return
raise ValueError(status('Forbidden', 'Operation not allowed'))
    def _ensure_admission(self) -> None:
        """Admission-control hook.

        Currently a no-op placeholder: all requests are admitted.
        """
        pass
# API
# generic
    @entrypoint('/openapi/v2', methods=['GET'])
    def discover_openapi(self, **kwargs: Any) -> Dict[str, Any]:
        """Return the cluster's OpenAPI definitions.

        Thin wrapper delegating to `get_openapi`.
        """
        return get_openapi(self)
@entrypoint(API_ROUTE, methods=['GET'])
def discover_api_versions(self, **kwargs: Any) -> Dict[str, Any]:
"""Describe cluster APIs."""
return {
'kind': 'APIVersions',
'versions': ['v1'],
'serverAddressByClientCIDRs': [
{'clientCIDR': '0.0.0.0/0', 'serverAddress': 'localhost:8080'}
],
}
@entrypoint(APIGROUP_ROUTE, methods=['GET'])
def discover_api_groups(self, **kwargs: Any) -> Object:
"""Describe available API groups."""
versions = {
yaml.safe_load(r)['groupVersion']
for r, _ in self.etcd.get_prefix(APISERVICE_PREFIX)
}
return {
'kind': 'APIGroupList',
'apiVersion': 'v1',
'groups': [
{
'name': version.split('/')[0],
'versions': [
{
'groupVersion': version,
'version': version.split('/')[1],
}
],
'preferredVersion': {
'groupVersion': version,
'version': version.split('/')[1],
},
}
for version in versions
if '/' in version
],
}
@entrypoint(APISERVICE_ROUTES, methods=['GET'])
def discover_api_resources(
self, group: str = '', version: str = 'v1', **kwargs: Any
) -> Dict[str, Any]:
"""Describe available API resources."""
apiservice, _ = self.etcd.get(
APISERVICE_KEY_TEMPLATE.format(group=group, version=version)
)
return yaml.safe_load(apiservice)
@entrypoint(CLUSTER_ROUTES, methods=['GET'])
def list_allnamespaces(
self,
kind: str,
labelselector: str = '',
fieldselector: str = '',
**kwargs: Any,
) -> Object:
"""Return a list of matching objects in all namespaces.
Also used to returl list of cluster-level resources.
# Required parameters
- kind: a non-empty string
# Optional parameters
- labelselector: a string (empty by default)
- fieldselector: a string (empty by default)
# Returned value
A dictionary with the following entries:
- apiVersion: a string
- kind: a string
- items: a possibly empty list of dictionaries
"""
_kind = self._ensure_isplurals(kind)
prefix = CLUSTER_PREFIX_TEMPLATE.format(resource=kind)
return make_items(
_kind,
[
json.loads(obj)
for obj, _ in self.etcd.get_prefix(prefix)
if _match_label_selector(json.loads(obj), labelselector)
and _match_field_selector(json.loads(obj), fieldselector)
],
)
@entrypoint(NAMESPACED_ROUTES, methods=['GET'])
def list_namespaced(
self,
kind: str,
namespace: str = DEFAULT_NAMESPACE_NAME,
labelselector: str = '',
fieldselector: str = '',
**kwargs: Any,
) -> Object:
"""Return a list of objects matching kind and selectors.
# Required parameters
- kind: a non-empty string
# Optional parameters
- namespace: a non-empty string (default namespace by default)
- labelselector: a string (empty by default)
- fieldselector: a string (empty by default)
# Returned value
A dictionary with the following entries:
- apiVersion: a string
- kind: a string
- items: a possibly empty list of dictionaries
"""
self._ensure_isnamespace(namespace)
_kind = self._ensure_isplurals(kind)
prefix = NAMESPACED_PREFIX_TEMPLATE.format(
resource=kind, namespace=namespace
)
return make_items(
_kind,
[
json.loads(obj)
for obj, _ in self.etcd.get_prefix(prefix)
if _match_label_selector(json.loads(obj), labelselector)
and _match_field_selector(json.loads(obj), fieldselector)
],
)
@entrypoint(DIRECT_ROUTES)
def get(
self,
kind: str,
name: str,
namespace: Optional[str] = None,
**kwargs: Any,
) -> Object:
"""Return the requested object.
# Required parameters
- kind: a non-empty string
- name: a non-empty string
# Optional parameters
- namespace: a non-empty string or None (None by default)
# Return value
An object (a dictionary)
"""
self._ensure_isplurals(kind)
obj, meta = self.etcd.get(self._get_key(kind, name, namespace))
if obj is None:
raise ValueError(status('NotFound', f'Object {name} not found'))
obj = json.loads(obj)
obj['metadata']['resourceVersion'] = str(meta.mod_revision)
return obj
@entrypoint(CREATE_ROUTES)
def create(
self,
kind: str,
body: Object,
namespace: Optional[str] = None,
**kwargs,
) -> Object:
"""Create a new object.
# Required parameters
- body: a dictionary
# Optional parameters
- namespace: a non-empty string or None (None by default)
If `namespace` is specified, it overrides the metadata.namespace
value in `body`.
# Returned value
The created object (a dictionary).
"""
_kind = self._ensure_isplurals(kind)
if body['kind'] != _kind:
raise ValueError(
status(
'Invalid', f'Mismatched kinds: {_kind} and {body["kind"]}'
)
)
metadata = body['metadata']
name = metadata['name']
key = self._get_key(kind, name, namespace)
item, _ = self.etcd.get(key)
if item is not None:
raise ValueError(
status('AlreadyExists', f'Object {name} already exists')
)
if namespace is not None:
metadata['namespace'] = namespace
metadata['creationTimestamp'] = datetime.datetime.now().isoformat()
metadata['uid'] = str(uuid.uuid1())
metadata['generation'] = 1
body['status'] = {}
self.etcd.put(key, json.dumps(body))
return body
@entrypoint(DIRECT_ROUTES)
def update(
self,
kind: str,
name: str,
body: Object,
namespace: Optional[str] = None,
**kwargs: Any,
) -> Object:
"""Replace an existing object.
# Required parameters
- kind: a non-empty string
- name: a non-empty string
- body: a dictionary
# Optional parameters
- namespace: a non-empty string or None (None by default)
# Returned value
An object (a dictionary).
"""
self._ensure_isplurals(kind)
key = self._get_key(kind, name, namespace)
obj, _ = self.etcd.get(key)
if obj is None:
raise ValueError(status('NotFound', f'Object {name} not found'))
obj = json.loads(obj)
if obj['metadata']['uid'] != body['metadata']['uid']:
raise ValueError(status('Conflict', 'uid does not match'))
if obj['metadata']['namespace'] != body['metadata']['namespace']:
raise ValueError(status('Conflict', 'namespace does not match'))
self.etcd.put(key, json.dumps(body))
return body
@entrypoint(DIRECT_ROUTES)
def patch(
self,
kind: str,
name: str,
body: Object,
namespace: Optional[str] = None,
**kwargs: Any,
) -> Object:
"""Patch an existing object.
# Required parameters
- kind: a non-empty string
- name: a non-empty string
- body: a dictionary
# Optional parameters
- namespace: a non-empty string or None (None by default)
If `namespace` is specified, it overrides the metadata.namespace
value in `body`.
# Returned value
The patched object (a dictionary).
"""
self._ensure_isplurals(kind)
if 'generation' in body['metadata']:
raise ValueError(status('Invalid', 'generation field in metadata'))
key = self._get_key(kind, name, namespace)
item, _ = self.etcd.get(key)
if item is None:
raise ValueError(status('NotFound', f'Object {name} not found'))
obj = patchdict(json.loads(item), body)
obj['metadata']['generation'] += 1
self.etcd.put(key, json.dumps(obj))
return obj
@entrypoint(DIRECT_ROUTES)
def delete(
self, kind: str, name: str, namespace: Optional[str] = None, **kwargs,
) -> Object:
"""Delete an existing object.
# Required parameters
- kind: a non-empty string
- name: a non-empty string
# Optional parameters
- namespace: a non-empty string or None (None by default)
If `namespace` is not specified, it will delete the object in
the default namespace.
# Returned value
The deleted object (a dictionary).
"""
self._ensure_isplurals(kind)
key = self._get_key(kind, name, namespace)
obj, _ = self.etcd.get(key)
if obj is None:
raise ValueError(status('NotFound', f'Object {name} not found'))
self.etcd.delete(key)
return json.loads(obj)
    @entrypoint(STATUS_ROUTES, methods=['PATCH'])
    def patch_status(
        self,
        kind: str,
        name: str,
        body: Object,
        namespace: Optional[str] = None,
        **kwargs: Any,
    ) -> Object:
        """Patch an existing object status block.

        Not implemented yet: the request is accepted but ignored and
        None is returned.
        """
        # TODO: merge `body` into the object's status block (see
        # update_status for the replace variant).
        pass
@entrypoint(STATUS_ROUTES, methods=['UPDATE'])
def update_status(
self,
kind: str,
name: str,
body: Object,
namespace: Optional[str] = None,
**kwargs: Any,
) -> Object:
"""Update an existing object status block."""
key = self._get_key(kind, name, namespace)
item, _ = self.etcd.get(key)
if item is None:
raise ValueError(status('NotFound', f'Object {name} not found'))
obj = json.loads(item)
obj['status'] = body
self.etcd.put(key, json.dumps(obj))
return obj
    @entrypoint(STATUS_ROUTES, methods=['GET'])
    def get_status(
        self,
        kind: str,
        name: str,
        namespace: Optional[str] = None,
        **kwargs: Any,
    ) -> Object:
        """Return an existing object's status block.

        Not implemented yet: the request is accepted but ignored and
        None is returned.
        """
        # TODO: fetch the object and return its 'status' entry.
        pass
from typing import Any, Dict, Iterator, List, Optional
import json
from zabel.commons.exceptions import ApiError
from zabel.commons.utils import ensure_instance, ensure_nonemptystring
########################################################################
## Storage Interfaces
##
## Storage
## ObjectStorage
## CollectionStorage
class Storage:
    """Base class for storage wrappers.

    Defines the minimal contract every storage implementation honors:
    a constructor taking a single `configuration` dictionary, whose
    content is left to the concrete implementation.
    """

    # pylint: disable=too-few-public-methods

    def __init__(self, configuration: Dict[str, Any]) -> None:
        """Initialize the storage wrapper.

        # Required parameters

        - `configuration`: a dictionary

        The content of `configuration` is implementation-dependent.
        """
        ensure_instance('configuration', dict)
        self.configuration = configuration
class ObjectStorage(Storage):
    """Storage wrapper for a single object.

    Concrete implementations provide the following methods:

    - `read`
    - `create`
    - `update`
    - `delete`

    The stored data format is implementation-dependent.
    """

    def read(self) -> Any:
        """Return the stored object."""
        raise NotImplementedError

    def create(self, data: Any) -> None:
        """Store a new object."""
        raise NotImplementedError

    def update(self, data: Any) -> None:
        """Replace the stored object."""
        raise NotImplementedError

    def delete(self) -> None:
        """Delete the stored object."""
        raise NotImplementedError
class CollectionStorage(Storage):
    """Storage wrapper for a keyed collection of items.

    Concrete implementations provide the following methods:

    - `list`
    - `read`
    - `create`
    - `update`
    - `delete`

    The stored data format is implementation-dependent.
    """

    def list(self) -> List[Dict[str, Any]]:
        """Return the list of items in storage.

        # Returned value

        A list of dictionaries.  Each dictionary has at least a `key`
        entry:

        - `key`: a string

        It may also have a `last_modified` entry, if appropriate:

        - `last_modified`: a datetime object
        """
        raise NotImplementedError

    def read(self, key: str) -> Any:
        """Return the item stored under `key`."""
        raise NotImplementedError

    def create(self, key: str, data: Any) -> None:
        """Store a new item under `key`."""
        raise NotImplementedError

    def update(self, key: str, data: Any) -> None:
        """Replace the item stored under `key`."""
        raise NotImplementedError

    def delete(self, key: str) -> None:
        """Delete the item stored under `key`."""
        raise NotImplementedError
########################################################################
## AWS Storage
class AWSS3Storage(Storage):
    """Abstract base class for the AWS S3 storage wrappers.

    Expects the configuration dictionary to contain:

    ```python
    {
        'storage': {
            'bucket': 'a string'
        }
    }
    ```
    """

    # pylint: disable=too-few-public-methods

    def __init__(self, configuration: Dict[str, Any]) -> None:
        """Initialize a new AWSS3Storage object."""
        super().__init__(configuration)
        from boto3.resources.factory import ServiceResource

        # The S3 service resource is created lazily, on first access.
        self._s3: Optional[ServiceResource] = None

    # Helpers

    def _get_bucket(self) -> Any:
        """Return the configured bucket, creating the client if needed."""
        if self._s3 is None:
            import boto3

            self._s3 = boto3.resource('s3')
            if self._s3 is None:
                raise ApiError('AWS S3 service resource is None.')
        return self._s3.Bucket(self.configuration['storage']['bucket'])

    def _get_object(self, name: str) -> Any:
        """Return the named object in the configured bucket."""
        return self._get_bucket().Object(name)
class AWSS3Bucket(AWSS3Storage, CollectionStorage):
    """Simple AWS S3 Storage class.

    This class handles objects stored in a S3 bucket.

    It uses the following configuration entry:

    ```python
    {
        'storage': {
            'bucket': 'a string'
        }
    }
    ```

    It must be an existing bucket name.

    Objects are stored as JSON objects.
    """

    def list(self) -> List[Dict[str, Any]]:
        """Return keys and last-modified times of the bucket objects."""
        return [
            {'key': item.key, 'last_modified': item.last_modified}
            for item in self._get_bucket().objects.all()
        ]

    def read(self, key: str) -> Dict[str, Any]:
        """Return the JSON-decoded content of the specified object."""
        ensure_nonemptystring('key')
        obj = self._get_object(key)
        return json.loads(obj.get()['Body'].read().decode('utf-8'))  # type: ignore

    def create(self, key: str, data: Dict[str, Any]) -> None:
        """Create the object; raise ApiError if it already exists."""
        ensure_nonemptystring('key')
        ensure_instance('data', dict)
        if self._has(key):
            # Message grammar fixed ('already exist' -> 'already exists').
            raise ApiError(f'Object {key} already exists.')
        self._put(key, data)

    def update(self, key: str, data: Dict[str, Any]) -> None:
        """Replace the object; raise ApiError if it does not exist."""
        ensure_nonemptystring('key')
        ensure_instance('data', dict)
        if not self._has(key):
            raise ApiError(f'Object {key} does not exist.')
        self._put(key, data)

    def _put(self, key: str, data: Dict[str, Any]) -> None:
        # Shared by create() and update().  `default=str` lets datetime
        # and similar objects serialize instead of aborting the dump.
        self._get_object(key).put(
            Body=bytes(json.dumps(data, default=str), 'utf-8')
        )

    def _has(self, key: str) -> bool:
        """Return whether the object exists (HEAD request)."""
        import botocore

        try:
            self._get_object(key).load()
        except botocore.exceptions.ClientError as error:
            if error.response['Error']['Code'] != '404':
                raise
            return False
        return True
class AWSS3Object(AWSS3Storage, ObjectStorage):
    """Simple AWS S3 Object Storage class.

    This class handles one object stored in a S3 bucket.  It uses the
    following configuration entries:

    ```python
    {
        'storage': {
            'bucket': 'a string',
            'filename': 'a string'
        }
    }
    ```

    `bucket` must be an existing bucket name, and `filename` is
    expected to be an existing and valid JSON object.

    The bucket may contain other objects; they are ignored.
    """

    def read(self) -> Dict[str, Any]:
        """Return the stored object, JSON-decoded."""
        filename = self.configuration['storage']['filename']
        payload = self._get_object(filename).get()['Body'].read()
        return json.loads(payload.decode('utf-8'))  # type: ignore

    def update(self, data: Dict[str, Any]) -> None:
        """Overwrite the stored object with `data`, JSON-encoded."""
        filename = self.configuration['storage']['filename']
        body = json.dumps(data, default=str).encode('utf-8')
        self._get_object(filename).put(Body=body)
########################################################################
## Useful Helpers
class ManagedDict(Dict[str, Any]):
    """Dictionary persisted through an _ObjectStorage_.

    Entries whose key starts with an underscore ('_') are 'hidden':
    they are skipped by `__iter__` and invisible to `__contains__`,
    but they can still be read and written, and they are persisted.

    In other words, assuming `foo` is a managed dictionary:

    ```python
    >>> foo['bar'] = 123
    >>> foo['_bar'] = 456
    >>> foo['bar']                    # => 123
    >>> foo['_bar']                   # => 456
    >>> 'bar' in foo                  # => True
    >>> '_bar' in foo                 # => False
    >>> keys = [k for k in foo]       # => ['bar']
    >>> sorted(foo)                   # => ['bar']
    >>> # but
    >>> len(foo)                      # => 2
    >>> foo.keys()                    # => dict_keys(['bar', '_bar'])
    >>> foo.items()                   # => dict_items([('bar', 123),
    >>> #                                   ('_bar', 456)])
    >>> foo.values()                  # => dict_values([123, 456])
    ```
    """

    def __init__(self, configuration: Dict[str, Any]) -> None:
        """Initialize a managed dict from its storage.

        # Required parameters

        - `configuration`: a dictionary

        `configuration` must contain a `storage` entry, itself a
        dictionary with at least a `type` entry (a class subclassing
        __ObjectStorage__).  It may contain additional entries,
        depending on that class.

        The initial content is read from storage.  Changes are allowed
        during the object's lifetime; use `persist` to save them.
        """
        super().__init__()
        storage_class = configuration['storage']['type']
        self._storage = storage_class(configuration)
        self.update(self._storage.read())

    def __iter__(self) -> Iterator[str]:
        # Hidden ('_'-prefixed) keys are filtered out.
        return (k for k in super().__iter__() if not k.startswith('_'))

    def __contains__(self, key: object) -> bool:
        if not isinstance(key, str) or key.startswith('_'):
            return False
        return super().__contains__(key)

    def persist(self) -> None:
        """Write the current content (hidden entries included) back."""
        self._storage.update(self)
from typing import Any, Dict, List
from zabel.commons.interfaces import Manager, ManagedProjectDefinition
from zabel.commons.utils import ensure_instance, ensure_nonemptystring
########################################################################
## Abstract ManagedProjecDefinitionManager class
class ManagedProjectDefinitionManager(Manager):
    """Abstract base for managed project definition managers.

    Provides a default implementation for the operations a managed
    project definition manager must offer:

    - constructor (`__init__`)
    - `list_managedprojects`
    - `get_managedproject`
    - `create_managedproject`
    - `update_managedproject`

    Concrete subclasses relying on these defaults must implement:

    - `get_managedproject_key`
    - `get_key_managedproject`
    - `is_managedproject_key`

    The constructor, if overwritten, must take a metadata definition
    as a parameter: a dictionary with at least the following entries:

    - `storage (dict)`: a dictionary with a `type` entry (a class
      inheriting _CollectionStorage_), plus whatever entries that
      storage class requires
    - `instances (class)`: a class inheriting _ManagedProjectDefinition_
    """

    def __init__(self, configuration: Dict[str, Any]) -> None:
        self.configuration = configuration
        self._storage = configuration['storage']['type'](configuration)

    # Methods concrete subclasses must provide

    def get_managedproject_key(self, project_id: str) -> str:
        """Map a project ID to its storage key.

        # Required parameters

        - `project_id`: a non-empty string

        # Returned value

        A non-empty string.
        """
        raise NotImplementedError

    def get_key_managedproject(self, key: str) -> str:
        """Map a storage key back to its project ID.

        Only called on keys that are managed project definition keys.

        # Required parameters

        - `key`: a non-empty string

        # Returned value

        A non-empty string.
        """
        raise NotImplementedError

    def is_managedproject_key(self, key: str) -> bool:
        """Tell whether `key` denotes a managed project definition.

        The underlying storage collection may hold other objects; this
        predicate is used to filter them out.

        # Required parameters

        - `key`: a non-empty string

        # Returned value

        A boolean.
        """
        raise NotImplementedError

    # Default implementation

    def list_managedprojects(self) -> List[Dict[str, Any]]:
        """Return the list of managed projects on the platform.

        # Returned value

        A list of dictionaries, each with the following entries:

        - `project_id`: a string
        - `last_modified`: a timestamp or None
        """
        projects = []
        for entry in self._storage.list():
            if not self.is_managedproject_key(entry['key']):
                continue
            projects.append(
                {
                    'project_id': self.get_key_managedproject(entry['key']),
                    'last_modified': entry.get('last_modified', None),
                }
            )
        return projects

    def get_managedproject(self, project_id: str) -> ManagedProjectDefinition:
        """Return the managed project details.

        # Required parameters

        - `project_id`: a non-empty string

        # Returned value

        Please refer to the concrete managed project definition used
        for more information on the returned value format.
        """
        ensure_nonemptystring('project_id')
        raw = self._storage.read(self.get_managedproject_key(project_id))
        # `instances` is the concrete ManagedProjectDefinition class.
        return self.configuration['instances'].from_dict(raw)

    def create_managedproject(
        self, project_id: str, project: ManagedProjectDefinition
    ) -> None:
        """Create a new managed project.

        # Required parameters

        - `project_id`: a non-empty string
        - `project`: a dictionary

        `project_id` must not be the ID of an existing managed
        project; if it is, an _ApiError_ exception is raised.
        """
        ensure_nonemptystring('project_id')
        ensure_instance('project', dict)
        key = self.get_managedproject_key(project_id)
        self._storage.create(key, project)

    def update_managedproject(
        self, project_id: str, project: ManagedProjectDefinition
    ) -> None:
        """Update a managed project definition.

        # Required parameters

        - `project_id`: a non-empty string
        - `project`: a dictionary

        If no managed project with the provided ID exists, an
        _ApiError_ exception is raised.
        """
        ensure_nonemptystring('project_id')
        ensure_instance('project', dict)
        key = self.get_managedproject_key(project_id)
        self._storage.update(key, project)
from typing import Any, Callable, Dict, List, NoReturn, Optional, Set
from zabel.commons.exceptions import ApiError
from zabel.commons.interfaces import ManagedProjectDefinition
from zabel.commons.utils import (
ensure_in,
ensure_nonemptystring,
ensure_noneorinstance,
)
########################################################################
########################################################################
## managed project definitions
class DomainProviderManagedProjectDefinition(ManagedProjectDefinition):
"""Managed Project Definition with Domains and Providers.
It extends _ManagedProjectDefinition_.
A _DomainProviderManagedProject_ contains a set of _domains_ (e.g.
tools or services) that themselves contain a set of _providers_
(e.g. teams).
Each provider contains _members_, split in _categories_, and a set
of additional fields.
By default there are three member categories ('admins', 'readers'
and 'users') and one additional field ('options').
This wrapper offers a set of helpers managing members and additional
fields in domain providers.
## Usage
If an implementation wants to make use of the offered helpers, it
will have to declare the list of its domains in the `DOMAINS`
attribute.
Each domain will be able to contain any number of providers. Those
providers then can have members ('admins', 'users' and 'readers' by
default, as specified by the `MEMBER_CATEGORIES` attribute) and
additional fields ('options' by default, as specified by the
`OTHER_CATEGORIES` attribute).
## Attributes
| Attribute | Description |
| ------------------- | ------------------------------------------ |
| `DOMAINS` | A dictionary. The keys are the internal
names (the entry in the MPD) and the values
are the public names (what will be used in
the accessors method names).<br/>
The internal names will be keys in the
managed project definition dictionaries and
public names will be used in accessors
method names.<br/>
Internal and public names should not start
with an underscore. |
| `MEMBER_CATEGORIES` | A dictionary. The keys are the members
categories names, and the values are their
types (_dict_ being the only supported
value).<br/>
The 'member' category name is reserved. |
| `OTHER_CATEGORIES` | A dictionary. The keys are the 'other'
categories names, and the values are their
types (_list_ being the only supported
value). |
## Accessors
Accessors methods for domains are automatically generated if not
redefined in the children class.
For each domain, the accessors methods are (assuming the default
categories for providers):
- `list_{public_name}s()`
- `create_{public_name}(provider)`
- `list_{public_name}s_members()`
- `delete_{public_name}s_member()`
- `list_{public_name}_admins(provider)`
- `add_{public_name}_admin(provider, name, service='*')`
- `remove_{public_name}_admin(provider, name, service=None)`
- `list_{public_name}_users(provider)`
- `add_{public_name}_user(provider, name, service='*')`
- `remove_{public_name}_user(provider, name, service=None)`
- `list_{public_name}_readers(provider)`
- `add_{public_name}_reader(provider, name, service='*')`
- `remove_{public_name}_reader(provider, name, service=None)`
- `list_{public_name}_options(provider)`
- `add_{public_name}_option(provider, item)`
- `remove_{public_name}_option(provider, item)`
In addition to those domain accessors, the following methods are
defined:
- `list_members()`
- `remove_member(item)`
- `list_domain_provider_{category}s(domain, provider)`
- `add_domain_provider_{category}(domain, provider, item, service='*')`
- `remove_domain_provider_{category}(domain, provider, item, service=None)`
"""
DOMAINS: Dict[str, str] = {}
MEMBER_CATEGORIES: Dict[str, type] = {
'admin': list,
'user': list,
'reader': list,
}
OTHER_CATEGORIES: Dict[str, type] = {'option': list}
    def __init__(self) -> None:
        """Create a new managed project.

        Generates the per-category accessor methods for each domain
        and initializes an empty `spec` section with one (empty)
        provider list per declared domain.

        Raises an _ApiError_ if MEMBER_CATEGORIES uses the reserved
        'member' key.
        """
        super().__init__()
        if 'member' in self.MEMBER_CATEGORIES:
            raise ApiError('Key \'member\' is reserved in MEMBER_CATEGORIES')
        # The _expose_* helpers presumably generate the accessor
        # methods described in the class docstring — they are defined
        # outside this excerpt (TODO confirm).
        for category in self.MEMBER_CATEGORIES:
            self._expose_domain_provider_membercategoryaccessor(category)
        for category in self.OTHER_CATEGORIES:
            self._expose_domain_provider_othercategoryaccessor(category)
        self['spec'] = {}
        for domain in self.DOMAINS:
            self['spec'][domain] = []
            self._expose_accessors(domain)
# Domains
#
def is_domain_publicname(self, domain: str) -> bool:
"""Return whether a domain is a domain public name."""
return domain in self.DOMAINS.values()
def get_domain_publicname(self, internal: str) -> str:
"""Return domain's public name."""
return self.DOMAINS[internal]
def get_domain_internalname(self, domain: str) -> str:
"""Return domain's internal name."""
return [d for d in self.DOMAINS if self.DOMAINS[d] == domain][0]
# Members
#
def list_members(self) -> Set[str]:
"""Return the set of domain provider members."""
members: Set[str] = set()
for domain in self.DOMAINS:
members = members.union(self.list_domain_members(domain))
return members
def remove_member(self, member: str) -> None:
"""Remove member from all domain providers."""
for domain in self.DOMAINS:
self.remove_domain_member(domain, member)
# Domains Members Helpers
#
# list_domain_members
# remove_domain_member
def list_domain_members(self, domain: str) -> Set[str]:
"""Return a set of declared domain members.
`domain` is the domain internalname.
# Required parameters
- domain: a non-empty string
# Returned value
A set. Each item in the set is a string.
"""
members: Set[str] = set()
for provider in self['spec'][domain]:
for category in self.MEMBER_CATEGORIES:
members = members.union(
m['account'] for m in provider[f'{category}s']
)
return members
def remove_domain_member(self, domain: str, member: str) -> None:
"""Remove a member from all domain providers.
`domain` is the domain internal name.
# Required parameters
- domain: a non-empty string
- member: a non-empty string
# Returned value
None.
"""
for provider in self['spec'][domain]:
for category in self.MEMBER_CATEGORIES:
members = provider[f'{category}s']
for item in [m for m in members if m['account'] == member]:
members.remove(item)
# Domains Providers Helpers
#
# list_domain_providers
# create_domain_provider
# delete_domain_provider
def list_domain_providers(self, domain: str) -> Set[str]:
"""Return a set of declared domain providers.
# Required parameters
- `domain`: a non-empty string
# Returned value
A set. Each item in the set is a string.
"""
if not domain.startswith('__'):
ensure_in('domain', self.DOMAINS)
return set(provider['name'] for provider in self['spec'][domain])
def create_domain_provider(
self, domain: str, provider: str, **fields: Any
) -> None:
"""Add a provider to a domain.
# Required parameters
- `domain`: a non-empty string
- `provider`: a non-empty string
# Optional parameters
- `fields`: a dictionary or None
# Raised exceptions
If `provider` is already declared in `domain`, an _ApiError_
exception is raised.
"""
ensure_nonemptystring('provider')
ensure_noneorinstance('fields', dict)
if not domain.startswith('__'):
ensure_in('domain', self.DOMAINS)
if provider in self.list_domain_providers(domain):
raise ApiError(
'Provider %s already declared in domain %s.'
% (provider, domain)
)
_definition = {'name': provider}
self['spec'][domain].append(_definition)
for category in self.MEMBER_CATEGORIES:
_definition[f'{category}s'] = self.MEMBER_CATEGORIES[category]()
for category in self.OTHER_CATEGORIES:
_definition[f'{category}s'] = self.OTHER_CATEGORIES[category]()
if fields:
for field in fields:
_definition[field] = fields[field]
def delete_domain_provider(self, domain: str, provider: str) -> None:
"""Remove provider.
# Required parameters
- `domain`: a non-empty string
- `provider`: a non-empty string
# Raised exceptions
If `provider` is not declared in `domain`, an _ApiError_
exception is raised.
"""
ensure_nonemptystring('provider')
if not domain.startswith('__'):
ensure_in('domain', self.DOMAINS)
if provider not in self.list_domain_providers(domain):
raise ApiError(
'Provider %s not declared in domain %s.' % (provider, domain)
)
for item in [p for p in self['spec'][domain] if p['name'] == provider]:
self['spec'][domain].remove(item)
def list_domain_provider_category_members(
self, domain: str, provider: str, category: str
) -> List[Dict[str, Any]]:
"""Return specified members for domain provider.
`domain` is the domain internal name.
# Required parameters
- `domain`: a non-empty string
- `provider`: a non-empty string
- `category`: a non-empty string
# Returned value
A list of dictionaries.
# Raised exceptions
Raises an _ApiError_ exception if `provider`, `domain`, or
`category` are not known.
"""
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
return list(p[category])
else:
self._raise_keyerror(domain, provider, category)
except KeyError:
self._raise_keyerror(domain, provider, category)
# private helpers
def _raise_keyerror(
self, domain: str, provider: str, item: str, key: Optional[str] = None
) -> NoReturn:
if domain not in self['spec']:
raise ApiError('Domain %s not known.' % domain)
if provider not in self['spec'][domain]:
raise ApiError(
'Provider %s not known in domain %s.' % (provider, domain)
)
if item not in self['spec'][domain][provider]:
raise ApiError(
'Item %s not known in provider %s for domain %s.'
% (item, provider, domain)
)
raise ApiError(
'%s %s not known in provider %s for domain %s.'
% (item.title(), key, provider, domain)
)
def _add_category_member_option(
self,
domain: str,
provider: str,
category: str,
member: str,
option: str,
) -> None:
"""Add option to member, or create member with specified option."""
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
members = p[category]
break
else:
self._raise_keyerror(domain, provider, category)
for m in members:
if m['account'] == member:
if option in m['options']:
raise ApiError(
'%s %s already set to %s for provider %s.'
% (category, member, option, provider)
)
m['options'].append(option)
break
else:
members.append({'account': member, 'options': [option]})
except KeyError:
self._raise_keyerror(domain, provider, category)
def _remove_category_member_option(
self,
domain: str,
provider: str,
category: str,
member: str,
option: Optional[str],
) -> None:
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
members = p[category]
break
else:
self._raise_keyerror(domain, provider, category, member)
for m in members:
if m['account'] == member:
if option is not None:
if option in m['options']:
m['options'].remove(option)
else:
raise ApiError(
'%s %s not set to %s for provider %s.'
% (category, member, option, provider)
)
if option is None or not m['options']:
members.remove(m)
break
else:
self._raise_keyerror(domain, provider, category, member)
except KeyError:
self._raise_keyerror(domain, provider, category, member)
def _list_domain_provider_category_items(
self, domain: str, provider: str, category: str
) -> List[str]:
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
return list(p[category])
else:
self._raise_keyerror(domain, provider, category)
except KeyError:
self._raise_keyerror(domain, provider, category)
def _add_category_item(
self, domain: str, provider: str, category: str, item: str
) -> None:
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
if item in p[category]:
raise ApiError(
'%s %s already declared for provider %s'
% (category, item, provider)
)
p[category].append(item)
break
else:
self._raise_keyerror(domain, provider, category)
except KeyError:
self._raise_keyerror(domain, provider, category)
def _remove_category_item(
self, domain: str, provider: str, category: str, item: str
) -> None:
ensure_nonemptystring('domain')
ensure_nonemptystring('provider')
ensure_nonemptystring('category')
try:
for p in self['spec'][domain]:
if p['name'] == provider:
if item not in p['category']:
raise ApiError(
'%s %s not declared for provider %s'
% (category, item, provider)
)
p[category].remove(item)
break
else:
self._raise_keyerror(domain, provider, category)
except KeyError:
self._raise_keyerror(domain, provider, category)
# accessors generators
def _create_method(self, name: str, body: Callable[..., Any]) -> None:
if not hasattr(self, name):
self.__setattr__(name, body)
def _expose_member_category_accessors(
self, domain: str, name: str, category: str
) -> None:
"""Create undefined accessors for domain member categories.
Will attempt to create the following methods:
- `list_{domain}_{category}s(provider)`
- `add_{domain>_{category}(provider, member, service='*')`
- `remove_{domain}_{category}(provider, member, service=None)`
If a method already exists, it is left as-is.
"""
cat = '%ss' % category
self._create_method(
'list_%s_%ss' % (name, category),
lambda provider: self.list_domain_provider_category_members(
domain, provider, cat
),
)
self._create_method(
'add_%s_%s' % (name, category),
lambda provider, member, service='*': self._add_category_member_option(
domain, provider, cat, member, service
),
)
self._create_method(
'remove_%s_%s' % (name, category),
lambda provider, member, service=None: self._remove_category_member_option(
domain, provider, cat, member, service
),
)
def _expose_other_category_accessors(
self, domain: str, name: str, category: str
) -> None:
"""Create undefined accessors for domain 'other' categories.
Will attempt to create the following methods:
- `list_{domain}_{category}s(provider)`
- `add_{domain}_{category}(provider, item)`
- `remove_{domain}_{category}(provider, item)`
If a method already exists, it is left as-is.
"""
cat = '%ss' % category
self._create_method(
'list_%s_%ss' % (name, category),
lambda provider: self._list_domain_provider_category_items(
domain, provider, cat
),
)
self._create_method(
'add_%s_%s' % (name, category),
lambda provider, option: self._add_category_item(
domain, provider, cat, option
),
)
self._create_method(
'remove_%s_%s' % (name, category),
lambda provider, option: self._remove_category_item(
domain, provider, cat, option
),
)
def _expose_accessors(self, domain: str) -> None:
"""Create accessors for domain, if not already defined.
Will attempt to create the following methods:
- `list_{domain}s()`
- `create_{domain}(provider)`
- `delete_{domain}(provider)`
- `list_{domain}s_members()`
- `remove_{domain}s_member()`
Will also attempt to create the category accessors.
If a method already exists, it is left as-is.
"""
name = self.DOMAINS[domain]
# providers
self._create_method(
'list_%ss' % name, lambda: self.list_domain_providers(domain)
)
self._create_method(
'create_%s' % name,
lambda provider: self.create_domain_provider(domain, provider),
)
self._create_method(
'delete_%s' % name,
lambda provider: self.delete_domain_provider(domain, provider),
)
self._create_method(
'list_%ss_members' % name, lambda: self.list_domain_members(domain)
)
self._create_method(
'remove_%ss_member' % name,
lambda member: self.remove_domain_member(domain, member),
)
# providers members
for category in self.MEMBER_CATEGORIES:
self._expose_member_category_accessors(domain, name, category)
for category in self.OTHER_CATEGORIES:
self._expose_other_category_accessors(domain, name, category)
def _expose_domain_provider_membercategoryaccessor(
self, category: str
) -> None:
"""Create domain_provider accessors for member category.
Will attempt to create the following methods:
- `list_domain_provider_{category}s(domain, provider)`
- `add_domain_provider_{category}(domain, provider, member,
service='*')`
- `remove_domain_provider_{category}(domain, provider, member,
service=None)`
If a method already exists, it is left as-is.
"""
cat = '%ss' % category
self._create_method(
'list_domain_provider_%ss' % category,
lambda domain, provider: self.list_domain_provider_category_members(
domain, provider, cat
),
)
self._create_method(
'add_domain_provider_%s' % category,
lambda domain, provider, member, service='*': self._add_category_member_option(
domain, provider, cat, member, service
),
)
self._create_method(
'remove_domain_provider_%s' % category,
lambda domain, provider, member, service=None: self._remove_category_member_option(
domain, provider, cat, member, service
),
)
def _expose_domain_provider_othercategoryaccessor(
self, category: str
) -> None:
"""Create domain_provider accessors for other category.
Will attempt to create the following methods:
- `list_domain_provider_{category}s(domain, provider)`
- `add_domain_provider_{category}(domain, provider, item)`
- `remove_domain_provider_{category}(domain, provider, item)`
If a method already exists, it is left as-is.
"""
cat = '%ss' % category
self._create_method(
'list_domain_provider_%ss' % category,
lambda domain, provider: self._list_domain_provider_category_items(
domain, provider, cat
),
)
self._create_method(
'add_domain_provider_%s' % category,
lambda domain, provider, member: self._add_category_item(
domain, provider, cat, member
),
)
self._create_method(
'remove_domain_provider_%s' % category,
lambda domain, provider, member: self._remove_category_item(
domain, provider, cat, member
),
) | zabel | /core/projects/domainprovidermpd.py | domainprovidermpd.py |
from typing import (
Any,
Callable,
Dict,
Iterable,
List,
Mapping,
MutableMapping,
Optional,
)
from zabel.commons.exceptions import ApiError
from zabel.commons.interfaces import (
BaseService,
ManagedService,
ManagedProjectDefinition,
ManagedAccount,
)
from zabel.commons.utils import (
api_call,
ensure_instance,
ensure_nonemptystring,
ensure_noneorinstance,
)
from ..accounts import ManagedAccountManager
from ..projects import ManagedProjectDefinitionManager
########################################################################
## Service
class Platform(BaseService):
"""A _Platform_ service.
A platform is a collection of _services_ with associated _members_,
_properties_, and _managed projects_.
This class has no public constructor. You have to use the
#::amac.make_platform() factory method to instantiate a _Platform_
object.
# Services
Services are of two kinds: _utilities_, that are not hosting managed
projects, and _managed services_, that are hosting managed projects.
Utilities are for example directories or external services (AWS,
Azure, ...).
Managed services are typically tools (Artifactory, Jira, ...).
If no credentials are provided for a service, the service will not
be instantiated (except if it is defined as being an _anonymous_
service).
# Members
A platform has associated members (members of the managed services
that are associated to the platform).
Members have _canonical_ IDs, which are platform-specific.
Each service must implement the necessary translation methods, to
and from those canonical IDs from and to their internal IDs.
Canonical IDs are strings. Internal IDs are service-specific
(but typically either strings or integers).
# Properties
Properties are data objects attached to a platform. Those
properties can be literals (say, a text description) or 'live'
objects (instances of a class, for example #::Manager or
#::ManagedDict).
Properties have a name and a value. They are singletons.
Each platform has at least two associated properties,
`managedprojectdefinitions`, which is an object implementing the
#::ManagedProjectDefinitionManager interface and `managedaccounts`,
which is an object implementing the #::ManagedAccountManager
interface.
# Managed projects
Managed projects each have a definition. Those definitions are
objects implementing the #::ManagedProjectDefinition interface.
Managed projects can be 'pushed' to the platform managed services.
# Managed accounts
Managed accounts represent the managed projects members, and may
possibly represent former managed projects members, as well as
other members (for example, platform administrators, ...).
Managed accounts can be queried or disabled.
## Platform attributes
In addition to the methods it provides, 3 attributes are exposed:
| Attribute | Description |
| ------------- | ------------------------------------------------ |
| `definition` | A dictionary, the platform definition |
| `services` | A dictionary where keys are service names and
values are instances of the specified services |
"""
def __init__(
self, _: str, definition: Dict[str, Any] # , credentials: Credentials
) -> None:
self.definition = definition
self.credentials = credentials
self.services: Dict[str, Service] = {}
self._mpdmanager: Optional[ManagedProjectDefinitionManager] = None
self._mamanager: Optional[ManagedAccountManager] = None
for name, service in definition['services'].items():
if service.get('anonymous', False) or credentials.contains(name):
self.services[name] = _make_service(name, service, credentials)
for service in self.services.values():
service.platform = self
def __str__(self) -> str:
return f'{self.__class__.__name__}: {self.definition["name"]!r}'
def __repr__(self) -> str:
return f'<{self.__class__.__name__}: {self.definition["name"]!r}>'
####################################################################
# platform members
#
# list_members
# get_member
@api_call
def list_members(self) -> Dict[str, Dict[str, Any]]:
"""Return the members of the service.
Member of the platform are those of the managed services it
contains.
# Returned value
A dictionary. The keys are the canonical IDs and the values are
dictionaries.
Those dictionaries have one entry per platform service in which
the member is known.
Entries are the name of the service, and values are the service
member representation.
"""
def _add_member(
members: Dict[str, Dict[str, Any]],
name: str,
member: str,
user: Any,
) -> None:
if member not in members:
members[member] = dict()
members[member][name] = user
members: Dict[str, Dict[str, Any]] = dict()
for name, service in self.services.items():
if not isinstance(service, ManagedService):
continue
service_members = service.list_members()
for member in service_members:
_add_member(members, name, member, service_members[member])
return members
@api_call
def get_member(self, member_id: str) -> Dict[str, Any]:
"""Return the user details.
# Required parameters
- member_id: a string
`member_id` is the canonical member ID.
# Returned value
A dictionary. See #list_members() for more information on
its content.
# Raised exceptions
An _ApiError_ exception is raised if `member_id` is not a member
of the platform.
"""
ensure_nonemptystring('member_id')
member = dict()
for name, service in self.services.items():
if isinstance(service, ManagedService):
try:
member[name] = service.get_member(member_id)
except ApiError:
pass
if not member:
raise ApiError('Member %s not found on platform' % member_id)
return member
####################################################################
# platform renderer
#
def render(self, target: Any = None) -> None:
"""Render the platform object for the specified target."""
raise NotImplementedError
####################################################################
# platform properties
#
_properties: Dict[str, Any] = {}
def get_property(self, name: str) -> Any:
"""Return platform property.
# Required parameters
- name: a string
# Returned value
A data object, which is either the literal value as stored
in the platform definition or an instance of a class specified
in the `type` entry of the literal.
This class is expected to have a constructor with one parameter,
which will be the dictionary associated with the property.
For example, if the 'properties' section in the platform
definition contains the following:
```python
{
'foo': [1, 2, 3],
'bar': {
'type': Something,
'arg': [1, 2, 3]
}
}
```
`get_property('foo')` will return `[1, 2, 3]`.
`get_property('bar')` will return the result of the following
call:
```python
Something({'type': Something, 'arg': [1, 2, 3]})
```
Properties are singletons.
"""
if name not in self._properties:
what = self.definition['properties'][name]
if isinstance(what, dict) and 'type' in what:
self._properties[name] = what['type'](what)
else:
self._properties[name] = what
return self._properties[name]
####################################################################
# platform managed accounts
#
# get_managedaccount
# create_managedaccount
# update_managedaccount
@api_call
def get_managedaccount(self, canonical_id: str) -> ManagedAccount:
"""Return managed account details.
Please refer to the concrete managed account used for more
information on its content.
# Required parameters
- canonical_id: a non-empty string
# Return value
An instance of _ManagedAccount_.
"""
ensure_nonemptystring('canonical_id')
return self._get_mamanager().get_managedaccount(canonical_id)
@api_call
def create_managedaccount(
self, canonical_id: str, account: ManagedAccount
) -> None:
"""Create new managed account.
# Required parameters
- canonical_id: a string
- account: a #::ManagedAccount object
# Raised exceptions
`canonical_id` must not be the ID of an existing managed
account. If it is, an _ApiError_ exception will be raised.
"""
ensure_nonemptystring('canonical_id')
ensure_instance('account', ManagedAccount)
self._get_mamanager().create_managedaccount(canonical_id, account)
@api_call
def update_managedaccount(
self, canonical_id: str, account: ManagedAccount
) -> None:
"""Update managed account
# Required parameters
- canonical_id: a string
- account: a #::ManagedAccount object
# Raised exceptions
If no existing managed account with the provided ID exists,
an _ApiError_ exception is raised.
"""
ensure_nonemptystring('canonical_id')
ensure_instance('account', ManagedAccount)
self._get_mamanager().update_managedaccount(canonical_id, account)
####################################################################
# platform managed projects
#
# list_managedprojects
# get_managedproject
# create_managedproject
# update_managedproject
# push_managedproject
# render_managedproject
# reading/writing projects
@api_call
def list_managedprojects(self) -> List[Dict[str, Any]]:
"""Return list of managed projects on platform.
# Returned value
Each item in the list is a dictionary with the following
entries:
- project_id: a string
- last_modified: a _datetime.datetime_ object
"""
return self._get_mpdmanager().list_managedprojects()
@api_call
def get_managedproject(self, project_id: str) -> ManagedProjectDefinition:
"""Return managed project details.
Please refer to the concrete managed project definition used for
more information on its content.
# Required parameters
- project_id: a non-empty string
# Returned value
An instance of #::ManagedProjectDefinition.
"""
ensure_nonemptystring('project_id')
return self._get_mpdmanager().get_managedproject(project_id)
@api_call
def create_managedproject(
self, project_id: str, project: ManagedProjectDefinition
) -> None:
"""Create new managed project.
# Required parameters
- project_id: a string
- project: a dictionary
# Raised exceptions
`project_id` must not be the ID of an existing managed project.
If it is, an _ApiError_ exception will be raised.
"""
ensure_nonemptystring('project_id')
ensure_instance('project', dict)
self._get_mpdmanager().create_managedproject(project_id, project)
@api_call
def update_managedproject(
self, project_id: str, project: ManagedProjectDefinition
) -> None:
"""Update managed project definition.
# Required parameters
- project_id: a string
- project: a dictionary
# Raised exceptions
If no existing managed project with the provided ID exists,
an _ApiError_ exception is raised.
"""
ensure_nonemptystring('project_id')
ensure_instance('project', dict)
self._get_mpdmanager().update_managedproject(project_id, project)
def render_managedproject(
self, project_id: str, target: Any = None
) -> Any:
"""Render the managed project on the specified target."""
raise NotImplementedError
# pushing projects
def get_push_strategy(
self, project: ManagedProjectDefinition, context: Mapping[str, Any]
) -> Iterable[Callable[[ManagedProjectDefinition], None]]:
"""Return the push strategy.
!!! important
Subclasses must implement this method if the platform allows
for managed project pushes.
This method is called by #push_managedproject() to get the _push
strategy_ for the given project and context.
A push strategy is an iterable that returns _push steps_.
A push step is a function that takes one argument, the project
definition, and returns no value but may raise an exception.
Steps are called in order by #push_managedproject().
If a step raises an exception, the possible remaining steps are
ignored, and the rollback strategy will be queried and executed
if the exception is an _ApiError_ exception.
If all steps complete successfully, #push_managedproject() will
return True.
# Required parameters
- project: a _ManagedProjectDefinition_
- context: a dictionary
# Returned value
A possibly empty list of push steps (callables taking a
#::ManagedProjectDefinition as their only parameter).
"""
raise NotImplementedError
def get_rollback_strategy(
self,
project: ManagedProjectDefinition,
trace: Iterable[Callable[[ManagedProjectDefinition], None]],
ex: ApiError,
) -> Iterable[Callable[[ManagedProjectDefinition], None]]:
"""Return the failed push rollback strategy.
This function is called by #push_managedproject() if an
_ApiError_ exception occurs while executing the push strategy.
By default, it returns a _rollback strategy_ that simply
re-raise the exception.
A platform may choose to refine this rollback strategy,
possibly allowing for 'clean' failures.
A rollback strategy is an iterable that returns _rollback
steps_.
A rollback step is a function that takes one argument, the
project definition (a #::ManagedProjectDefinition).
If the returned strategy is empty, nothing will be attempted,
and the #push_managedproject() function will simply return
False.
If the returned strategy is not empty, steps will be called in
order. If a step raises an exception, it will be propagated
(and the possible remaining steps will be ignored).
If all steps complete successfully (i.e., they don't raise an
exception), the #push_managedproject() function will return
False with no further ado.
# Required parameters
- project: a #::ManagedProjectDefinition
- trace: a list of performed actions, the last one having
raised an _ApiError_ exception
- ex: the raised exception
If `trace` is the empty list, the exception was raised while
acquiring the push strategy.
# Returned value
A possibly empty list of rollback steps (callables taking a
single parameter, a #::ManagedProjectDefinition).
"""
# pylint: disable=unused-argument,no-self-use
def _raise(_: ManagedProjectDefinition) -> None:
raise ex
return [_raise]
@api_call
def push_managedproject(
self,
project_id: str,
context: Optional[MutableMapping[str, Any]] = None,
) -> bool:
"""Push (aka publish) managed project on platform.
This method queries the platform push strategy by calling
#get_push_strategy() and execute it.
If an _ApiError_ exception occurs while executing the push
strategy, a roll-back strategy is queried by calling
#get_rollback_strategy() and is then performed.
# Required parameters
- project_id: a non-empty string
# Optional parameters
- context: a dictionary (None by default)
If `context` is provided, a transient `_trace` entry will be
added (or, if already there, will be reinitialized) and will
contain the resulting execution _trace_ (the collection of
values passed to info, warning, and debug). The `_trace` value
is a list of tuples: (stamp, severity, message). The list is
ordered (most recent entry last).
# Returned value
A boolean. True if the managed project is successfully pushed,
False if a successful rollback was performed.
Raises an exception otherwise.
"""
ensure_nonemptystring('project_id')
ensure_noneorinstance('context', dict)
project = self.get_managedproject(project_id)
if context is None:
context = {}
trace = []
try:
for step in self.get_push_strategy(project, context):
trace.append(step)
step(project)
except ApiError as ex:
context['_trace'] = project['_transient']['trace']
for step in self.get_rollback_strategy(project, trace, ex):
step(project)
return False
context['_trace'] = project['_transient']['trace']
return True
# Helpers
def _get_mpdmanager(self) -> ManagedProjectDefinitionManager:
if self._mpdmanager is None:
self._mpdmanager = self.get_property('managedprojectdefinitions')
if self._mpdmanager is None:
raise ApiError('ManagedProjectDefinitionManager is None.')
self._mpdmanager.platform = self
return self._mpdmanager
def _get_mamanager(self) -> ManagedAccountManager:
if self._mamanager is None:
self._mamanager = self.get_property('managedaccounts')
if self._mpdmanager is None:
raise ApiError('ManagedAccountManager is None.')
self._mamanager.platform = self
return self._mamanager | zabel | /core/services/__init__.py | __init__.py |
from enum import Enum
from typing import Union, Literal
class Units(Enum):
    """Measurement units supported by the library.

    Each member's value is a string identifier of the form
    ``"<Dimension>:<unit name>"`` (e.g. ``'Length:metres'``).
    ``NATIVE`` has the empty string as its value and denotes the
    device's native units.
    """

    NATIVE = ''
    LENGTH_METRES = 'Length:metres'
    LENGTH_CENTIMETRES = 'Length:centimetres'
    LENGTH_MILLIMETRES = 'Length:millimetres'
    LENGTH_MICROMETRES = 'Length:micrometres'
    LENGTH_NANOMETRES = 'Length:nanometres'
    LENGTH_INCHES = 'Length:inches'
    VELOCITY_METRES_PER_SECOND = 'Velocity:metres per second'
    VELOCITY_CENTIMETRES_PER_SECOND = 'Velocity:centimetres per second'
    VELOCITY_MILLIMETRES_PER_SECOND = 'Velocity:millimetres per second'
    VELOCITY_MICROMETRES_PER_SECOND = 'Velocity:micrometres per second'
    VELOCITY_NANOMETRES_PER_SECOND = 'Velocity:nanometres per second'
    VELOCITY_INCHES_PER_SECOND = 'Velocity:inches per second'
    ACCELERATION_METRES_PER_SECOND_SQUARED = 'Acceleration:metres per second squared'
    ACCELERATION_CENTIMETRES_PER_SECOND_SQUARED = 'Acceleration:centimetres per second squared'
    ACCELERATION_MILLIMETRES_PER_SECOND_SQUARED = 'Acceleration:millimetres per second squared'
    ACCELERATION_MICROMETRES_PER_SECOND_SQUARED = 'Acceleration:micrometres per second squared'
    ACCELERATION_NANOMETRES_PER_SECOND_SQUARED = 'Acceleration:nanometres per second squared'
    ACCELERATION_INCHES_PER_SECOND_SQUARED = 'Acceleration:inches per second squared'
    ANGLE_DEGREES = 'Angle:degrees'
    ANGLE_RADIANS = 'Angle:radians'
    ANGULAR_VELOCITY_DEGREES_PER_SECOND = 'Angular Velocity:degrees per second'
    ANGULAR_VELOCITY_RADIANS_PER_SECOND = 'Angular Velocity:radians per second'
    ANGULAR_ACCELERATION_DEGREES_PER_SECOND_SQUARED = 'Angular Acceleration:degrees per second squared'
    ANGULAR_ACCELERATION_RADIANS_PER_SECOND_SQUARED = 'Angular Acceleration:radians per second squared'
    AC_ELECTRIC_CURRENT_AMPERES_PEAK = 'AC Electric Current:amperes peak'
    AC_ELECTRIC_CURRENT_AMPERES_RMS = 'AC Electric Current:amperes RMS'
    PERCENT_PERCENT = 'Percent:percent'
    DC_ELECTRIC_CURRENT_AMPERES = 'DC Electric Current:amperes'
    FORCE_NEWTONS = 'Force:newtons'
    FORCE_MILLINEWTONS = 'Force:millinewtons'
    FORCE_POUNDS_FORCE = 'Force:pounds-force'
    FORCE_KILONEWTONS = 'Force:kilonewtons'
    TIME_SECONDS = 'Time:seconds'
    TIME_MILLISECONDS = 'Time:milliseconds'
    TIME_MICROSECONDS = 'Time:microseconds'
    TORQUE_NEWTON_METRES = 'Torque:newton metres'
    TORQUE_NEWTON_CENTIMETRES = 'Torque:newton centimetres'
    TORQUE_POUND_FORCE_FEET = 'Torque:pound-force-feet'
    TORQUE_OUNCE_FORCE_INCHES = 'Torque:ounce-force-inches'
    INERTIA_GRAMS = 'Inertia:grams'
    INERTIA_KILOGRAMS = 'Inertia:kilograms'
    INERTIA_MILLIGRAMS = 'Inertia:milligrams'
    INERTIA_POUNDS = 'Inertia:pounds'
    INERTIA_OUNCES = 'Inertia:ounces'
    ROTATIONAL_INERTIA_GRAM_SQUARE_METRE = 'Rotational Inertia:gram-square metre'
    ROTATIONAL_INERTIA_KILOGRAM_SQUARE_METRE = 'Rotational Inertia:kilogram-square metre'
    ROTATIONAL_INERTIA_POUND_SQUARE_FEET = 'Rotational Inertia:pound-square-feet'
    FORCE_CONSTANT_NEWTONS_PER_AMP = 'Force Constant:newtons per amp'
    FORCE_CONSTANT_MILLINEWTONS_PER_AMP = 'Force Constant:millinewtons per amp'
    FORCE_CONSTANT_KILONEWTONS_PER_AMP = 'Force Constant:kilonewtons per amp'
    FORCE_CONSTANT_POUNDS_FORCE_PER_AMP = 'Force Constant:pounds-force per amp'
    TORQUE_CONSTANT_NEWTON_METRES_PER_AMP = 'Torque Constant:newton metres per amp'
    TORQUE_CONSTANT_MILLINEWTON_METRES_PER_AMP = 'Torque Constant:millinewton metres per amp'
    TORQUE_CONSTANT_KILONEWTON_METRES_PER_AMP = 'Torque Constant:kilonewton metres per amp'
    TORQUE_CONSTANT_POUND_FORCE_FEET_PER_AMP = 'Torque Constant:pound-force-feet per amp'
    VOLTAGE_VOLTS = 'Voltage:volts'
    VOLTAGE_MILLIVOLTS = 'Voltage:millivolts'
    VOLTAGE_MICROVOLTS = 'Voltage:microvolts'
    CURRENT_CONTROLLER_PROPORTIONAL_GAIN_VOLTS_PER_AMP = 'Current Controller Proportional Gain:volts per amp'
    CURRENT_CONTROLLER_PROPORTIONAL_GAIN_MILLIVOLTS_PER_AMP = 'Current Controller Proportional Gain:millivolts per amp'
    CURRENT_CONTROLLER_PROPORTIONAL_GAIN_MICROVOLTS_PER_AMP = 'Current Controller Proportional Gain:microvolts per amp'
    CURRENT_CONTROLLER_INTEGRAL_GAIN_VOLTS_PER_AMP_PER_SECOND = 'Current Controller Integral Gain:volts per amp per second'
    CURRENT_CONTROLLER_INTEGRAL_GAIN_MILLIVOLTS_PER_AMP_PER_SECOND = 'Current Controller Integral Gain:millivolts per amp per second'
    CURRENT_CONTROLLER_INTEGRAL_GAIN_MICROVOLTS_PER_AMP_PER_SECOND = 'Current Controller Integral Gain:microvolts per amp per second'
    CURRENT_CONTROLLER_DERIVATIVE_GAIN_VOLTS_SECOND_PER_AMP = 'Current Controller Derivative Gain:volts second per amp'
    CURRENT_CONTROLLER_DERIVATIVE_GAIN_MILLIVOLTS_SECOND_PER_AMP = 'Current Controller Derivative Gain:millivolts second per amp'
    CURRENT_CONTROLLER_DERIVATIVE_GAIN_MICROVOLTS_SECOND_PER_AMP = 'Current Controller Derivative Gain:microvolts second per amp'
    RESISTANCE_KILOOHMS = 'Resistance:kiloohms'
    RESISTANCE_OHMS = 'Resistance:ohms'
    RESISTANCE_MILLIOHMS = 'Resistance:milliohms'
    RESISTANCE_MICROOHMS = 'Resistance:microohms'
    RESISTANCE_NANOOHMS = 'Resistance:nanoohms'
    INDUCTANCE_HENRIES = 'Inductance:henries'
    INDUCTANCE_MILLIHENRIES = 'Inductance:millihenries'
    INDUCTANCE_MICROHENRIES = 'Inductance:microhenries'
    INDUCTANCE_NANOHENRIES = 'Inductance:nanohenries'
    VOLTAGE_CONSTANT_VOLT_SECONDS_PER_RADIAN = 'Voltage Constant:volt seconds per radian'
    VOLTAGE_CONSTANT_MILLIVOLT_SECONDS_PER_RADIAN = 'Voltage Constant:millivolt seconds per radian'
    VOLTAGE_CONSTANT_MICROVOLT_SECONDS_PER_RADIAN = 'Voltage Constant:microvolt seconds per radian'
    ABSOLUTE_TEMPERATURE_DEGREES_CELSIUS = 'Absolute Temperature:degrees Celsius'
    ABSOLUTE_TEMPERATURE_KELVINS = 'Absolute Temperature:kelvins'
    ABSOLUTE_TEMPERATURE_DEGREES_FAHRENHEIT = 'Absolute Temperature:degrees Fahrenheit'
    ABSOLUTE_TEMPERATURE_DEGREES_RANKINE = 'Absolute Temperature:degrees Rankine'
# Type aliases accepting either a ``Units`` member or one of its shorthand
# string literals. The literal-to-member mapping is ``LITERALS_TO_UNITS``
# below; note both ASCII ("um", "deg/s^2") and Unicode ("µm", "°/s²")
# spellings are accepted for the same unit.
UnitsAndLiterals = Union[Units, Literal["m", "cm", "mm", "µm", "um", "nm", "in", "m/s", "cm/s", "mm/s", "µm/s", "um/s", "nm/s", "in/s", "m/s²", "m/s^2", "cm/s²", "cm/s^2", "mm/s²", "mm/s^2", "µm/s²", "um/s^2", "nm/s²", "nm/s^2", "in/s²", "in/s^2", "°", "deg", "rad", "°/s", "deg/s", "rad/s", "°/s²", "deg/s^2", "rad/s²", "rad/s^2", "%", "s", "ms", "µs", "us"]]
# Dimension-specific subsets of ``UnitsAndLiterals``.
LengthUnits = Union[Units, Literal["m", "cm", "mm", "µm", "um", "nm", "in", "°", "deg", "rad"]]
VelocityUnits = Union[Units, Literal["m/s", "cm/s", "mm/s", "µm/s", "um/s", "nm/s", "in/s", "°/s", "deg/s", "rad/s"]]
AccelerationUnits = Union[Units, Literal["m/s²", "m/s^2", "cm/s²", "cm/s^2", "mm/s²", "mm/s^2", "µm/s²", "um/s^2", "nm/s²", "nm/s^2", "in/s²", "in/s^2", "°/s²", "deg/s^2", "rad/s²", "rad/s^2"]]
TimeUnits = Union[Units, Literal["s", "ms", "µs", "us"]]
# Maps each shorthand literal to its ``Units`` member. Several literals are
# aliases for the same member (e.g. "µm"/"um", "°"/"deg", "m/s²"/"m/s^2").
LITERALS_TO_UNITS = {
    "m": Units.LENGTH_METRES,
    "cm": Units.LENGTH_CENTIMETRES,
    "mm": Units.LENGTH_MILLIMETRES,
    "µm": Units.LENGTH_MICROMETRES,
    "um": Units.LENGTH_MICROMETRES,
    "nm": Units.LENGTH_NANOMETRES,
    "in": Units.LENGTH_INCHES,
    "m/s": Units.VELOCITY_METRES_PER_SECOND,
    "cm/s": Units.VELOCITY_CENTIMETRES_PER_SECOND,
    "mm/s": Units.VELOCITY_MILLIMETRES_PER_SECOND,
    "µm/s": Units.VELOCITY_MICROMETRES_PER_SECOND,
    "um/s": Units.VELOCITY_MICROMETRES_PER_SECOND,
    "nm/s": Units.VELOCITY_NANOMETRES_PER_SECOND,
    "in/s": Units.VELOCITY_INCHES_PER_SECOND,
    "m/s²": Units.ACCELERATION_METRES_PER_SECOND_SQUARED,
    "m/s^2": Units.ACCELERATION_METRES_PER_SECOND_SQUARED,
    "cm/s²": Units.ACCELERATION_CENTIMETRES_PER_SECOND_SQUARED,
    "cm/s^2": Units.ACCELERATION_CENTIMETRES_PER_SECOND_SQUARED,
    "mm/s²": Units.ACCELERATION_MILLIMETRES_PER_SECOND_SQUARED,
    "mm/s^2": Units.ACCELERATION_MILLIMETRES_PER_SECOND_SQUARED,
    "µm/s²": Units.ACCELERATION_MICROMETRES_PER_SECOND_SQUARED,
    "um/s^2": Units.ACCELERATION_MICROMETRES_PER_SECOND_SQUARED,
    "nm/s²": Units.ACCELERATION_NANOMETRES_PER_SECOND_SQUARED,
    "nm/s^2": Units.ACCELERATION_NANOMETRES_PER_SECOND_SQUARED,
    "in/s²": Units.ACCELERATION_INCHES_PER_SECOND_SQUARED,
    "in/s^2": Units.ACCELERATION_INCHES_PER_SECOND_SQUARED,
    "°": Units.ANGLE_DEGREES,
    "deg": Units.ANGLE_DEGREES,
    "rad": Units.ANGLE_RADIANS,
    "°/s": Units.ANGULAR_VELOCITY_DEGREES_PER_SECOND,
    "deg/s": Units.ANGULAR_VELOCITY_DEGREES_PER_SECOND,
    "rad/s": Units.ANGULAR_VELOCITY_RADIANS_PER_SECOND,
    "°/s²": Units.ANGULAR_ACCELERATION_DEGREES_PER_SECOND_SQUARED,
    "deg/s^2": Units.ANGULAR_ACCELERATION_DEGREES_PER_SECOND_SQUARED,
    "rad/s²": Units.ANGULAR_ACCELERATION_RADIANS_PER_SECOND_SQUARED,
    "rad/s^2": Units.ANGULAR_ACCELERATION_RADIANS_PER_SECOND_SQUARED,
    "%": Units.PERCENT_PERCENT,
    "s": Units.TIME_SECONDS,
    "ms": Units.TIME_MILLISECONDS,
    "µs": Units.TIME_MICROSECONDS,
    "us": Units.TIME_MICROSECONDS,
}
def units_from_literals(units: UnitsAndLiterals) -> Units:
    """Resolve a unit given either as a Units member or as a literal string.

    Args:
        units: A Units enum member (returned unchanged) or one of the
            literal unit strings from LITERALS_TO_UNITS.

    Raises:
        ValueError: The string is not a recognized unit literal.
    """
    if isinstance(units, Units):
        return units
    mapped = LITERALS_TO_UNITS.get(units)
    if mapped is not None:
        return mapped
    raise ValueError(f"Invalid units: {units}")
from threading import Lock
import asyncio
import functools
from typing import TYPE_CHECKING, Optional, List, Set, Callable, Any # pylint: disable=unused-import
from ctypes import c_void_p, c_int64
import queue
from google.protobuf.message import Message
from .convert_exception import convert_exception
from .protobufs import main_pb2
from .serialization import serialize, deserialize
from .bindings import c_call, CALLBACK
class CallbackWrap:
    """Holds a ctypes CALLBACK wrapper so callers can keep it referenced.

    The native library stores only a raw function pointer, so the Python-side
    wrapper object must be kept alive until the callback has fired.
    """

    def __init__(self, callbackFunc: Callable[[c_void_p, c_int64], None]):
        # Wrap the Python function into the ctypes function-pointer type that
        # the native library expects.
        self._wrapped = CALLBACK(callbackFunc)

    @property
    def callback(self) -> Any:
        """The ctypes function pointer to hand to the native library."""
        return self._wrapped
# Registry of in-flight async callbacks.  We must store each callback in a set
# to prevent garbage collection in case the future gets cancelled before the
# native library invokes the callback.  Guarded by callbacks_lock because
# callbacks fire on a native thread.
callbacks: Set[CallbackWrap] = set()
callbacks_lock = Lock()
def call(request: str, data: Optional[Message] = None, response_data: Optional[Message] = None) -> None:
    """Invoke a library request and block until its response arrives.

    Args:
        request: Name of the library request to perform.
        data: Optional request payload message.
        response_data: Optional message to populate with the response payload.

    Raises:
        RuntimeError: The native library rejected the call.
    """
    request_buffer = get_request_buffer(request, data)
    result_queue: 'queue.Queue[List[bytes]]' = queue.Queue(maxsize=1)

    def on_response(native_buffer: c_void_p, _tag: c_int64) -> None:
        result_queue.put(deserialize(native_buffer))

    # The local reference keeps the ctypes wrapper alive until the native
    # library has invoked it (we block on the queue below).
    native_callback = CALLBACK(on_response)
    code = c_call(request_buffer, 0, native_callback, 1)
    if code != 0:
        raise RuntimeError(f"Invalid result code: {code}")
    process_response(result_queue.get(), response_data)
def set_result(future: 'asyncio.Future[List[bytes]]', resp_buffer: List[bytes]) -> None:
    """Fulfill ``future`` with ``resp_buffer`` unless it was already settled.

    The future may have been cancelled (or otherwise completed) before the
    native callback fires; setting a result twice would raise, so skip it.
    """
    if future.done():
        return
    future.set_result(resp_buffer)
async def call_async(request: str, data: Optional[Message] = None, response_data: Optional[Message] = None) -> None:
    """Invoke a library request asynchronously and await its response.

    Args:
        request: Name of the library request to perform.
        data: Optional request payload message.
        response_data: Optional message to populate with the response payload.

    Raises:
        RuntimeError: The native library rejected the call.
    """
    buffer = get_request_buffer(request, data)
    cb: CallbackWrap = None  # type: ignore
    loop = asyncio.get_event_loop()
    future = loop.create_future()

    def callback(response_data: c_void_p, _tag: c_int64) -> None:
        resp_buffer = deserialize(response_data)
        try:
            # Hop back onto the event loop thread to complete the future.
            loop.call_soon_threadsafe(functools.partial(set_result, future, resp_buffer))
        except RuntimeError:
            # the loop may be done already
            pass
        with callbacks_lock:
            callbacks.remove(cb)

    cb = CallbackWrap(callback)
    # Register the wrapper so it cannot be garbage collected while the native
    # call is in flight (e.g. if the awaiting future gets cancelled).
    with callbacks_lock:
        callbacks.add(cb)
    result = c_call(buffer, 0, cb.callback, 1)
    if result != 0:
        # On failure the native library never invokes the callback, so the
        # wrapper must be unregistered here or it would leak in `callbacks`.
        with callbacks_lock:
            callbacks.discard(cb)
        raise RuntimeError(f"Invalid result code: {result}")
    response_buffers = await future
    process_response(response_buffers, response_data)
def call_sync(request: str, data: Optional[Message] = None, response_data: Optional[Message] = None) -> None:
    """Invoke a library request that the native library answers synchronously.

    The callback fires before ``c_call`` returns, so no waiting is needed.

    Args:
        request: Name of the library request to perform.
        data: Optional request payload message.
        response_data: Optional message to populate with the response payload.

    Raises:
        RuntimeError: The native library rejected the call.
    """
    buffer = get_request_buffer(request, data)
    received: Any = None

    def callback(native_buffer: c_void_p, _tag: c_int64) -> None:
        nonlocal received
        received = deserialize(native_buffer)

    cb = CALLBACK(callback)
    code = c_call(buffer, 0, cb, 0)
    if code != 0:
        raise RuntimeError(f"Invalid result code: {code}")
    process_response(received, response_data)
def get_request_buffer(request: str, data: Optional[Message]) -> bytes:
    """Serialize a request header plus optional payload for the native call.

    Args:
        request: Name of the library request to perform.
        data: Optional request payload message.

    Returns:
        The serialized message buffer to pass to ``c_call``.
    """
    header = main_pb2.Request()
    header.request = request
    parts = [header.SerializeToString()]
    if data is not None:
        parts.append(data.SerializeToString())
    return serialize(parts)
def process_response(response_buffers: List[bytes], response_data: Optional[Message]) -> None:
    """Decode the native library's response and populate ``response_data``.

    Args:
        response_buffers: Serialized response header plus optional payload.
        response_data: Message to fill with the payload, or None when no
            payload is expected.

    Raises:
        RuntimeError: Payload presence does not match the caller's
            expectation, or an exception converted from the library's
            error reply.
    """
    header = main_pb2.Response()
    header.ParseFromString(response_buffers[0])
    has_payload = len(response_buffers) > 1

    if header.response != main_pb2.Response.OK:
        if has_payload:
            raise convert_exception(header.error_type, header.error_message, response_buffers[1])
        raise convert_exception(header.error_type, header.error_message)

    if has_payload and response_data is None:
        raise RuntimeError("Response from library is ignored, response_data==None")
    if not has_payload and response_data is not None:
        raise RuntimeError("No response from library")
    if has_payload:
        response_data.ParseFromString(response_buffers[1])
import atexit
from concurrent.futures import ThreadPoolExecutor
from typing import List
from ctypes import c_void_p, c_int64
import traceback
from rx.subject import Subject
from .protobufs import main_pb2
from .serialization import deserialize
from .bindings import c_set_event_handler, CALLBACK
# Stream of (event_name, event_data) tuples published to library subscribers.
events = Subject()
# A single worker thread so events are dispatched one at a time, in order.
event_executor = ThreadPoolExecutor(max_workers=1)  # pylint: disable=consider-using-with
@atexit.register
def on_shutdown() -> None:
    """Stop the event dispatch worker thread when the interpreter exits."""
    event_executor.shutdown()
def event_handler(event_data: c_void_p, _tag: c_int64) -> None:
    """Native-thread entry point: decode an event and queue it for dispatch."""
    try:
        buffers = deserialize(event_data)
    except RuntimeError:
        # Deserialization can race with Python shutting down; this cannot be
        # prevented, so just drop the event.
        return
    try:
        event_executor.submit(process_event_catch, buffers)
    except RuntimeError:
        # The executor may already be shut down during interpreter exit.
        return
# Register the handler with the native library.  The CALLBACK wrapper is bound
# to a module-level name so it is never garbage collected.
event_handler_cb = CALLBACK(event_handler)
c_set_event_handler(0, event_handler_cb)

# Maps event names to their protobuf payload classes; process_event() treats a
# value of None as "this event carries no payload".
parsers = {
    'test/event': main_pb2.TestEvent,
    'interface/unknown_response': main_pb2.UnknownResponseEvent,
    'binary/interface/unknown_response': main_pb2.UnknownBinaryResponseEvent,
    'interface/alert': main_pb2.AlertEvent,
    'binary/interface/reply_only': main_pb2.BinaryReplyOnlyEvent,
    'interface/disconnected': main_pb2.DisconnectedEvent,
}
def process_event_catch(event_buffers: List[bytes]) -> None:
    """Dispatch one event, reporting (but never propagating) any failure.

    Runs on the event worker thread; an escaping exception would kill the
    worker, so every error is caught and printed instead.
    """
    try:
        process_event(event_buffers)
    except BaseException:  # pylint: disable=broad-except
        print("Unhandled exception in event:")
        traceback.print_exc()
def process_event(event_buffers: List[bytes]) -> None:
    """Parse an event envelope plus optional payload and publish it to ``events``.

    Args:
        event_buffers: Serialized event envelope, optionally followed by a
            serialized payload.

    Raises:
        RuntimeError: The event name is unknown, or payload presence does not
            match the registered parser.
    """
    envelope = main_pb2.Event()
    envelope.ParseFromString(event_buffers[0])

    if envelope.event not in parsers:
        raise RuntimeError(f"Unknown event: {envelope.event}")
    parser = parsers[envelope.event]

    expects_payload = parser is not None
    has_payload = len(event_buffers) > 1
    if has_payload != expects_payload:
        raise RuntimeError(f"Event has no data or parser not provided for {envelope.event}")

    payload = None
    if has_payload:
        payload = parser()  # type: ignore
        payload.ParseFromString(event_buffers[1])

    events.on_next((envelope.event, payload))
from .connection import Connection as Connection
from .device import Device as Device
from .axis import Axis as Axis
from .all_axes import AllAxes as AllAxes
from .axis_settings import AxisSettings as AxisSettings
from .device_settings import DeviceSettings as DeviceSettings
from .io_port_type import IoPortType as IoPortType
from .warnings import Warnings as Warnings
from .warning_flags import WarningFlags as WarningFlags
from .unknown_response_event import UnknownResponseEvent as UnknownResponseEvent
from .device_identity import DeviceIdentity as DeviceIdentity
from .device_io import DeviceIO as DeviceIO
from .device_io_info import DeviceIOInfo as DeviceIOInfo
from .axis_identity import AxisIdentity as AxisIdentity
from .message_type import MessageType as MessageType
from .axis_type import AxisType as AxisType
from .alert_event import AlertEvent as AlertEvent
from .lockstep_axes import LockstepAxes as LockstepAxes
from .lockstep import Lockstep as Lockstep
from .oscilloscope import Oscilloscope as Oscilloscope
from .oscilloscope_data import OscilloscopeData as OscilloscopeData
from .oscilloscope_data_source import OscilloscopeDataSource as OscilloscopeDataSource
from .oscilloscope_capture_properties import OscilloscopeCaptureProperties as OscilloscopeCaptureProperties
from .response import Response as Response
from .setting_constants import SettingConstants as SettingConstants
from .stream import Stream as Stream
from .stream_buffer import StreamBuffer as StreamBuffer
from .stream_mode import StreamMode as StreamMode
from .stream_axis_type import StreamAxisType as StreamAxisType
from .stream_axis_definition import StreamAxisDefinition as StreamAxisDefinition
from .transport import Transport as Transport
from .servo_tuner import ServoTuner as ServoTuner
from .servo_tuning_paramset import ServoTuningParamset as ServoTuningParamset
from .paramset_info import ParamsetInfo as ParamsetInfo
from .pid_tuning import PidTuning as PidTuning
from .servo_tuning_param import ServoTuningParam as ServoTuningParam
from .simple_tuning_param_definition import SimpleTuningParamDefinition as SimpleTuningParamDefinition
from .storage import AxisStorage as AxisStorage, DeviceStorage as DeviceStorage
from .conversion_factor import ConversionFactor as ConversionFactor
from .can_set_state_axis_response import CanSetStateAxisResponse as CanSetStateAxisResponse
from .can_set_state_device_response import CanSetStateDeviceResponse as CanSetStateDeviceResponse
from .process_controller import ProcessController as ProcessController
from .process import Process as Process
from .process_controller_source import ProcessControllerSource as ProcessControllerSource
from .process_controller_source_sensor import ProcessControllerSourceSensor as ProcessControllerSourceSensor
from .process_controller_mode import ProcessControllerMode as ProcessControllerMode
from .pvt_sequence import PvtSequence as PvtSequence
from .pvt_buffer import PvtBuffer as PvtBuffer
from .pvt_mode import PvtMode as PvtMode
from .pvt_axis_type import PvtAxisType as PvtAxisType
from .pvt_axis_definition import PvtAxisDefinition as PvtAxisDefinition | zaber-motion | /zaber_motion-4.3.0.tar.gz/zaber_motion-4.3.0/zaber_motion/ascii/__init__.py | __init__.py |
Zaber Python Serial Library
===========================
A software library to help control Zaber_ devices over a serial port.
.. _Zaber: http://www.zaber.com
Installation
------------
The Zaber Python Serial Library can be installed from PyPI using pip::
pip install zaber.serial
It can also be installed directly using the ``setup.py`` file provided
with the source code::
python setup.py install
License
-------
This library is licensed for use under the `Apache Software License
v2.0`_. A copy of this license is provided with the source in the file
``LICENSE.txt``.
.. _Apache Software License v2.0: http://apache.org/licenses/LICENSE-2.0
| zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/README.rst | README.rst |
Zaber Python Serial Library
===========================
The Zaber Serial Library is a small Python library for communicating
with Zaber devices over a serial port. It is built on top of
`PySerial`_.
The library is tested with Python 2.7, 3.5 and later. It is released under
the `Apache License`_.
Please note this library is now deprecated in favor of the
`Zaber Motion Library`_.
.. _`PySerial`: https://pythonhosted.org/pyserial/
.. _`Apache License`: http://apache.org/licenses/LICENSE-2.0
.. _`Zaber Motion Library`: https://pypi.org/project/zaber-motion/
| zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/DESCRIPTION.rst | DESCRIPTION.rst |
import logging
import time
from .asciiaxis import AsciiAxis
from .asciicommand import AsciiCommand
from .unexpectedreplyerror import UnexpectedReplyError
from .asciimovementmixin import AsciiMovementMixin
from .asciilockstep import AsciiLockstep
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class AsciiDevice(AsciiMovementMixin):
    """Represents an ASCII device. It is safe to use in multi-threaded
    environments.

    Attributes:
        port: The port to which this device is connected.
        address: The address of this device. 1-99.
    """

    def __init__(self, port, address):
        """
        Args:
            port: An AsciiSerial object representing the port to which
                this device is connected.
            address: An integer representing the address of this
                device. It must be between 1-99.

        Raises:
            ValueError: The address was not between 1 and 99.
        """
        AsciiMovementMixin.__init__(self)

        if address < 1 or address > 99:
            raise ValueError("Address must be between 1 and 99.")

        self.address = address
        self.port = port

    def axis(self, number):
        """Returns an AsciiAxis with this device as a parent and the
        number specified.

        Args:
            number: The number of the axis. 1-9.

        Notes:
            This function will always return a *new* AsciiAxis instance.
            If you are working extensively with axes, you may want to
            create just one set of AsciiAxis objects by directly using
            the AsciiAxis constructor instead of this function to avoid
            creating lots and lots of objects.

        Returns:
            A new AsciiAxis instance to represent the axis specified.
        """
        return AsciiAxis(self, number)

    def lockstep(self, lockstep_group=1):
        """Returns an AsciiLockstep using this device for lockstep (synchronized movement of axes).

        Args:
            lockstep_group: The number of the lockstep group between 1-9.
                Defaults to first lockstep group of the device.

        Notes:
            This function will always return a *new* AsciiLockstep instance.
            If you are working extensively with locksteps, you may want to
            preserve returned instance instead of calling the function repeatedly.

        Returns:
            A new AsciiLockstep instance to represent the lockstep group specified.
        """
        return AsciiLockstep(self, lockstep_group=lockstep_group)

    def send(self, message):
        r"""Sends a message to the device, then waits for a reply.

        Args:
            message: A string or AsciiCommand representing the message
                to be sent to the device.

        Notes:
            Regardless of the device address specified in the message,
            this function will always send the message to this device.
            The axis number will be preserved.

            This behaviour is intended to prevent the user from
            accidentally sending a message to all devices instead of
            just one. For example, if ``device1`` is an AsciiDevice
            with an address of 1, device1.send("home") will send the
            ASCII string "/1 0 home\\r\\n", instead of sending the
            command "globally" with "/0 0 home\\r\\n".

        Raises:
            UnexpectedReplyError: The reply received was not sent by
                the expected device.

        Returns:
            An AsciiReply containing the reply received.
        """
        if isinstance(message, (str, bytes)):
            message = AsciiCommand(message)

        # Always send an AsciiCommand to *this* device.
        message.device_address = self.address

        with self.port.lock:
            # Write and read to the port while holding the lock
            # to ensure we get the correct response.
            self.port.write(message)

            reply = self.port.read()

        # A reply with a different address, axis, or message ID belongs to
        # some other command, so surface it as an error instead of returning
        # misleading data.
        if (reply.device_address != self.address or
                reply.axis_number != message.axis_number or
                reply.message_id != message.message_id):
            raise UnexpectedReplyError(
                "Received an unexpected reply from device with address {0:d}, "
                "axis {1:d}".format(reply.device_address, reply.axis_number),
                reply
            )
        return reply

    def poll_until_idle(self, axis_number=0):
        """Polls the device's status, blocking until it is idle.

        Args:
            axis_number: An optional integer specifying a particular
                axis whose status to poll. axis_number must be between
                0 and 9. If provided, the device will only report the
                busy status of the axis specified. When omitted, the
                device will report itself as busy if any axis is moving.

        Raises:
            UnexpectedReplyError: The reply received was not sent by
                the expected device.

        Returns:
            An AsciiReply containing the last reply received.
        """
        while True:
            # An empty command acts as a status query.
            reply = self.send(AsciiCommand(self.address, axis_number, ""))
            if reply.device_status == "IDLE":
                break
            # Brief pause between polls to avoid saturating the serial link.
            time.sleep(0.05)
        return reply

    def get_status(self, axis_number=0):
        """Queries the device for its status and returns the result.

        Args:
            axis_number: An optional integer specifying a particular
                axis whose status to query. axis_number must be between
                0 and 9. If provided, the device will only report the
                busy status of the axis specified. When omitted, the
                device will report itself as busy if any axis is moving.

        Raises:
            UnexpectedReplyError: The reply received was not sent by
                the expected device.

        Returns:
            A string containing either "BUSY" or "IDLE", depending on
            the response received from the device.
        """
        reply = self.send(AsciiCommand(self.address, axis_number, ""))
        return reply.device_status

    def get_position(self):
        """Queries the axis for its position and returns the result.

        Raises:
            UnexpectedReplyError: The reply received was not sent by the
                expected device and axis.

        Returns:
            A number representing the current device position in its native
            units of measure. See the device manual for unit conversions.
            If this command is used on a multi-axis device, the return value
            is the position of the first axis.
        """
        data = self.send("get pos").data
        # Multi-axis devices reply with one space-separated position per
        # axis; keep only the first.
        if " " in data:
            data = data.split(" ")[0]
        return int(data)
import logging
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class AsciiCommand(object):
    """Models a single command in Zaber's ASCII protocol.

    Attributes:
        device_address: An integer representing the address of the
            device to which to send this command. An address of 0
            targets all devices.
        axis_number: The integer number of the axis which should
            execute this command. 0 means the command applies to the
            whole device (all axes), i.e. it is "device scope".
        message_id: Optional. An integer to be used as a message ID,
            or None when no message ID is present. If a command has a
            message ID, the device sends a reply with a matching
            message ID. 0 is a valid message ID.
        data: The command text and any parameters, separated by
            spaces. The empty string is a valid value, commonly used
            as a "get status" query.
    """

    def __init__(self, *args):
        r"""
        Args:
            *args: Zero to three leading integers followed by one or
                more strings.

        Notes:
            Arguments are flattened into a token stream: integer
            arguments stay as-is, while each string argument is
            stripped of any leading '/' and trailing '\\r\\n' and then
            split on spaces. The first token that parses as an integer
            becomes ``device_address``, the second ``axis_number`` and
            the third ``message_id``. The first token that does not
            parse as an integer -- or any token once all three
            attributes are filled -- marks the start of ``data``,
            which is the rest of the stream joined by spaces. This
            mirrors how Zaber device firmware parses ASCII commands.

        Examples:
            All of the following constructors create identical
            AsciiCommand objects::

                >>> AsciiCommand("/1 0 move abs 10000\r\n")
                >>> AsciiCommand("1 move abs 10000")
                >>> AsciiCommand(1, 0, "move abs 10000")
                >>> AsciiCommand(1, "move abs 10000")
                >>> AsciiCommand("1", "move abs", "10000")
                >>> AsciiCommand(1, "move abs", 10000)

        Raises:
            TypeError: An argument was passed to the constructor which
                was neither an integer nor a string.
        """
        tokens = []
        for arg in args:
            if isinstance(arg, int):
                tokens.append(arg)
            elif isinstance(arg, (bytes, str)):
                text = arg.decode() if isinstance(arg, bytes) else arg
                # Strip protocol framing: leading '/' and trailing "\r\n".
                text = text.lstrip('/').rstrip('\r\n')
                tokens.extend(text.split(' '))
            else:
                raise TypeError("All arguments to AsciiCommand() must be "
                                "either strings or integers. An argument of "
                                "type {0:s} was passed.".format(str(type(arg))))

        self.data = ''
        unset = ["device_address", "axis_number", "message_id"]
        for index, token in enumerate(tokens):
            try:
                value = int(token)
            except ValueError:
                value = None
            if value is None or not unset:
                # Everything from this token onward is command data.
                self.data = ' '.join(str(piece) for piece in tokens[index:])
                break
            setattr(self, unset.pop(0), value)

        # Fill in defaults for attributes that were not supplied.
        for name in unset:
            setattr(self, name, None if name == "message_id" else 0)

    def encode(self):
        """Return a valid ASCII command based on this object's
        attributes.

        The returned bytes form a fully valid command, formatted
        according to Zaber's ASCII protocol.

        Returns:
            A valid, fully-formed ASCII command as a byte string,
            terminated by a carriage return and newline.
        """
        fields = ["{0:d}".format(self.device_address),
                  "{0:d}".format(self.axis_number)]
        if self.message_id is not None:
            fields.append("{0:d}".format(self.message_id))
        if self.data:
            fields.append(self.data)
        return ("/" + " ".join(fields) + "\r\n").encode()

    def __str__(self):
        """Return the encoded ASCII command as text, without the
        trailing carriage-return/newline terminator, for ease of
        printing.
        """
        return self.encode().decode().rstrip("\r\n")
import re
from .utils import isstring
class AsciiLockstepInfo(object):
    """Models a lockstep info reply in Zaber's ASCII protocol.

    Attributes:
        is_enabled: True when lockstep is enabled for this lockstep group.
        axis1: An integer between 1 and 9: the first axis for which the
            lockstep is enabled.
        axis2: An integer between 1 and 9: the second axis for which the
            lockstep is enabled.
        offset: An integer offset between axis1 and axis2 that is
            maintained during movement of the lockstep group. The offset
            is captured from the positions of both axes at the time the
            lockstep is enabled.
        twist: An integer giving the current twist between axis1 and
            axis2 after accounting for the offset. 0 means the axes are
            in perfect sync.

    Note: axis1/axis2/offset/twist are only set when ``is_enabled``
    is True.
    """

    def __init__(self, message_data):
        """
        Args:
            message_data: A string from AsciiReply's ``data`` attribute;
                it is parsed to populate the attributes of the new
                AsciiLockstepInfo.

        Raises:
            TypeError: The message_data is not a string.
            ValueError: The message_data string could not be parsed.
        """
        if not isstring(message_data):
            raise TypeError("message_data must be a string")

        if message_data == "disabled":
            self.is_enabled = False
            return
        self.is_enabled = True

        parsed = re.match(r"(\d+)\s(\d+)\s(-?\d+)\s(-?\d+)", message_data)
        if not parsed:
            raise ValueError("Failed to parse message_data: {}".format(message_data))

        self.axis1, self.axis2, self.offset, self.twist = (
            int(parsed.group(index)) for index in range(1, 5))

    def __str__(self):
        """Return a summary of all attributes, or the string "Disabled"
        when lockstep is not enabled.
        """
        if not self.is_enabled:
            return "Disabled"
        return "Axis1={},Axis2={},Offset={},Twist={}".format(
            self.axis1, self.axis2, self.offset, self.twist)
import logging
import sys
import serial
from .binarycommand import BinaryCommand
from .binaryreply import BinaryReply
from .timeouterror import TimeoutError
from .portlock import PortLock
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
# Every Binary protocol message is exactly six bytes long.
MESSAGE_LENGTH = 6
class BinarySerial(object):
    """A class for interacting with Zaber devices using the Binary protocol.

    This class defines a few simple methods for writing to and reading
    from a device connected over the serial port. It is safe to use in multi-
    threaded environments.

    Attributes:
        baudrate: An integer representing the desired communication baud rate.
            Valid bauds are 115200, 57600, 38400, 19200, and 9600.
        timeout: A number representing the number of seconds to wait for input
            before timing out. Floating-point numbers can be used to specify
            times shorter than one second. A value of None can also be used to
            specify an infinite timeout. A value of 0 specifies that all reads
            and writes should be non-blocking (return immediately without
            waiting). Defaults to 5.
        lock: The threading.RLock guarding the port. Each method takes the lock
            and is therefore thread safe. However, to ensure no other threads
            access the port across multiple method calls, the caller should
            acquire the lock and release it once all methods have returned.
    """

    def __init__(self, port, baud=9600, timeout=5, inter_char_timeout=0.5):
        """Creates a new instance of the BinarySerial class.

        Args:
            port: A string containing the name of the serial port to
                which to connect.
            baud: An integer representing the baud rate at which to
                communicate over the serial port.
            timeout: A number representing the number of seconds to wait
                for a reply. Fractional numbers are accepted and can be
                used to specify times shorter than a second.
            inter_char_timeout : A number representing the number of seconds
                to wait between bytes in a reply. If your computer is bad at
                reading incoming serial data in a timely fashion, try
                increasing this value.

        Notes:
            This class will open the port immediately upon
            instantiation. This follows the pattern set by PySerial,
            which this class uses internally to perform serial
            communication.

        Raises:
            TypeError: The port argument passed was not a string.
        """
        if not isinstance(port, str):
            raise TypeError("port must be a string.")
        try:
            # serial_for_url supports URL handlers (e.g. "socket://...") in
            # addition to plain device names.
            self._ser = serial.serial_for_url(port, do_not_open=True)
            self._ser.baudrate = baud
            self._ser.timeout = timeout
            self._ser.interCharTimeout = inter_char_timeout
            self._ser.open()
        except AttributeError:
            # serial_for_url not supported; use fallback
            self._ser = serial.Serial(port, baud, timeout=timeout,
                                      interCharTimeout=inter_char_timeout)
        self._lock = PortLock()

    def write(self, *args):
        r"""Writes a command to the port.

        This function accepts either a BinaryCommand object, a set
        of integer arguments, a list of integers, or a string or
        byte string.

        If passed integer arguments or a list of integers, those
        integers must be in the same order as would be passed to the
        BinaryCommand constructor (ie. device number, then command
        number, then data, and then an optional message ID).

        Args:
            *args: A BinaryCommand to be sent, or between 2 and 4
                integer arguments, or a list containing between 2 and
                4 integers, or a string or 6-byte byte string
                representing a properly-formatted Binary command.

        Notes:
            Passing integers or a list of integers is equivalent to
            passing a BinaryCommand with those integers as constructor
            arguments.

            For example, all of the following are equivalent::

                >>> write(BinaryCommand(1, 55, 1000))
                >>> write(1, 55, 1000)
                >>> write([1, 55, 1000])
                >>> write(struct.pack("<2Bl", 1, 55, 1000))
                >>> write('\x01\x37\xe8\x03\x00\x00')

        Raises:
            TypeError: The arguments passed to write() did not conform
                to the specification of ``*args`` above.
            ValueError: A string of length other than 6 was passed.
        """
        if len(args) == 1:
            message = args[0]
            if isinstance(message, list):
                message = BinaryCommand(*message)
        elif 1 < len(args) < 5:
            message = BinaryCommand(*args)  # pylint: disable=E1120
        else:
            raise TypeError("write() takes at least 1 and no more than 4 "
                            "arguments ({0:d} given)".format(len(args)))

        if isinstance(message, (str, bytes)):
            logger.debug("> %s", message)
            if len(message) != MESSAGE_LENGTH:
                raise ValueError("write of a string expects length 6.")
            if isinstance(message, bytes):
                # Already encoded, e.g. the output of struct.pack() on
                # Python 3; previously such input was wrongly rejected.
                data = message
            elif sys.version_info > (3, 0):
                # pyserial doesn't handle hex strings.
                data = bytes(message, "UTF-8")
            else:
                data = bytes(message)
        elif isinstance(message, BinaryCommand):
            data = message.encode()
            logger.debug("> %s", message)
        else:
            raise TypeError("write must be passed several integers, or a "
                            "string, list, or BinaryCommand.")
        with self._lock.write_lock:
            self._ser.write(data)

    def read(self, message_id=False):
        """Reads six bytes from the port and returns a BinaryReply.

        Args:
            message_id: True if the response is expected to have a
                message ID. Defaults to False.

        Returns:
            A BinaryCommand containing all of the information read from
            the serial port.

        Raises:
            zaber.serial.TimeoutError: No data was read before the
                specified timeout elapsed.
        """
        with self._lock.read_lock:
            reply = self._ser.read(MESSAGE_LENGTH)
        if len(reply) != MESSAGE_LENGTH:
            logger.debug("< Receive timeout!")
            raise TimeoutError("read timed out.")
        parsed_reply = BinaryReply(reply, message_id)
        logger.debug("< %s", parsed_reply)
        return parsed_reply

    def flush(self):
        """Flushes the buffers of the underlying serial port."""
        with self._lock.write_lock:
            self._ser.flush()

    def can_read(self):
        """Checks if enough data has been received to read a response, without blocking.

        If the return value is True, it means at least six bytes are available
        to read from the serial port, so calling read() will not block.

        Returns:
            True if a response is available to read; False otherwise.
        """
        with self._lock.read_lock:
            # in_waiting replaced inWaiting() in pyserial 3.0.
            if hasattr(self._ser, "in_waiting"):
                return self._ser.in_waiting >= MESSAGE_LENGTH
            else:
                return self._ser.inWaiting() >= MESSAGE_LENGTH

    def open(self):
        """Opens the serial port."""
        with self._lock:
            self._ser.open()

    def close(self):
        """Closes the serial port."""
        with self._lock:
            self._ser.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    @property
    def lock(self):
        return self._lock

    @property
    def timeout(self):
        """The number of seconds to wait for input while reading.

        The ``timeout`` property accepts floating point numbers for
        fractional wait times.
        """
        with self._lock:
            return self._ser.timeout

    @timeout.setter
    def timeout(self, value):
        with self._lock:
            self._ser.timeout = value

    @property
    def baudrate(self):
        """The baud rate at which to read and write.

        The default baud rate for the Binary protocol is 9600. T-Series
        devices are only capable of communication at 9600 baud.
        A-Series devices can communicate at 115200, 57600, 38400,
        19200, and 9600 baud.

        Note that this changes the baud rate of the computer on which
        this code is running. It does not change the baud rate of
        connected devices.
        """
        with self._lock:
            return self._ser.baudrate

    @baudrate.setter
    def baudrate(self, value):
        if value not in (115200, 57600, 38400, 19200, 9600):
            raise ValueError("Invalid baud rate: {:d}. Valid baud rates are "
                             "115200, 57600, 38400, 19200, and 9600.".format(value))
        with self._lock:
            self._ser.baudrate = value
import logging
import struct
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class BinaryCommand(object):
    """Models a single command in Zaber's Binary protocol.

    Attributes:
        device_number: An integer representing the number (*a.k.a.*
            address) of the device to which to send the command. A
            device number of 0 indicates the command should be executed
            by all devices. 0-255.
        command_number: An integer representing the command to be sent
            to the device. Command numbers are listed in Zaber's
            `Binary Protocol Manual`_. 0-255.
        data: The data value to be transmitted with the command.
        message_id: The `message ID`_ of the command. 0-255, or None if
            not present.

    .. _Binary Protocol Manual: http://www.zaber.com/wiki/Manuals/Binary
        _Protocol_Manual#Quick_Command_Reference
    .. _message ID: http://www.zaber.com/wiki/Manuals/Binary_Protocol_Ma
        nual#Set_Message_Id_Mode_-_Cmd_102
    """

    def __init__(self, device_number, command_number, data=0,
                 message_id=None):
        """
        Args:
            device_number: An integer specifying the number of the
                target device to which to send this command. 0-255.
            command_number: An integer specifying the command to be
                sent. 0-255.
            data: An optional integer containing the data value to be
                sent with the command. When omitted, *data* will be set
                to 0.
            message_id: An optional integer specifying a message ID to
                give to the message. 0-255, or None if no message ID is
                to be used.

        Raises:
            ValueError: An invalid value was passed.
        """
        # Enforce the documented 0-255 range for both numbers. Previously
        # only negative values were rejected here, so out-of-range positive
        # values failed later in encode() with a less helpful struct.error.
        if not 0 <= device_number <= 255 or not 0 <= command_number <= 255:
            raise ValueError(
                "Device and command number must be between 0 and 255."
            )
        self.device_number = device_number
        self.command_number = command_number
        self.data = data
        if message_id is not None and (message_id < 0 or message_id > 255):
            raise ValueError("Message ID must be between 0 and 255.")
        self.message_id = message_id

    def encode(self):
        """Encodes a 6-byte byte string to be transmitted to a device.

        Returns:
            A byte string of length 6, formatted according to Zaber's
            `Binary Protocol Manual`_: device byte, command byte, then
            the data as a 32-bit little-endian integer.
        """
        packed = struct.pack("<2Bl",
                             self.device_number,
                             self.command_number,
                             self.data)
        if self.message_id is not None:
            # The message ID occupies the sixth (most significant data) byte.
            packed = packed[:5] + struct.pack("B", self.message_id)
        return packed

    def __str__(self):
        """Return "[device, command, data]" for logging and debugging."""
        return "[{:d}, {:d}, {:d}]".format(self.device_number,
                                           self.command_number,
                                           self.data)
import logging
import re
from .utils import isstring
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class AsciiReply(object):
"""Models a single reply in Zaber's ASCII protocol.
Attributes:
message_type: A string of length 1 containing either '@', '!',
or '#', depending on whether the message type was a
"reply", "alert", or "info", respectively. Most messages
received from Zaber devices are of type "reply", or '@'.
device_address: An integer between 1 and 99 representing the
address of the device from which the reply was sent.
axis_number: An integer between 0 and 9 representing the axis
from which the reply was sent. An axis number of 0
represents a reply received from the device as a whole.
message_id: An integer between 0 and 255 if present, or None
otherwise.
reply_flag: A string of length two, containing either "OK" or
"RJ", depending on whether the command was accepted or
rejected by the device. Value will be None for device replies
that do not have a reply flag, such as info and alert messages.
device_status: A string of length 4, containing either "BUSY"
or "IDLE", depending on whether the device is moving or
stationary.
warning_flag: A string of length 2, usually "--". If it is not
"--", it will be one of the two-letter warning flags
described in the `Warning Flags section`_ of the Ascii
Protocol Manual.
data: A string containing the response data.
checksum: A string of length 2 containing two characters
representing a hexadecimal checksum, or None if a checksum
was not found in the reply.
.. _Warning Flags section: http://www.zaber.com/wiki/Manuals/ASCII_
Protocol_Manual#Warning_Flags
"""
def __init__(self, reply_string):
"""
Args:
reply_string: A string in one of the formats described in
Zaber's `Ascii Protocol Manual`_. It will be parsed by
this constructor in order to populate the attributes of
the new AsciiReply.
Raises:
TypeError: The reply_string is not a string.
ValueError: The string could not be parsed.
.. _Ascii Protocol Manual: http://www.zaber.com/wiki/Manuals/AS
CII_Protocol_Manual
"""
if not isstring(reply_string):
raise TypeError("reply_string must be a string.")
reply_string = reply_string.strip("\r\n")
if len(reply_string) < 5:
raise ValueError("Reply string too short to be a valid reply.")
# CHECK CHECKSUM
reply_string = self._strip_checksum(reply_string)
# SET ATTRIBUTES
self.message_type = reply_string[0]
self.device_address = None
self.axis_number = None
self.message_id = None
self.reply_flag = None
self.device_status = None
self.warning_flag = None
self.data = None
# @ is the "Reply" type
if '@' == self.message_type:
match = re.match(r"@(\d+)\s(\d+)\s(?:(\d+)\s)?(\S+)\s(\S+)\s(\S+)\s(.+)", reply_string)
if not match:
raise ValueError("Failed to parse reply: {}".format(reply_string))
self.device_address = int(match.group(1))
self.axis_number = int(match.group(2))
if match.group(3) is not None:
self.message_id = int(match.group(3))
self.reply_flag = match.group(4)
self.device_status = match.group(5)
self.warning_flag = match.group(6)
self.data = match.group(7) or ""
# # is the "Info" type
elif '#' == self.message_type:
match = re.match(r"#(\d+)\s(\d+)\s(?:(\d+)\s)?(.*)", reply_string)
if not match:
raise ValueError("Failed to parse info message: {}".format(reply_string))
self.device_address = int(match.group(1))
self.axis_number = int(match.group(2))
if match.group(3) is not None:
self.message_id = int(match.group(3))
self.data = match.group(4) or ""
# ! is the "Alert" type
elif '!' == self.message_type:
match = re.match(r"!(\d+)\s(\d+)\s(\S+)\s(\S+)(?:\s(.*))?", reply_string)
if not match:
raise ValueError("Failed to parse alert: {}".format(reply_string))
self.device_address = int(match.group(1))
self.axis_number = int(match.group(2))
self.device_status = match.group(3)
self.warning_flag = match.group(4)
self.data = match.group(5) or ""
else:
raise ValueError("Invalid response type: {}".format(self.message_type))
def _strip_checksum(self, reply_string):
# Any message type could have a checksum.
if reply_string[-3] == ':':
self.checksum = reply_string[-2:]
reply_string = reply_string[:-3]
# Test checksum
charsum = 0
for char in reply_string[1:]:
try:
charsum += ord(char)
except TypeError:
charsum += char # bytes() elements are ints.
# Truncate to last byte and XOR + 1, as per the LRC.
# Convert to HEX but keep only last 2 digits, left padded by 0's
correct_checksum = "{:02X}".format(((charsum & 0xFF) ^ 0xFF) + 1)[-2:]
if self.checksum != correct_checksum:
raise ValueError(
"Checksum incorrect. Found {:s}, expected {:s}. Possible "
"data corruption detected.".format(self.checksum,
correct_checksum)
)
else:
self.checksum = None
return reply_string
def encode(self):
"""Encodes the AsciiReply's attributes back into a valid string
resembling the string which would have created the AsciiReply.
Returns:
A string in the format described in Zaber's `Ascii Protocol
Manual`_.
.. _Ascii Protocol Manual: http://www.zaber.com/wiki/Manuals/AS
CII_Protocol_Manual
"""
retstr = ""
if self.message_type == '@':
if self.message_id is None:
retstr = "@{:02d} {:d} {:s} {:s} {:s} {:s}".format(
self.device_address,
self.axis_number,
self.reply_flag,
self.device_status,
self.warning_flag,
self.data
)
else:
retstr = "@{:02d} {:d} {:02d} {:s} {:s} {:s} {:s}".format(
self.device_address,
self.axis_number,
self.message_id,
self.reply_flag,
self.device_status,
self.warning_flag,
self.data
)
elif self.message_type == '#':
if self.message_id is None:
retstr = "#{:02d} {:d} {:s}".format(self.device_address,
self.axis_number,
self.data)
else:
retstr = "#{:02d} {:d} {:02d} {:s}".format(self.device_address,
self.axis_number,
self.message_id,
self.data)
elif self.message_type == '!':
if self.message_id is None:
retstr = "!{:02d} {:d} {:s} {:s}".format(self.device_address,
self.axis_number,
self.device_status,
self.warning_flag)
else:
retstr = "!{:02d} {:d} {:02d} {:s} {:s}".format(
self.device_address,
self.axis_number,
self.message_id,
self.device_status,
self.warning_flag
)
if self.checksum is not None:
return "{:s}:{:s}\r\n".format(retstr, self.checksum)
else:
return "{:s}\r\n".format(retstr)
def __str__(self):
"""Returns a reply string resembling the string which would have
created this AsciiReply.
Returns:
The same string as is returned by encode().
"""
return self.encode() | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/asciireply.py | asciireply.py |
from .asciilockstepinfo import AsciiLockstepInfo
from .asciimovementmixin import AsciiMovementMixin
from .utils import isstring
class AsciiLockstep(AsciiMovementMixin):
"""Represents an lockstep group of particular device (AsciiDevice).
Allows to setup and control lockstep (synchronized movement of axes).
Attributes:
device: The AsciiDevice of this lockstep group.
lockstep_group: The lockstep group number of the device.
.. lockstep section: http://www.zaber.com/wiki/Manuals/ASCII_
Protocol_Manual#lockstep
"""
def __init__(self, device, lockstep_group=1):
"""Constructs object allowing to setup lockstep (synchronized movement of axes).
Controls movement of the lockstep.
Requires instance of AsciiDevice on which lockstep is performed.
Args:
device: An AsciiDevice instance on which lockstep is performed.
lockstep_group: An integer representing the lockstep group of this
device. It must be greater or equal to 1.
Different devices may support different number of lockstep groups.
Defaults to lockstep group 1.
Raises:
ValueError: The lockstep_group was not greater or equal to 1.
.. lockstep section: http://www.zaber.com/wiki/Manuals/ASCII_
Protocol_Manual#lockstep
"""
AsciiMovementMixin.__init__(self)
if lockstep_group < 1:
raise ValueError("lockstep_group must be greater or equal to 1.")
self.device = device
self.lockstep_group = lockstep_group
def disable(self):
"""Disables this lockstep group. Allows participating axes to move independently again.
Returns:
An AsciiReply containing the reply received.
"""
reply = self.send("setup disable")
return reply
def enable(self, axis1=1, axis2=2):
"""Enables this lockstep group and sets up axes participating in lockstep group.
After calling this function axes will move together maintaining offset from
time of this call.
Future movement must be performed using this lockstep group (this instance).
Movement commands sent directly to axis or device won't be performed.
Args:
axis1: An integer between 1 and 9 representing the first device axis
which participates in lockstep group. Defaults to first axis of the device.
axis2: An integer between 1 and 9 representing the second device axis
which participates in lockstep group. Defaults to second axis of the device.
Returns:
An AsciiReply containing the reply received.
.. lockstep section: http://www.zaber.com/wiki/Manuals/ASCII_
Protocol_Manual#lockstep
"""
reply = self.send("setup enable {} {}".format(axis1, axis2))
return reply
def info(self):
"""Queries lockstep group's state returning AsciiLockstepInfo.
Observe AsciiLockstepInfo.is_enabled to determine whether lockstep is enabled for this lockstep group.
Observe AsciiLockstepInfo.twist to find out whether axis are in perfect sync.
Returns:
An AsciiLockstepInfo containing the state of this lockstep group.
"""
reply = self.send("info")
return AsciiLockstepInfo(reply.data)
def poll_until_idle(self):
"""Polls the lockstep group's status, blocking until it is idle.
Returns:
An AsciiReply containing the last reply received.
"""
info = self.info()
# it is sufficient to poll just one axis
return self.device.poll_until_idle(axis_number=info.axis1)
def get_status(self):
"""Queries the lockstep for its status and returns the result.
Returns:
A string containing either "BUSY" or "IDLE", depending on
the response received from the device.
"""
info = self.info()
# it is sufficient to query just one axis
return self.device.get_status(axis_number=info.axis1)
def send(self, message):
"""Sends a raw message to this lockstep group, then waits for a reply.
It is preferred to use functions as e.g. "enable" or "move_abs" to perform commands
rather than sending raw messages using this function.
Args:
message: A string representing the message
to be sent to the lockstep group.
Raises:
UnexpectedReplyError: The reply received was not sent by
the expected device.
TypeError: The message is not a string.
Returns:
An AsciiReply containing the reply received.
"""
if not isstring(message):
raise TypeError("message must be a string.")
return self.device.send("lockstep {} {}".format(self.lockstep_group, message)) | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/asciilockstep.py | asciilockstep.py |
import logging
from .binarycommand import BinaryCommand
from .unexpectedreplyerror import UnexpectedReplyError
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class BinaryDevice(object):
"""A class to represent a Zaber device in the Binary protocol. It is safe
to use in multi-threaded environments.
Attributes:
port: A BinarySerial object which represents the port to which
this device is connected.
number: The integer number of this device. 1-255.
"""
def __init__(self, port, number):
"""
Args:
port: A BinarySerial object to use as a parent port.
number: An integer between 1 and 255 which is the number of
this device.
Raises:
ValueError: The device number was invalid.
"""
if number > 255 or number < 1:
raise ValueError("Device number must be 1-255.")
self.number = number
self.port = port
def send(self, *args):
"""Sends a command to this device, then waits for a response.
Args:
*args: Either a single BinaryCommand, or 1-3 integers
specifying, in order, the command number, data value,
and message ID of the command to be sent.
Notes:
The ability to pass integers to this function is provided
as a convenience to the programmer. Calling
``device.send(2)`` is equivalent to calling
``device.send(BinaryCommand(device.number, 2))``.
Note that in the Binary protocol, devices will only reply
once they have completed a command. Since this function
waits for a reply from the device, this function may block
for a long time while it waits for a response. For the same
reason, it is important to set the timeout of this device's
parent port to a value sufficiently high that any command
sent will be completed within the timeout.
Regardless of the device address specified to this function,
the device number of the transmitted command will be
overwritten with the number of this device.
If the command has a message ID set, this function will return
a reply with a message ID, after checking whether the message
IDs match.
Raises:
UnexpectedReplyError: The reply read was not sent by this
device or the message ID of the reply (if in use) did not
match the message ID of the command.
Returns: A BinaryReply containing the reply received.
"""
if len(args) == 1 and isinstance(args[0], BinaryCommand):
command = args[0]
elif len(args) < 4:
command = BinaryCommand(self.number, *args) # pylint: disable=E1120
command.device_number = self.number
with self.port.lock:
self.port.write(command)
reply = self.port.read(command.message_id is not None)
if ((reply.device_number != self.number) or
((reply.message_id or 0) != (command.message_id or 0))):
raise UnexpectedReplyError(
"Received an unexpected reply from device number {0:d}".format(
reply.device_number
),
reply
)
return reply
def home(self):
"""Sends the "home" command (1), then waits for the device to
reply.
Returns: A BinaryReply containing the reply received.
"""
return self.send(1)
def move_abs(self, position):
"""Sends the "move absolute" command (20), then waits for the
device to reply.
Args:
position: The position in microsteps to which to move.
Returns: A BinaryReply containing the reply received.
"""
return self.send(20, position)
def move_rel(self, distance):
"""Sends the "move relative" command (21), then waits for the
device to reply.
Args:
distance: The distance in microsteps to which to move.
Returns: A BinaryReply containing the reply received.
"""
return self.send(21, distance)
def move_vel(self, speed):
"""Sends the "move at constant speed" command (22), then waits
for the device to reply.
Args:
speed: An integer representing the speed at which to move.
Notes:
Unlike the other "move" commands, the device replies
immediately to this command. This means that when this
function returns, it is likely that the device is still
moving.
Returns: A BinaryReply containing the reply received.
"""
return self.send(22, speed)
def stop(self):
"""Sends the "stop" command (23), then waits for the device to
reply.
Returns: A BinaryReply containing the reply received.
"""
return self.send(23)
def get_status(self):
"""Sends the "Return Status" command (54), and returns the
result.
Returns:
An integer representing a `status code`_, according to
Zaber's Binary Protocol Manual.
.. _status code: http://www.zaber.com/wiki/Manuals/Binary_Protoc
ol_Manual#Return_Status_-_Cmd_54
"""
return self.send(54).data
def get_position(self):
"""Sends the "Return Current Position" command (60), and returns the
result.
Returns:
An integer representing the device's current position, it its
native units of measure - see the device manual for unit conversions.
"""
return self.send(60).data | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/binarydevice.py | binarydevice.py |
import logging
import serial
from .asciicommand import AsciiCommand
from .asciireply import AsciiReply
from .timeouterror import TimeoutError
from .portlock import PortLock
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class AsciiSerial(object):
"""A class for interacting with Zaber devices using the ASCII protocol. It
is safe to use in multi-threaded environments.
Attributes:
baudrate: An integer representing the desired communication
baud rate. Valid bauds are 115200, 57600, 38400, 19200, and
9600.
timeout: A number representing the number of seconds to wait
for input before timing out. Floating-point numbers can be
used to specify times shorter than one second. A value of
None can also be used to specify an infinite timeout. A
value of 0 specifies that all reads and writes should be
non-blocking (return immediately without waiting). Defaults
to 5.
lock: The threading.RLock guarding the port. Each method takes the lock
and is therefore thread safe. However, to ensure no other threads
access the port across multiple method calls, the caller should
acquire the lock.
"""
def __init__(self, port, baud=115200, timeout=5, inter_char_timeout=0.5):
"""
Args:
port: A string containing the name or URL of the serial port to
which to connect.
baud: An integer representing the baud rate at which to
communicate over the serial port.
timeout: A number representing the number of seconds to wait
for a reply. Fractional numbers are accepted and can be
used to specify times shorter than a second.
inter_char_timeout : A number representing the number of seconds
to wait between bytes in a reply. If your computer is bad at
reading incoming serial data in a timely fashion, try
increasing this value.
Notes:
When *port* is not None, this constructor immediately
opens the serial port. There is no need to call open()
after creating this object, unless you passed None as
*port*.
Raises:
ValueError: An invalid baud rate was specified.
"""
if not isinstance(port, str):
raise TypeError("port must be a string.")
try:
self._ser = serial.serial_for_url(port, do_not_open=True)
self._ser.baudrate = baud
self._ser.timeout = timeout
self._ser.interCharTimeout = inter_char_timeout
self._ser.open()
except AttributeError:
# serial_for_url not supported; use fallback
self._ser = serial.Serial(port, baud, timeout=timeout,
interCharTimeout=inter_char_timeout)
self._lock = PortLock()
def write(self, command):
"""Writes a command to the serial port.
Args:
command: A string or AsciiCommand representing a command
to be sent.
"""
if isinstance(command, (str, bytes)):
command = AsciiCommand(command)
if not isinstance(command, AsciiCommand):
raise TypeError("write must be passed a string or AsciiCommand.")
logger.debug("> %s", command)
# From "Porting Python 2 Code to Python 3":
# "...when you receive text in binary data, you should
# immediately decode it. And if your code needs to send text as
# binary data then encode it as late as possible.
# This allows your code to work with only [unicode] text
# internally and thus eliminates having to keep track of what
# type of data you are working with."
# See https://docs.python.org/3/howto/pyporting.html#text-versu
# s-binary-data
with self._lock.write_lock:
self._ser.write(command.encode())
def read(self):
"""Reads a reply from the serial port.
Raises:
zaber.serial.TimeoutError: The duration specified by *timeout*
elapsed before a full reply could be read.
ValueError: The reply read could not be parsed and is
invalid.
Returns:
An `AsciiReply` containing the reply received.
"""
with self._lock.read_lock:
line = self._ser.readline()
if not line:
logger.debug("< Receive timeout!")
raise TimeoutError("read timed out.")
decoded_line = line.decode()
logger.debug("< %s", decoded_line.rstrip("\r\n"))
return AsciiReply(decoded_line)
def can_read(self):
"""Checks if any data has been received by the port, without blocking.
If the return value is True, it means some data is available but
it does not guarantee there is enough to read a complete reply; it's
still possible for the next read call to block waiting for data, and
it's still possible to time out if transmission was interrupted.
Returns:
True if data is available to read; False otherwise.
"""
with self._lock.read_lock:
if hasattr(self._ser, "in_waiting"):
return self._ser.in_waiting > 0
else:
return self._ser.inWaiting() > 0
def flush(self):
"""Flushes the buffers of the underlying serial port."""
with self._lock.write_lock:
self._ser.flush()
def open(self):
"""Opens the serial port."""
with self._lock:
self._ser.open()
def close(self):
"""Closes the serial port."""
with self._lock:
self._ser.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
@property
def lock(self):
return self._lock
@property
def timeout(self):
with self._lock:
return self._ser.timeout
@timeout.setter
def timeout(self, value):
with self._lock:
self._ser.timeout = value
@property
def baudrate(self):
with self._lock:
return self._ser.baudrate
@baudrate.setter
def baudrate(self, value):
with self._lock:
if value not in (115200, 57600, 38400, 19200, 9600):
raise ValueError(
"Invalid baud rate: {:d}. Valid baud rates are 115200, "
"57600, 38400, 19200, and 9600.".format(value)
)
self._ser.baudrate = value | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/asciiserial.py | asciiserial.py |
import logging
from .asciicommand import AsciiCommand
from .asciimovementmixin import AsciiMovementMixin
from .unexpectedreplyerror import UnexpectedReplyError
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class AsciiAxis(AsciiMovementMixin):
"""Represents one axis of an ASCII device. It is safe to use in multi-
threaded environments.
Attributes:
parent: An AsciiDevice which represents the device which has
this axis.
number: The number of this axis. 1-9.
"""
def __init__(self, device, number):
"""
Args:
device: An AsciiDevice which is the parent of this axis.
number: The number of this axis. Must be 1-9.
Raises:
ValueError: The axis number was not between 1 and 9.
"""
AsciiMovementMixin.__init__(self)
if number < 1 or number > 9:
raise ValueError("Axis number must be between 1 and 9.")
self.number = number
self.parent = device
def send(self, message):
"""Sends a message to the axis and then waits for a reply.
Args:
message: A string or AsciiCommand object containing a
command to be sent to this axis.
Notes:
Regardless of the device address or axis number supplied in
(or omitted from) the message passed to this function, this
function will always send the command to only this axis.
Though this is intended to make sending commands to a
particular axis easier by allowing the user to pass in a
"global command" (ie. one whose target device and axis are
both 0), this can result in some unexpected behaviour. For
example, if the user tries to call send() with an
AsciiCommand which has a different target axis number than
the number of this axis, they may be surprised to find that
the command was sent to this axis rather than the one
originally specified in the AsciiCommand.
Examples:
Since send() will automatically set (or overwrite) the
target axis and device address of the message, all of the
following calls to send() will result in identical ASCII
messages being sent to the serial port::
>>> axis.send("home")
>>> axis.send(AsciiCommand("home"))
>>> axis.send("0 0 home")
>>> axis.send("4 8 home")
>>> axis.send(AsciiCommand(1, 4, "home"))
Raises:
UnexpectedReplyError: The reply received was not sent by the
expected device and axis.
Returns: An AsciiReply object containing the reply received.
"""
if isinstance(message, (str, bytes)):
message = AsciiCommand(message)
# Always send the AsciiCommand to *this* axis.
message.axis_number = self.number
reply = self.parent.send(message)
if reply.axis_number != self.number:
raise UnexpectedReplyError(
"Received a reply from an unexpected axis: axis {}".format(
reply.axis_number
),
reply
)
return reply
def get_status(self):
"""Queries the axis for its status and returns the result.
Raises:
UnexpectedReplyError: The reply received was not sent by the
expected device and axis.
Returns:
A string containing either "BUSY" or "IDLE", depending on
the response received from the axis.
"""
return self.send("").device_status
def get_position(self):
"""Queries the axis for its position and returns the result.
Raises:
UnexpectedReplyError: The reply received was not sent by the
expected device and axis.
Returns:
A number representing the current device position in its native
units of measure. See the device manual for unit conversions.
"""
return int(self.send("get pos").data)
def poll_until_idle(self):
"""Polls the axis and blocks until the device reports that the
axis is idle.
Raises:
UnexpectedReplyError: The reply received was not sent by the
expected device and axis.
Returns: An AsciiReply object containing the last reply
received.
"""
return self.parent.poll_until_idle(self.number) | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/asciiaxis.py | asciiaxis.py |
import logging
import struct
# See https://docs.python.org/2/howto/logging.html#configuring-logging-
# for-a-library for info on why we have these two lines here.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class BinaryReply(object):
"""Models a single reply in Zaber's Binary protocol.
Attributes:
device_number: The number of the device from which this reply
was sent.
command_number: The number of the command which triggered this
reply.
data: The data value associated with the reply.
message_id: The message ID number, if present, otherwise None.
"""
def __init__(self, reply, message_id=False):
"""
Args:
reply: A byte string of length 6 containing a binary reply
encoded according to Zaber's Binary Protocol Manual.
message_id: True if a message ID should be extracted from
the reply, False if not.
Notes:
Because a Binary reply's message ID truncates the last byte
of the data value of the reply, it is impossible to tell
whether a reply contains a message ID or not. Therefore, the
user must specify whether or not a message ID should be
assumed to be present.
Raises:
TypeError: An invalid type was passed as *reply*. This may
indicate that a unicode string was passed instead of a
binary (ascii) string.
"""
if isinstance(reply, bytes):
self.device_number, self.command_number, self.data = \
struct.unpack("<2Bl", reply)
if message_id:
# Use bitmasks to extract the message ID.
self.message_id = (self.data & 0xFF000000) >> 24
self.data = self.data & 0x00FFFFFF
# Sign extend 24 to 32 bits in the message ID case.
# If the data is more than 24 bits it will still be wrong,
# but now negative smaller values will be right.
if 0 != (self.data & 0x00800000):
self.data = (int)((self.data | 0xFF000000) - (1 << 32))
else:
self.message_id = None
elif isinstance(reply, list):
# Assume a 4th element is a message ID.
if len(reply) > 3:
message_id = True
self.device_number = reply[0]
self.command_number = reply[1]
self.data = reply[2]
self.message_id = reply[3] if message_id else None
else:
raise TypeError("BinaryReply must be passed a byte string "
"('bytes' type) or a list.")
def encode(self):
"""Returns the reply as a binary string, in the form in which it
would appear if it had been read from the serial port.
Returns:
A byte string of length 6 formatted according to the Binary
Protocol Manual.
"""
return struct.pack("<2Bl",
self.device_number,
self.command_number,
self.data)
def __str__(self):
return "[{:d}, {:d}, {:d}]".format(self.device_number,
self.command_number,
self.data) | zaber.serial | /zaber.serial-0.9.1.tar.gz/zaber.serial-0.9.1/zaber/serial/binaryreply.py | binaryreply.py |
class AsciiMovementMixin(object):
"""Provides mixin giving ability to move device, axis or lockstep.
"""
def home(self):
"""Sends the "home" command, then polls the device or axis until it is
idle.
Raises:
UnexpectedReplyError: The reply received was not sent by the
expected device or axis.
Returns:
An AsciiReply containing the first reply received.
"""
reply = self.send("home")
self.poll_until_idle()
return reply
def move_abs(self, position, blocking=True):
"""Sends the "move abs" command to the device or axis to move it to the
specified position, then polls the device until it is idle.
Args:
position: An integer representing the position in
microsteps to which to move the device.
blocking: An optional boolean, True by default. If set to
False, this function will return immediately after
receiving a reply from the device and it will not poll
the device further.
Raises:
UnexpectedReplyError: The reply received was not sent by
the expected device or axis.
Returns:
An AsciiReply containing the first reply received.
"""
reply = self.send("move abs {0:d}".format(position))
if blocking:
self.poll_until_idle()
return reply
def move_rel(self, distance, blocking=True):
"""Sends the "move rel" command to the device or axis to move it by the
specified distance, then polls the device until it is idle.
Args:
distance: An integer representing the number of microsteps
by which to move the device.
blocking: An optional boolean, True by default. If set to
False, this function will return immediately after
receiving a reply from the device, and it will not poll
the device further.
Raises:
UnexpectedReplyError: The reply received was not sent by
the expected device or axis.
Returns:
An AsciiReply containing the first reply received.
"""
reply = self.send("move rel {0:d}".format(distance))
if blocking:
self.poll_until_idle()
return reply
def move_vel(self, speed, blocking=False):
"""Sends the "move vel" command to make the device or axis move at the
specified speed.
Args:
speed: An integer representing the speed at which to move
the device.
blocking: An optional boolean, False by default. If set to
True, this function will poll the device repeatedly
until it reports that it is idle.
Notes:
Unlike the other two move commands, move_vel() does not by
default poll the device until it is idle. move_vel() will
return immediately after receiving a response from the
device unless the "blocking" argument is set to True.
Raises:
UnexpectedReplyError: The reply received was not sent by
the expected device or axis.
Returns:
An AsciiReply containing the first reply received.
"""
reply = self.send("move vel {0:d}".format(speed))
if blocking:
self.poll_until_idle()
return reply
def stop(self):
    """Stop the device or axis.

    Sends the "stop" command, which can pre-empt any movement command
    in order to stop the device early, then polls until idle.

    Raises:
        UnexpectedReplyError: The reply received was not sent by the
            expected device or axis.

    Returns:
        An AsciiReply containing the first reply received.
    """
    first_reply = self.send("stop")
    self.poll_until_idle()
    return first_reply
import os
import click
from zabier import commands
from zabier.zabbix import configure
@click.group(invoke_without_command=True)
@click.option(
    '-H', '--host',
    required=False,
    type=str,
    default=os.environ.get('ZABBIX_HOST', ''))
@click.option(
    '-u', '--user',
    required=False,
    type=str,
    default=os.environ.get('ZABBIX_USER', ''))
@click.option(
    '-p', '--password',
    required=False,
    type=str,
    default=os.environ.get('ZABBIX_PASSWORD', ''))
@click.pass_context
def main(ctx: click.Context, host: str, user: str, password: str):
    """Root command group.

    Configures the global Zabbix client before dispatching to a
    subcommand; prints help when invoked without one. Credentials
    default to the ZABBIX_HOST / ZABBIX_USER / ZABBIX_PASSWORD
    environment variables.
    """
    if ctx.invoked_subcommand is not None:
        configure(host, user, password)
    else:
        click.echo(ctx.get_help())
@main.command(help='Apply the host group')
@click.option('-n', '--name', required=True, type=str)
@click.option('--dry-run', is_flag=True)
def hostgroup(name: str, dry_run: bool):
    """CLI entry point: delegate host-group creation to commands.hostgroup."""
    commands.hostgroup(name, dry_run)
@main.command(help='Apply the action')
@click.option('-n', '--name', required=True, type=str)
@click.option('-f', '--file', required=True, type=str)
@click.option('--dry-run', is_flag=True)
def action(name: str, file: str, dry_run: bool):
    """CLI entry point: apply an action definition file via commands.action."""
    commands.action(name, file, dry_run)
@main.command(help='Apply the template')
@click.option('-n', '--name', required=True, type=str)
@click.option('-f', '--file', required=True, type=str)
@click.option('--dry-run', is_flag=True)
@click.option('--export', is_flag=True)
def template(name: str, file: str, dry_run: bool, export: bool):
    """CLI entry point: import (or, with --export, export) a template."""
    commands.template(name, file, dry_run, export)
@main.command(help='Apply the host')
@click.option('-n', '--name', required=True, type=str)
@click.option('-f', '--file', required=True, type=str)
@click.option('--dry-run', is_flag=True)
@click.option('--export', is_flag=True)
@click.option('--append-only', is_flag=True)
def host(name: str, file: str, dry_run: bool, export: bool, append_only: bool):
    """CLI entry point: import (or, with --export, export) a host definition."""
    commands.host(name, file, dry_run, export, append_only)
# Typo fixed in the help text: "maintenace" -> "maintenance".
@main.command(help='Apply the maintenance')
@click.option('-n', '--name', required=True, type=str)
@click.option('-f', '--file', required=True, type=str)
@click.option('--dry-run', is_flag=True)
def maintenance(name: str, file: str, dry_run: bool):
    """CLI entry point: apply a maintenance definition file via commands.maintenance."""
    commands.maintenance(name, file, dry_run)
import json
from dataclasses import dataclass
from typing import Dict, Optional
from zabier.zabbix.base import ZabbixBase
@dataclass
class Host:
    """A Zabbix host as returned by host.get: its id and visible name."""
    hostid: Optional[str]  # None until the host exists on the server
    name: str
class HostMixin(ZabbixBase):
    """Host-related operations on the Zabbix API."""

    # Entities grouped by the import rules they receive in full-sync mode.
    # (In append-only mode every entity gets createMissing only.)
    _CREATE_UPDATE_DELETE = (
        'applications', 'discoveryRules', 'graphs',
        'items', 'templateScreens', 'triggers')
    _CREATE_UPDATE = ('hosts', 'valueMaps')
    _CREATE_ONLY = ('templateLinkage',)

    def get_host_by_name(self, name: str) -> Optional[Host]:
        """Return the editable host named `name`, or None when absent."""
        response: Dict = self.do_request(
            'host.get',
            {
                'filter': {
                    'name': [name]
                },
                'editable': True,
                'startSearch': True,
                'searchByAny': True
            }
        )
        if len(response['result']) == 0:
            return None
        host = response['result'].pop()
        return Host(hostid=host['hostid'], name=host['name'])

    def import_host_configuration(self,
                                  config: Dict,
                                  append_only: bool) -> bool:
        """Import a host configuration dump.

        :param config: configuration dict (will be JSON-serialized).
        :param append_only: when True, only create missing objects;
            otherwise also update existing ones and delete obsolete
            sub-objects for the entity types that support it.
        :return: the API result (True on success).
        """
        # Build the per-entity rules table instead of maintaining two
        # near-duplicate literal dicts (the original repeated them).
        rules: Dict = {}
        for entity in (self._CREATE_UPDATE_DELETE
                       + self._CREATE_UPDATE
                       + self._CREATE_ONLY):
            rule = {'createMissing': True}
            if not append_only:
                if entity not in self._CREATE_ONLY:
                    rule['updateExisting'] = True
                if entity in self._CREATE_UPDATE_DELETE:
                    rule['deleteMissing'] = True
            rules[entity] = rule
        response: Dict = self.do_request(
            'configuration.import',
            {
                'format': 'json',
                'rules': rules,
                'source': json.dumps(config)
            }
        )
        return response['result']

    def export_host_configuration(self, host_id: str) -> str:
        """Export the host's configuration.

        :return: the configuration as a JSON string (callers pass it to
            json.loads); the previous `-> bool` annotation was wrong.
        """
        response: Dict = self.do_request(
            'configuration.export',
            {
                'format': 'json',
                'options': {
                    'hosts': [host_id]
                }
            }
        )
        return response['result']
from dataclasses import asdict, dataclass
from typing import Dict, List, Optional
from dacite import from_dict
from zabier.zabbix.base import ZabbixBase
@dataclass
class Maintenance:
    """A Zabbix maintenance period (shape used by maintenance.get/create/update)."""
    maintenanceid: Optional[str]  # None until the maintenance exists on the server
    name: str
    maintenance_type: str  # presumably '0' = with data collection, '1' = without -- confirm against API docs
    description: str
    active_since: str  # presumably a unix timestamp as a string -- verify against callers
    active_till: str
    groups: List[Dict]  # [{'groupid': ..., 'name': ...}]
    hosts: List[Dict]  # [{'hostid': ..., 'name': ...}]
    timeperiods: List[Dict]
class MaintenanceMixin(ZabbixBase):
    """Maintenance-period operations on the Zabbix API."""

    def get_maintenance_by_name(self, name: str) -> Optional[Maintenance]:
        """Return the maintenance named `name` (with its groups, hosts and
        time periods), or None when it does not exist."""
        response: Dict = self.do_request(
            'maintenance.get',
            {
                'filter': {
                    'name': [name]
                },
                'editable': True,
                'startSearch': True,
                'searchByAny': True,
                'selectGroups': 'extend',
                'selectHosts': 'extend',
                'selectTimeperiods': 'extend'
            }
        )
        if len(response['result']) == 0:
            return None
        maintenance = response['result'].pop()
        # Keep only id/name for groups and hosts so the object matches the
        # Maintenance dataclass fields and round-trips through asdict().
        groups = []
        hosts = []
        for group in maintenance['groups']:
            groups.append({
                'groupid': group['groupid'],
                'name': group['name']})
        for host in maintenance['hosts']:
            hosts.append({
                'hostid': host['hostid'],
                'name': host['name']})
        maintenance['groups'] = groups
        maintenance['hosts'] = hosts
        return from_dict(data_class=Maintenance, data=maintenance)

    def create_maintenance(self, maintenance: Maintenance) -> str:
        """Create the maintenance and return its new id.

        (Return annotation corrected from int: Zabbix ids are strings,
        matching Maintenance.maintenanceid: Optional[str].)
        """
        maintenance_dict = self._maintenance_to_api_dict(maintenance)
        del maintenance_dict['maintenanceid']  # the server assigns the id
        response: Dict = self.do_request(
            'maintenance.create',
            maintenance_dict)
        return response['result']['maintenanceids'].pop()

    def update_maintenance(self, maintenance: Maintenance) -> str:
        """Update an existing maintenance (maintenanceid must be set); return its id."""
        maintenance_dict = self._maintenance_to_api_dict(maintenance)
        response: Dict = self.do_request(
            'maintenance.update',
            maintenance_dict)
        return response['result']['maintenanceids'].pop()

    def _maintenance_to_api_dict(self, maintenance: Maintenance) -> Dict:
        """Convert a Maintenance into the dict shape the API expects.

        maintenance.create/update take flat `groupids`/`hostids` lists, so
        the nested group/host objects are translated and then dropped.
        """
        maintenance_dict = asdict(maintenance)
        groupids = []
        hostids = []
        for group in maintenance_dict.get('groups', []):
            groupids.append(group['groupid'])
        for host in maintenance_dict.get('hosts', []):
            hostids.append(host['hostid'])
        del maintenance_dict['groups']
        del maintenance_dict['hosts']
        maintenance_dict['groupids'] = groupids
        maintenance_dict['hostids'] = hostids
        return maintenance_dict
import json
from dataclasses import dataclass
from typing import Dict, Optional
from zabier.zabbix.base import ZabbixBase
@dataclass
class Template:
    """A Zabbix template as returned by template.get."""
    templateid: Optional[str]  # None until the template exists on the server
    host: str  # technical name
    name: str  # visible name
    description: str
class TemplateMixin(ZabbixBase):
    """Template-related operations on the Zabbix API."""

    def get_template_by_name(self, name: str) -> Optional[Template]:
        """Return the editable template named `name`, or None when absent."""
        response: Dict = self.do_request(
            'template.get',
            {
                'filter': {
                    'name': [name]
                },
                'editable': True,
                'startSearch': True,
                'searchByAny': True
            }
        )
        if len(response['result']) == 0:
            return None
        template = response['result'].pop()
        return Template(
            templateid=template['templateid'],
            host=template['host'],
            name=template['name'],
            description=template['description'])

    def import_template_configuration(self,
                                      config: Dict) -> bool:
        """Import a template configuration dump.

        Missing objects are created, existing ones updated, and obsolete
        sub-objects deleted for the entity types that support it.
        """
        response: Dict = self.do_request(
            'configuration.import',
            {
                'format': 'json',
                'rules': {
                    'templates': {
                        'createMissing': True,
                        'updateExisting': True
                    },
                    'applications': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'discoveryRules': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'graphs': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'items': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'templateLinkage': {
                        'createMissing': True
                    },
                    'templateScreens': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'triggers': {
                        'createMissing': True,
                        'updateExisting': True,
                        'deleteMissing': True
                    },
                    'valueMaps': {
                        'createMissing': True,
                        'updateExisting': True
                    }
                },
                'source': json.dumps(config)
            }
        )
        return response['result']

    def export_template_configuration(self, template_id: str) -> str:
        """Export the template's configuration.

        :return: the configuration as a JSON string (callers pass it to
            json.loads); the previous `-> bool` annotation was wrong.
        """
        response: Dict = self.do_request(
            'configuration.export',
            {
                'format': 'json',
                'options': {
                    'templates': [template_id]
                }
            }
        )
        return response['result']
import json
from dataclasses import asdict, dataclass
from typing import Dict, IO
import click
from dacite import from_dict
from datadiff import diff
import yaml
from zabier.zabbix import constants
from zabier.zabbix import get_client
from zabier.zabbix.action import Action
from zabier.zabbix.hostgroup import HostGroup
from zabier.zabbix.maintenance import Maintenance
def hostgroup(name: str, dry_run: bool):
    """Create the host group `name` unless it already exists.

    With dry_run, only report what would happen.
    """
    zabbix = get_client()
    if zabbix.host_group_exists(name):
        click.echo(f'Host group "{name}" is already exists.')
        return
    if dry_run:
        # Was a placeholder-less f-string; a plain literal is equivalent.
        click.echo('The new host group will be created.')
        return
    new_group = HostGroup(groupid=None, name=name)
    groupid = zabbix.create_host_group(new_group)
    click.echo(f'Host group created. ID: {groupid}')
def action(name: str, file: str, dry_run: bool):
    """Create or update the Zabbix action `name` from a YAML definition.

    The file either carries a full action under `config`, or a short
    `auto_registration_config` section that gets expanded via
    _refine_auto_registration_config(). With dry_run, only print the
    diff / intended change.
    """
    zabbix = get_client()
    existing = zabbix.get_action_by_name(name)
    action_def = _load_config_file(file)
    action_config = action_def.get('config', {})
    if 'auto_registration_config' in action_def:
        action_config = _refine_auto_registration_config(
            name, action_def['auto_registration_config'])
    # Build the local Action once for both branches (was duplicated).
    local_action = from_dict(data_class=Action, data=action_config)
    if existing is None:
        if dry_run:
            click.echo('The new action will be created.')
        else:
            actionid = zabbix.create_action(local_action)
            click.echo(f'Action created. ID: {actionid}')
    else:
        click.echo(_diff_actions(local_action, existing))
        local_action.actionid = existing.actionid
        if not dry_run:
            actionid = zabbix.update_action(local_action)
            click.echo(f'Action updated. ID: {actionid}')
def template(name: str, file: str, dry_run: bool, export: bool):
    """Export or apply (import) a template definition.

    With export=True, dump the named template's configuration to `file`
    as YAML. Otherwise import the YAML in `file`, showing a diff against
    the remote configuration when the template already exists.
    """
    zabbix = get_client()
    template = zabbix.get_template_by_name(name)
    if export:
        if template is None:
            raise click.UsageError(
                f'template "{name}" is not exists.')
        config = zabbix.export_template_configuration(template.templateid)
        # Context manager closes the handle; the original leaked it.
        with open(file, 'w') as fh:
            fh.write(yaml.dump(json.loads(config)))
        click.echo('Template exported.')
    else:
        local_config = _load_config_file(file)
        if template is not None:
            remote_config = zabbix.export_template_configuration(template.templateid)
            click.echo(_diff_configs(local_config, json.loads(remote_config)))
        if not dry_run:
            zabbix.import_template_configuration(local_config)
            click.echo('Template imported.')
def host(name: str, file: str, dry_run: bool, export: bool, append_only: bool):
    """Export or apply (import) a host definition.

    With export=True, dump the named host's configuration to `file` as
    YAML. Otherwise import the YAML in `file`, showing a diff against
    the remote configuration when the host already exists; append_only
    restricts the import to creating missing objects.
    """
    zabbix = get_client()
    host = zabbix.get_host_by_name(name)
    if export:
        if host is None:
            raise click.UsageError(
                f'Host "{name}" is not exists.')
        config = zabbix.export_host_configuration(host.hostid)
        # Context manager closes the handle; the original leaked it.
        with open(file, 'w') as fh:
            fh.write(yaml.dump(json.loads(config)))
        click.echo('Host exported.')
    else:
        local_config = _load_config_file(file)
        if host is not None:
            remote_config = zabbix.export_host_configuration(host.hostid)
            click.echo(_diff_configs(local_config, json.loads(remote_config)))
        if not dry_run:
            zabbix.import_host_configuration(local_config, append_only)
            click.echo('Host imported.')
def maintenance(name: str, file: str, dry_run: bool):
    """Create or update the maintenance period `name` from a YAML file."""
    zabbix = get_client()
    remote_maintenance = zabbix.get_maintenance_by_name(name)
    local_maintenance = _load_maintenance_file(name, file)
    if remote_maintenance is None:
        if not dry_run:
            maintenanceid = zabbix.create_maintenance(local_maintenance)
            click.echo(f'Maintenance created. ID: {maintenanceid}')
        return
    click.echo(
        _diff_maintenances(local_maintenance, remote_maintenance))
    if not dry_run:
        local_maintenance.maintenanceid = remote_maintenance.maintenanceid
        maintenanceid = zabbix.update_maintenance(local_maintenance)
        click.echo(f'Maintenance updated. ID: {maintenanceid}')
def _diff_maintenances(local: Maintenance, remote: Maintenance):
    """Diff two maintenances with remote as the baseline, ignoring their ids."""
    local_dict = asdict(local)
    remote_dict = asdict(remote)
    local_dict.pop('maintenanceid')
    remote_dict.pop('maintenanceid')
    return diff(remote_dict, local_dict)
def _load_maintenance_file(name: str, file: str) -> Maintenance:
    """Build a Maintenance from the `maintenance_config` section of a YAML file.

    Group and host names are resolved against the Zabbix server.
    (Annotations fixed: `file` is a path string, not an IO object, and the
    function returns a Maintenance, not a Dict.)

    :raises click.UsageError: when a referenced host group or host does
        not exist on the server.
    """
    # Context manager closes the handle; the original leaked it.
    with open(file) as fh:
        config = yaml.full_load(fh).get('maintenance_config')
    groups = []
    hosts = []
    zabbix = get_client()
    for group_name in config.get('groups', []):
        group = zabbix.get_host_group_by_name(group_name)
        if group is None:
            raise click.UsageError(
                f'Hostgroup "{group_name}" is not exists.')
        groups.append({
            'groupid': group.groupid,
            'name': group.name
        })
    for host_name in config.get('hosts', []):
        host = zabbix.get_host_by_name(host_name)
        if host is None:
            raise click.UsageError(
                f'Host "{host_name}" is not exists.')
        hosts.append({
            'hostid': host.hostid,
            'name': host.name
        })
    return Maintenance(
        maintenanceid=None,  # assigned by the server on create
        name=name,
        maintenance_type=config.get('maintenance_type', '0'),
        description=config.get('description', ''),
        active_since=config.get('active_since'),
        active_till=config.get('active_till'),
        groups=groups,
        hosts=hosts,
        timeperiods=config.get('timeperiods'))
def _load_config_file(file: str) -> Dict:
    """Load a YAML configuration file into a dict.

    (Annotation fixed: `file` is a path string; the file handle is now
    closed via a context manager instead of being leaked.)
    """
    with open(file) as fh:
        return yaml.full_load(fh)
def _diff_configs(local: Dict, remote: Dict):
    """Diff two exported configurations, ignoring the volatile export date.

    The date is stripped from each side independently: in the original a
    single try/except meant that when `local` lacked the date key,
    `remote`'s date was never removed, producing a spurious diff entry.
    """
    for config in (local, remote):
        try:
            del config['zabbix_export']['date']
        except KeyError:
            pass
    return diff(remote, local)
def _diff_actions(local: Action, remote: Action):
    """Diff two actions with remote as the baseline, ignoring
    server-assigned ids and the derived eval_formula field.

    Each field is removed independently (pop with a default): in the
    original, one KeyError/TypeError mid-way silently skipped the rest of
    the clean-up, leaving ids in the diff.
    """
    local = asdict(local)
    remote = asdict(remote)
    local.pop('actionid', None)
    remote.pop('actionid', None)
    remote_filter = remote.get('filter')
    if isinstance(remote_filter, dict):
        remote_filter.pop('eval_formula', None)
    for op in remote.get('operations') or []:
        if not isinstance(op, dict):
            continue
        op.pop('actionid', None)
        op.pop('operationid', None)
        for key in ('optemplate', 'opgroup'):
            for op2 in op.get(key) or []:
                if isinstance(op2, dict):
                    op2.pop('operationid', None)
    return diff(remote, local)
def _refine_auto_registration_config(name: str, config: Dict) -> Dict:
    """Expand a short `auto_registration_config` section into a full
    Zabbix auto-registration action definition.

    The generated action adds the host, puts it in the configured host
    group, links the configured templates, and enables/disables the host
    according to `disable_host`; its filter fires when the host metadata
    contains `hostmetadata`.

    :raises click.UsageError: when a required config key is missing or a
        referenced host group / template does not exist.
    """
    zabbix = get_client()
    try:
        hostgroup_name = config['hostgroup']
        hostgroup = zabbix.get_host_group_by_name(hostgroup_name)
        if hostgroup is None:
            raise click.UsageError(f'HostGroup "{hostgroup_name}" is not exists.')
        t_names = config['templates']
        templates = []
        for t_name in t_names:
            template = zabbix.get_template_by_name(t_name)
            if template is None:
                raise click.UsageError(f'Template "{t_name}" is not exists.')
            templates.append({'templateid': str(template.templateid)})
        host_status_operationtype = constants.OPERATION_TYPE_ENABLE_HOST
        if config['disable_host']:
            host_status_operationtype = constants.OPERATION_TYPE_DISABLE_HOST
        refined_config = {
            'esc_period': '0',
            'eventsource': constants.EVENTSOURCE_AUTO_REGISTERED_HOST,
            'name': name,
            'status': config['status'],
            'operations': [
                # 1. register the host itself
                {
                    'esc_period': '0',
                    'esc_step_from': '1',
                    'esc_step_to': '1',
                    'evaltype': '0',
                    'opconditions': [],
                    'operationtype': constants.OPERATION_TYPE_ADD_HOST
                },
                # 2. add it to the configured host group
                {
                    'esc_period': '0',
                    'esc_step_from': '1',
                    'esc_step_to': '1',
                    'evaltype': '0',
                    'opconditions': [],
                    'operationtype': constants.OPERATION_TYPE_ADD_TO_HOSTGROUP,
                    'opgroup': [
                        {'groupid': str(hostgroup.groupid)}
                    ]
                },
                # 3. link the configured templates
                {
                    'esc_period': '0',
                    'esc_step_from': '1',
                    'esc_step_to': '1',
                    'evaltype': '0',
                    'opconditions': [],
                    'operationtype': constants.OPERATION_TYPE_LINK_TO_TEMPLATE,
                    'optemplate': templates
                },
                # 4. enable or disable the host
                {
                    'esc_period': '0',
                    'esc_step_from': '1',
                    'esc_step_to': '1',
                    'evaltype': '0',
                    'opconditions': [],
                    'operationtype': host_status_operationtype
                }
            ],
            'filter': {
                # Single condition "A": host metadata contains `hostmetadata`.
                'conditions': [
                    {
                        'conditiontype': constants.CONDITION_TYPE_HOST_METADATA,
                        'value': config['hostmetadata'],
                        'operator': constants.CONDITION_OPERATOR_LIKE,
                        'formulaid': 'A'
                    }
                ],
                'evaltype': '0',
                'formula': ''
            }
        }
        return refined_config
    except KeyError as e:
        # NOTE(review): this catches KeyErrors raised anywhere above, not
        # only missing config keys, and would misreport them as missing
        # "auto_registration_config" entries. Consider narrowing the try.
        key_name = e.args[0]
        raise click.UsageError(
            f'"{key_name}" is required in "auto_registration_config".')
PyTree
======
.. image:: https://img.shields.io/pypi/v/tree.svg?style=flat-square
:target: https://pypi.python.org/pypi/Tree
.. image:: https://img.shields.io/pypi/l/Tree.svg?style=flat-square
:target: https://github.com/PixelwarStudio/PyTree/blob/master/LICENSE
Python package which you can use to generate and draw trees, realistic or fractal ones.
Usage
-----
.. code-block:: bash
$ pip install zac-pyutils --upgrade -i https://pypi.python.org/pypi
.. code-block:: python
from zac_pyutils import ExqUtils
# log to file
logger = ExqUtils.get_logger("tmp.log")
ExqUtils.log2file(message="new message",logger=logger)
# load file as iter
fileIter = ExqUtils.load_file_as_iter("./data/nlp/sample_data.txt")
| zac-pyutils | /zac_pyutils-1.70.53.tar.gz/zac_pyutils-1.70.53/README.rst | README.rst |
import logging
import datetime
import itertools
import time
import sys
from collections import deque
import matplotlib.pyplot as plt
# Convenience aliases for the stdlib logging levels, so callers can pass
# e.g. ExqUtils.INFO to log2file() without importing logging themselves.
INFO = logging.INFO
WARN = logging.WARN
ERROR = logging.ERROR
DEBUG = logging.DEBUG
def parse_argv(argv):
    """Parse ``--key value`` pairs from an argv-style list.

    The first element (the script name) is skipped. A trailing ``--flag``
    with no following value is ignored instead of raising IndexError,
    which the original did.

    :param argv: sys.argv-like list of strings.
    :return: dict mapping option names (without the leading ``--``) to
        their following argument, as strings.
    """
    res_dict = {}
    for idx, item in enumerate(argv):
        if idx == 0:
            continue  # script name
        if item.startswith("--") and idx + 1 < len(argv):
            res_dict.update({item.split("--")[1]: argv[idx + 1]})
    return res_dict
def load_file_as_iter(path):
    """Lazily yield the lines of the file at `path` (newlines kept).

    Note: the file is opened in "r+" mode, so it must already exist and
    be writable, matching the original behavior.
    """
    with open(path, "r+") as fh:
        yield from fh
def padding(tokens_inp, pad_len=-1, pad="__PAD__"):
    """Pad `tokens_inp` with `pad` up to `pad_len`, or truncate it to that length.

    A negative `pad_len` (the default) returns a copy unchanged. In the
    original, the -1 default silently dropped the last token, because
    ``(tokens + [])[:-1]`` was evaluated.
    """
    if pad_len < 0:
        return list(tokens_inp)
    return (tokens_inp + [pad] * pad_len)[:pad_len]
def padding_autoMax(tokens_list_inp, pad="__PAD__"):
    """Pad every token list with `pad` up to the length of the longest one."""
    target_len = max(len(tokens) for tokens in tokens_list_inp)
    return [(tokens + [pad] * target_len)[:target_len]
            for tokens in tokens_list_inp]
def zprint(*args):
    """print() replacement that prefixes a '|YYYY-mm-dd HH:MM:SS|' timestamp."""
    text = " ".join(str(a) for a in args)
    stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print("|{}| {}".format(stamp, text))
def get_logger(log_file):
    """Build a standalone Logger that appends to `log_file`.

    The logger is created directly (not via getLogger) so it is not
    registered in the global logger hierarchy.
    """
    file_logger = logging.Logger("logger to {}".format(log_file))
    handler = logging.FileHandler(log_file)
    handler.setFormatter(
        logging.Formatter('%(levelname)s %(lineno)s %(message)s'))
    file_logger.addHandler(handler)
    return file_logger
def log2file(message, logger, level=logging.INFO, verbose=False):
    """Log `message` through `logger` with a timestamp prefix.

    With verbose=True the stamped message is also printed to stdout.
    """
    stamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    stamped = "|{}| {}".format(stamp, message)
    logger.log(level, stamped)
    if verbose:
        print(stamped)
def map_on_iter(iter_item, function, chunk_size=100):
    """Apply `function` to successive chunks of an iterator and flatten the results.

    `function` receives a list of up to `chunk_size` items and must return
    an iterable; the per-chunk results are concatenated into one list.
    Note: `iter_item` must be a consumable iterator -- islice() does not
    advance a plain sequence, so passing a list would loop forever.
    """
    chunk_results = deque()
    while True:
        chunk = list(itertools.islice(iter_item, chunk_size))
        if not chunk:
            break
        chunk_results.append(function(chunk))
    return flat(list(chunk_results))
def flat(inp_list):
    """Flatten a list of lists by one level."""
    flattened = []
    for sub in inp_list:
        flattened.extend(sub)
    return flattened
def timeit(func):
    """Decorator: make `func` return (result, elapsed) where elapsed is a
    string like '12.5ms' (wall-clock, rounded)."""
    def wraps(*args, **kwargs):
        started = time.time()
        result = func(*args, **kwargs)
        elapsed = str(round(time.time() - started, 5) * 1000) + "ms"
        return result, elapsed
    return wraps
def groupby(it, key=lambda x: x):
    """Sort `it` by `key`, then group adjacent equal keys.

    Returns an itertools.groupby iterator of (key, group) pairs; sorting
    first guarantees one group per distinct key.
    """
    ordered = sorted(it, key=key)
    return itertools.groupby(ordered, key=key)
def pltshow(imgArr_list, info_list=None, figsize=(10, 10), info_fontsize=8):
    """Display image arrays in a near-square matplotlib grid.

    :param imgArr_list: list of image arrays to imshow.
    :param info_list: optional per-image titles (empty when None).
    :param figsize: figure size passed to plt.subplots.
    :param info_fontsize: title font size.
    """
    n_rows = int(len(imgArr_list) ** 0.5)
    n_cols = len(imgArr_list) / n_rows
    # Round the column count up when it is not an integer.
    n_cols = int(n_cols) if int(n_cols) == n_cols else int(n_cols) + 1
    fig, axes = plt.subplots(n_rows, n_cols, figsize=figsize)
    for idx, img in enumerate(imgArr_list):
        ax = axes.flatten()[idx]
        ax.imshow(img)
        ax.set_axis_off()
        title = "" if info_list is None else info_list[idx]
        ax.set_title(title, size=info_fontsize)
import tensorflow as tf
def convert_ckpt2pb(ckpt_fp, pb_fp, output_name_list):
    """Freeze a TF checkpoint into a .pb graph file.

    Example:
        convert_ckpt2pb('ckpt/cnn.ckpt-10000', 'pb/cnn.pb', ['output/proba'])

    :param ckpt_fp: path of the input .ckpt |
    :param pb_fp: path of the output .pb |
    :param output_name_list: names of the output nodes, usually just one,
        e.g. ['output/proba']; note these are node names, without the
        trailing ":0" tensor index
    """
    saver = tf.train.import_meta_graph(ckpt_fp + '.meta', clear_devices=True)
    with tf.Session() as sess:
        saver.restore(sess, ckpt_fp)  # restore the graph and its variable values
        # Convert every variable reachable from the outputs into a constant.
        output_graph_def = tf.graph_util.convert_variables_to_constants(
            sess=sess,
            input_graph_def=sess.graph_def,
            output_node_names=output_name_list)
        with tf.gfile.GFile(pb_fp, "wb") as f:  # save the frozen model
            f.write(output_graph_def.SerializeToString())  # serialize to bytes
        print("%d ops in the final graph." % len(output_graph_def.node))  # how many op nodes the final graph has
def predict_pb(pb_fp, output_tensor_name, feed_dict_names):
    """Run one inference against a frozen .pb model.

    Example:
        feed_dict_names = {'input:0': xxx, 'keep_prob:0': 1.0, 'is_training:0': 0}
        predict_pb('model.pb', 'output/proba:0', feed_dict_names)

    :param pb_fp: path of the .pb file
    :param output_tensor_name: name of the tensor to fetch (with ":0")
    :param feed_dict_names: dict mapping tensor names (not op names) to
        the values to feed
    :return: the fetched tensor's value
    """
    with tf.Graph().as_default():
        output_graph_def = tf.GraphDef()
        # Restore the graph definition from the serialized file.
        with open(pb_fp, "rb") as f:
            output_graph_def.ParseFromString(f.read())
        tf.import_graph_def(output_graph_def, name="")
        # Compute the output.
        with tf.Session() as sess:
            # NOTE(review): presumably a no-op here since a frozen graph
            # holds only constants -- confirm, then consider removing.
            sess.run(tf.global_variables_initializer())
            output_tensor = sess.graph.get_tensor_by_name(output_tensor_name)
            fd = {sess.graph.get_tensor_by_name(k): v for k, v in feed_dict_names.items()}
            output = sess.run(output_tensor, feed_dict=fd)
    return output
def predict_ckpt(ckpt_fp, output_tensor_name, feed_dict_names):
    """Run one inference using a TF checkpoint.

    Example:
        feed_dict_names = {'input:0': xxx, 'keep_prob:0': 1.0, 'is_training:0': 0}
        predict_ckpt('model.ckpt', 'output/proba:0', feed_dict_names)

    :param ckpt_fp: checkpoint path (without the .meta suffix)
    :param output_tensor_name: name of the tensor to fetch (with ":0")
    :param feed_dict_names: dict mapping tensor names (not op names) to
        the values to feed
    :return: the fetched tensor's value
    """
    # Restore the graph structure and its variable values.
    saver = tf.train.import_meta_graph(ckpt_fp + '.meta', clear_devices=True)
    with tf.Session() as sess:
        saver.restore(sess, ckpt_fp)
        # BUG FIX: the original ran tf.global_variables_initializer() here,
        # which re-initializes every variable and discards the values just
        # restored from the checkpoint. The restore already supplies all
        # variable values, so no initializer is needed.
        output_tensor = sess.graph.get_tensor_by_name(output_tensor_name)
        fd = {sess.graph.get_tensor_by_name(k): v for k, v in feed_dict_names.items()}
        output = sess.run(output_tensor, feed_dict=fd)
    return output
def print_all_tensors_from_ckpt(ckpt_fp):
    """Print the tensors stored in a checkpoint file.

    NOTE(review): with tensor_name=None and all_tensors=False this prints
    names/metadata rather than all values -- confirm that is intended.
    """
    from tensorflow.python.tools.inspect_checkpoint import print_tensors_in_checkpoint_file
    print_tensors_in_checkpoint_file(ckpt_fp, tensor_name=None, all_tensors=False)
def get_all_variable(target_graph=None):
    """Return tf.global_variables() of `target_graph` (default graph if None)."""
    graph = tf.get_default_graph() if target_graph is None else target_graph
    with graph.as_default():
        return tf.global_variables()
def print_all_variable(target_graph=None, name=True, value=True):
    """Print one line per global variable, optionally with name and/or value."""
    for var in get_all_variable(target_graph):
        line = ""
        if name:
            line += f"[name]: {var.name}"
        if value:
            line += f"[values]: {var}"
        print(line)
def get_all_operation(target_graph=None):
    """Return every tf.Operation of `target_graph` (default graph if None)."""
    graph = tf.get_default_graph() if target_graph is None else target_graph
    return graph.get_operations()
def print_all_operation(target_graph=None, name=True, type=True, value=True):
    """Print one line per graph operation, optionally with name, type and values.

    (The `type` parameter shadows the builtin, but renaming it would break
    keyword callers, so the original name is kept.)
    """
    for op in get_all_operation(target_graph):
        line = ""
        if name:
            line += f"[name]: {op.name}"
        if type:
            line += f"[type]: {op.type}"
        if value:
            line += f"[values]: {op.values}"
        print(line)
def restore_ckpt(ckpt_fp, sess):
    """Restore variable values from a checkpoint into `sess`'s graph.

    A tf.train.Saver is built over the graph's current variables, so the
    graph must already be fully constructed before calling this.
    """
    with sess.graph.as_default():
        saver = tf.train.Saver()
        saver.restore(sess, ckpt_fp)
# todo: without test
def restore_pb(pb_fp, target_graph=None):
    """Load a frozen .pb graph definition into `target_graph` (default graph if None)."""
    if target_graph is None:
        target_graph = tf.get_default_graph()
    with target_graph.as_default():
        output_graph_def = tf.GraphDef()
        # Parse the serialized GraphDef and merge it into the graph.
        with tf.gfile.GFile(pb_fp, "rb") as f:
            output_graph_def.ParseFromString(f.read())
        tf.import_graph_def(output_graph_def)
def img2tfrecord(img_dir, output_dir):
    """Convert a directory of images to TFRecord format. Not implemented yet."""
    raise NotImplementedError
import threading
from threading import Thread, Event
import time
import sys
from queue import Queue
# This module pokes at Python-3-only Thread internals (e.g. _bootstrap),
# so warn at import time when running under Python 2.
is_py2 = sys.version[0] == '2'
if is_py2:
    print("TimeoutTest not available in py2")
############################################
# Override some of Thread's internals so a child thread can actually be
# stopped. See:
# https://stackoverflow.com/questions/14482230/why-does-the-python-threading-thread-object-has-start-but-not-stop?noredirect=1&lq=1
############################################
class StopThread(StopIteration):
    """Raised inside a traced thread to force it to exit."""
    pass


# Make the thread machinery treat StopThread like SystemExit, so a thread
# killed this way dies silently instead of printing a traceback.
# NOTE(review): relies on `except SystemExit` inside the threading module
# resolving to this module attribute -- a CPython detail; confirm.
threading.SystemExit = SystemExit, StopThread
# Approach 1: install a per-thread trace function; after stop() is called,
# the next traced event raises StopThread inside the target thread.
class Thread2(threading.Thread):
    """Thread that can be stopped via a sys.settrace-based interrupt."""

    def stop(self):
        # Flag checked by the tracer on every trace event.
        self.__stop = True

    def _bootstrap(self):
        # Refuse to run when a global trace hook is already installed,
        # since this thread is about to set its own.
        if threading._trace_hook is not None:
            raise ValueError('Cannot run thread with tracing!')
        self.__stop = False
        sys.settrace(self.__trace)
        super(Thread2, self)._bootstrap()

    def __trace(self, frame, event, arg):
        # Raising here aborts the thread at its next traced event.
        if self.__stop:
            raise StopThread()
        return self.__trace
# Approach 2 -- faster: the tracer is a closure over a local flag instead
# of doing attribute lookups on every trace event.
class Thread3(threading.Thread):
    """Stoppable thread; a stop() callable is attached to the instance
    from inside _bootstrap."""

    def _bootstrap(self, stop_thread=False):
        def stop():
            nonlocal stop_thread
            stop_thread = True
        self.stop = stop

        def tracer(*_):
            if stop_thread:
                raise StopThread()
            return tracer
        sys.settrace(tracer)
        super(Thread3, self)._bootstrap()
class TimeoutThread():
    """Run `target` in a stoppable thread and kill it after `time_limit` seconds.

    start() blocks until the target finishes or the time limit expires,
    and returns the target's result (or None on timeout).
    """

    def __init__(self, target, args=(), time_limit=1, delta=0.05):
        # `delta` is accepted for API symmetry with StoppableThread but is
        # unused here.
        self.resultQ = Queue()
        _target = self._put_res_in_resultQ(target)
        self.t = Thread3(target=_target, args=args)
        self.t.setDaemon(True)
        self.timing_thread = Thread3(target=self.timing, args=(time_limit,))
        self.timing_thread.setDaemon(True)

    def timing(self, timeout):
        # Wait 0.1s past the timeout before killing the worker, so prints
        # issued right at the deadline still get through.
        time.sleep(timeout + 0.1)
        print("timing计时完毕,kill目标子线程..")
        self.t.stop()

    def start(self):
        self.t.start()
        self.timing_thread.start()
        # Block the caller until the worker finishes or the timer fires.
        # NOTE(review): this is a busy-wait that burns a CPU core while
        # waiting -- consider joining with a timeout instead.
        while True:
            if self.t.isAlive() and self.timing_thread.isAlive():
                continue
            else:
                break
        self.t.stop()
        self.timing_thread.stop()
        q = self.resultQ.queue
        res_ = q[0] if len(q) > 0 else None
        return res_

    def _put_res_in_resultQ(self, func):
        """Wrap `func` (the target) so its return value is pushed onto self.resultQ.

        :param func: the target callable
        :return: the wrapped callable
        """
        def wraps(*args, **kwargs):
            res = func(*args, **kwargs)
            print("func运行完毕,结果将要放入resultQ队列中")
            self.resultQ.put(res)
        return wraps
def func_tset_timeout(inp, timeout=5):
    """Demo target: sleep `timeout` seconds (logging each tick), then return inp + 100."""
    for tick in range(timeout):
        time.sleep(1)
        print("子线程执行中:{}".format(tick))
    return inp + 100
# --- manual demo for TimeoutThread (module-level; runs on import) ---
time_limit = 3
time_during = 3
to_inp = 2
print(">>> Test TimeoutThread")
b0 = time.time()
t0 = TimeoutThread(target=func_tset_timeout, args=(to_inp, time_during), time_limit=time_limit)
result = t0.start()
print("result is {}, time:{}".format(result, time.time() - b0))
print("等待 {}秒".format(time_limit))
time.sleep(time_limit)
print("result is {}, time:{}".format(result, time.time() - b0))
print("子线程不再输出日志,的确被kill掉")
# Deliberately aborts here, so the StoppableThread demo below never runs.
assert False
##############################################################################
# join()-based approach: implements a stoppable thread, but after control
# returns to the main thread the child thread keeps running in the background
##############################################################################
class StoppableThread(Thread):
    """join()-based stoppable thread (the worker may linger in the background).

    Two values end up in resultQ, in timing-dependent order:
      a. the target's result, pushed by the target itself when it finishes
         (the target function must do this explicitly), and
      b. a None pushed by start() once the time limit expires.
    So read only resultQ.queue[0]; if it is None, the target timed out.
    """

    def __init__(self, resultQ, target, args=(), time_limit=1, delta=0.05):
        # NOTE(review): super(Thread, self) skips Thread.__init__ and calls
        # object.__init__ -- confirm this is intentional.
        super(Thread, self).__init__()
        self.resultQ = resultQ
        self.delta = delta
        self.stopped = False
        self.t = Thread2(target=target, args=args)
        self.t.setDaemon(True)
        self.timing_thread = Thread2(target=self.timing, args=(time_limit,))
        self.timing_thread.setDaemon(True)

    def timing(self, timeout):
        # Flag the timeout; start()'s join loop notices it and bails out.
        time.sleep(timeout)
        self.stopped = True

    def start(self):
        self.t.start()
        self.timing_thread.start()
        # Join in `delta`-second slices until the timer fires or the
        # target has pushed its result.
        while not self.stopped and len(self.resultQ.queue) == 0:
            self.t.join(self.delta)
        time.sleep(0.05)
        self.resultQ.put(None)
        self.t.stop()
# Shared queue the StoppableThread demo target pushes its result into.
q = Queue()
def func_tes_stoppable(inp, timeout=5):
    """Demo target: increment `inp` once per second for `timeout` seconds,
    then push the result onto the module-level queue `q`."""
    print("input: ", inp)
    for i in range(timeout):
        time.sleep(1)
        print(i)
        inp += 1
    print("processed: ", inp)
    q.put(inp)
# --- manual demo for StoppableThread ---
# (unreachable in practice: the `assert False` above fires first)
time_limit = 30
time_during = 3
to_inp = 2
print(">>> Test StoppableThread")
b = time.time()
t1 = StoppableThread(resultQ=q, target=func_tes_stoppable, args=(to_inp, time_during), time_limit=time_limit)
t1.start()
print("result in queue: {}, time: {}".format(q.queue[0], time.time() - b))
time.sleep(time_during)
print("result in queue: {}, time: {}".format(q.queue[0], time.time() - b))
import sys
import urllib.request
import numpy as np
from io import BytesIO
import importlib
from PIL import Image
from sklearn.cluster import MiniBatchKMeans
import logging
import os
# Silence WARNING-and-below logging at import time.
# NOTE(review): logging.disable is process-global, so this also mutes the
# host application's warnings -- confirm that is acceptable.
logging.disable(logging.WARNING)
def _get_module(name):
# return sys.modules.get(name, default=__import__(name))
return sys.modules.get(name, importlib.import_module(name))
def _is_url(inp_str):
return inp_str.startswith("http://") or inp_str.startswith("https://")
def cos_sim(arr1, arr2):
    """Cosine similarity between two 1-D vectors."""
    denom = np.linalg.norm(arr1) * np.linalg.norm(arr2)
    return np.dot(arr1, arr2) / denom
def cos_sim_nested(hist1, hist2, weight=None):
    """Mean (optionally per-row weighted) cosine similarity between the
    rows of two 2-D arrays of equal shape."""
    n_rows = hist1.shape[0]
    if weight is None:
        weight = [1] * n_rows
    sims = [cos_sim(hist1[r, :], hist2[r, :]) * weight[r]
            for r in range(n_rows)]
    return sum(sims) / len(sims)
class Load:
    """Image loaders that accept either a URL or a local file path."""

    @staticmethod
    def image_by_cv2_from(img_path: str):
        """
        automatic discern input between url and local-file
        default format is [BGR].
        return None if load url request failed
        """
        cv2 = _get_module("cv2")
        if _is_url(img_path):
            # load from url
            if ".webp" in img_path:
                assert False, "at 2019-10-28, cv2 does not support webp (it's a python-opencv binding bug) " \
                              "refer to this: https://github.com/opencv/opencv/issues/14978\n\n" \
                              "*********** use Loader.load_image_by_pil_from() **********"
            try:
                url_response = urllib.request.urlopen(img_path)
                img_array = np.array(bytearray(url_response.read()), dtype=np.uint8)
                # -1 flag: decode with the image's original channels/depth.
                img = cv2.imdecode(img_array, -1)
                return img
            except Exception as e:
                print("load img from url failed: " + str(e))
                return None
        else:
            # load from file
            return cv2.imread(img_path)

    @staticmethod
    def image_by_pil_from(img_path: str):
        """
        automatic discern input between url and local-file
        default format is [RGB].
        return None if load url request failed
        """
        if _is_url(img_path):
            # load from url
            try:
                url_response = urllib.request.urlopen(img_path)
                image = Image.open(BytesIO(url_response.read()))
                return image
            except Exception as e:
                print("[ERROR] load img from url failed: " + str(e))
                return None
        else:
            # load from file
            return Image.open(img_path)

    @staticmethod
    def image_by_caffe_from_fp(img_path: str):
        """Load via caffe.io.load_image and wrap the result in a single-element
        list; local file paths only (URLs are rejected)."""
        if _is_url(img_path):
            assert False, "caffe only support load from local file"
        caffe = _get_module("caffe")
        return [caffe.io.load_image(img_path)]
class Convert:
    """Conversions from cv2 BGR ndarrays to other image representations."""

    @staticmethod
    def pre_cv2caffe(img_inp):
        """BGR ndarray -> single-element list holding an RGB array scaled by 1/255."""
        cv2 = _get_module("cv2")
        rgb = cv2.cvtColor(img_inp, cv2.COLOR_BGR2RGB) / 255.0
        return [rgb]

    @staticmethod
    def pre_cv2Image(img_inp):
        """BGR ndarray -> PIL Image built from the RGB-converted array."""
        cv2 = _get_module("cv2")
        rgb = cv2.cvtColor(img_inp, cv2.COLOR_BGR2RGB)
        return Image.fromarray(rgb)
class PlotHist:
    """Histogram plotting helpers for images in various representations."""

    @staticmethod
    def plot_hist_from_pil_image(img_inp):
        """Plot the normalized per-channel histogram of an RGB PIL image."""
        assert img_inp.mode == 'RGB', "only support rgb"
        plt = _get_module("matplotlib.pyplot")
        color_map = ['r', 'g', 'b']
        # PIL's histogram() returns the three channels concatenated (3*256 bins).
        for idx, each_hist in enumerate(np.reshape(img_inp.histogram(), (3, 256))):
            plt.plot(each_hist / sum(each_hist), color=color_map[idx])
            plt.xlim([0, 256])
        plt.show()

    @staticmethod
    def plot_hist_from_cv2_image(img_inp):
        """Plot the normalized per-channel histogram of an image given as an ndarray.

        NOTE(review): Image.fromarray keeps the array's channel order, so a
        true cv2 BGR array is plotted with the r/b colors swapped -- confirm.
        """
        plt = _get_module("matplotlib.pyplot")
        img_rgb = Image.fromarray(img_inp)
        color_map = ['r', 'g', 'b']
        for idx, each_hist in enumerate(np.reshape(img_rgb.histogram(), (3, 256))):
            plt.plot(each_hist / sum(each_hist), color=color_map[idx])
            plt.xlim([0, 256])
        plt.show()

    @staticmethod
    def plot_hist_from_arr_image(img_inp):
        """Alias for plot_hist_from_cv2_image (same ndarray input)."""
        return PlotHist.plot_hist_from_cv2_image(img_inp)

    @staticmethod
    def plot_hist_crops(hist_inp, crops_inp, color_map=None, dont_show=None):
        """Plot each crop next to its (pre-computed) channel histogram.

        :param hist_inp: iterable of per-crop histograms, shaped
            (channels, bins); the y-axis is clamped to [0, 1], so values
            are presumably normalized -- confirm with the producer.
        :param crops_inp: 2-D object ndarray of PIL crops (e.g. from
            StandardCV.custom_cut_to_matrix).
        :param color_map: per-channel plot colors; inferred from the image
            mode when None.
        :param dont_show: list of colors whose channel curves are skipped.
        :return: the matplotlib Figure.
        """
        plt = _get_module("matplotlib.pyplot")
        GRAY_MODE = ['F', 'L']
        default_color_map = {
            "RGB": ['r', 'g', 'b'],
            "YCbCr": ['r', 'black', 'b'],
            "F": ['black'],
            "L": ['black'],
        }
        row, col = crops_inp.shape
        # Flatten the crop grid into a flat sequence of images.
        crops_inp_img = np.concatenate(crops_inp, axis=0)
        mode = crops_inp_img[0].mode
        if mode == 'RGB':
            print("not recommend RGB for similarity, try YCbCr")
        is_gray = mode in GRAY_MODE
        if dont_show is None:
            dont_show = []
        if color_map is None:
            try:
                color_map = default_color_map[mode]
            except Exception:
                print("can't auto define color_map. must feed this param")
        fig = plt.figure(figsize=(10, 10))
        fig.set_tight_layout(True)
        for idx, (crop_hist, crop_img) in enumerate(zip(hist_inp, crops_inp_img)):
            # Left cell of the pair: the histogram curves.
            fig.add_subplot(row, 2 * col, 2 * idx + 1)
            plt.title("hist_{}".format(mode))
            for idx_, i in enumerate(crop_hist):
                if color_map[idx_] in dont_show:
                    continue
                plt.plot(i, color=color_map[idx_])
            plt.xlim([0, crop_hist.shape[-1]])  # set the x-axis max by hand (default uses the data's max)
            plt.ylim([0, 1])
            # Right cell of the pair: the crop image itself.
            fig.add_subplot(row, 2 * col, 2 * idx + 2)
            plt.title("RGB_{}".format(idx))
            plt.axis('off')
            plt.tight_layout(h_pad=1, w_pad=0.2)
            if is_gray:
                plt.imshow(crop_img, cmap='gray')
            else:
                plt.imshow(crop_img.convert("RGB"), cmap='viridis')
        return fig
class StandardCV:
    """Classic (non-NN) image feature helpers: grid cropping, histograms, LBP."""

    @staticmethod
    def custom_cut_to_matrix(imgPIL, row, col):
        """Split a PIL image into a (row x col) grid of crops.

        Crop index layout (row-major):
          0 1 2
          3 4 5
          6 7 8
        Each cell spans [base_w*c, base_w*(c+1)) x [base_h*r, base_h*(r+1)).
        Returns an object ndarray of PIL crops with shape (row, col).
        """
        crop_matrix = []
        base_w, base_h = imgPIL.size[0] // col, imgPIL.size[1] // row
        for r in range(row):
            crop_one_row = []
            for c in range(col):
                anchor_lt = (base_w * c, base_h * r)  # top-left anchor
                anchor_rb = (base_w * (c + 1), base_h * (r + 1))  # bottom-right anchor
                crop_one_row.append(imgPIL.crop((anchor_lt[0], anchor_lt[1], anchor_rb[0], anchor_rb[1])))
            crop_matrix.append(crop_one_row)
        return np.array(crop_matrix, dtype='object')

    @staticmethod
    def get_hist(imgPIL, row=1, col=1, bins=None, weights=None, return_crops=False):
        """
        Input must be a PIL image.
        The image is split into row x col regions and each region's histogram
        is computed independently, then stacked into [row*col, channels, bins].
        :param bins: bins per channel, default [32]*3. Histograms shorter than
            the largest bin count are zero-padded up to it, e.g. with
            R=[16,16,16,32] and G=[32,48], G is padded to [32,48,0,0].
        :param weights: per-region histogram weight, default all 1.0
        :param return_crops: also return the (row, col) object array of crops
        """
        if bins is None:
            bins = [32] * 3
        if weights is None:
            weights = [1.0] * row * col
        is_gray = len(np.array(imgPIL).shape) == 2
        # split into a (row, col) grid of crops
        all_crops = StandardCV.custom_cut_to_matrix(imgPIL, row, col)  # come with row x col
        all_crops_flatten = np.concatenate(all_crops, axis=0)  # flatten to (row*col, h, w, c)
        hist_features = []
        for cropIdx, cropPIL in enumerate(all_crops_flatten):
            cropArr = np.array(cropPIL)
            # np.array(crop).shape[-1] is the channel count;
            # np.histogram returns (hist, bin_edges) — we keep only hist
            if is_gray:
                # wrap the single gray histogram in a list for a uniform shape
                hist_of_all_channel = [np.histogram(cropArr, bins=bins[0], range=[0, 256])[0]]
            else:
                hist_of_all_channel = [np.histogram(cropArr[:, :, c], bins=bins[c], range=[0, 256])[0] for c in range(cropArr.shape[-1])]
            # normalize each channel's histogram to sum to 1
            hist_of_all_channel = [hist / sum(hist) for hist in hist_of_all_channel]
            # channels may use different bin counts; right-pad up to the max,
            # e.g. R=[16,16,16,32] and G=[32,48] -> G becomes [32,48,0,0]
            max_bin = max(bins)
            hist_of_all_channel = [np.pad(hist, (0, max_bin - bins[idx]), 'constant') for idx, hist in enumerate(hist_of_all_channel)]
            # apply this region's weight to every channel
            hist_of_all_channel = [i * weights[cropIdx] for i in hist_of_all_channel]
            hist_features.append(np.stack(hist_of_all_channel, axis=0))
        hist_features = np.stack(hist_features, axis=0)  # [row*col, 3, bins]
        if return_crops:
            return hist_features, all_crops
        else:
            return hist_features

    @staticmethod
    def get_lbp_imgPIL(imgPIL, R=1, P=None):
        """Local-binary-pattern transform (via skimage) of the grayscale image; P defaults to 8*R."""
        local_binary_pattern = _get_module("skimage.feature").local_binary_pattern
        if P is None:
            P = 8 * R
        return Image.fromarray(np.array(local_binary_pattern(np.array(imgPIL.convert("L")), P=P, R=R)))
class Vectorize:
    """Image-to-vector transformers: NN embeddings, region histograms, theme colors."""

    class VectorFromNN:
        import tensorflow as tf
        import tensorflow_hub as hub

        class _BasePattern:
            """Shared plumbing for TF-Hub feature-vector models."""
            default_model = None
            url = None
            IMAGE_SHAPE = (224, 224)  # 224x224 fits most hub models

            # subclasses may override when the model needs something special
            def get_default_model(self):
                if self.default_model is None:
                    # NOTE(review): bare `tf`/`hub` resolve from module globals,
                    # not from the class-level imports above — confirm the module
                    # imports tensorflow/tensorflow_hub at top level.
                    self.default_model = tf.keras.Sequential([hub.KerasLayer(self.url, trainable=False)])
                return self.default_model

            # subclasses may override when the model needs something special
            def pre_format_pilImage(self, imgPIL):
                # resize to the model's input shape and scale to [0, 1]
                return np.array(imgPIL.resize(self.IMAGE_SHAPE)) / 255.0

            # fundamental function: everything below funnels into this
            def imgArr2vec_batch(self, imgArr_batch, model=None):
                if model is None:
                    model = self.get_default_model()
                return model.predict(imgArr_batch)

            # ->imgArr2vec->imgArr2vec_batch
            def imgPIL2vec(self, imgPIL, model=None):
                imgArr = self.pre_format_pilImage(imgPIL)
                return self.imgArr2vec(imgArr, model=model)

            # ->imgArr2vec_batch
            def imgPIL2vec_batch(self, imgPIL_batch, model=None):
                imgArr_batch = np.array([self.pre_format_pilImage(imgPIL) for imgPIL in imgPIL_batch])
                return self.imgArr2vec_batch(imgArr_batch, model=model)

            # ->imgArr2vec_batch
            def imgArr2vec(self, imgArr, model=None):
                return self.imgArr2vec_batch(imgArr[np.newaxis, :], model=model)[0]

        class InceptionV3(_BasePattern):
            url = "https://tfhub.dev/google/imagenet/inception_v3/feature_vector/4"
            IMAGE_SHAPE = (299, 299)

        class InceptionV1(_BasePattern):
            url = "https://tfhub.dev/google/imagenet/inception_v1/feature_vector/4"

        class InceptionResNet(_BasePattern):
            url = "https://tfhub.dev/google/imagenet/inception_resnet_v2/feature_vector/4"
            IMAGE_SHAPE = (299, 299)

    class VectorFromHist:
        class ColorHist:
            """Region-wise color-histogram vectorizer built on StandardCV.get_hist."""

            def __init__(self, crop_shape=(4, 4), bins=None, p_weight=None):
                # BUGFIX: a user-supplied `bins` was previously dropped
                # (self.bins was only assigned in the default branch).
                self.bins = [32, 64, 32] if bins is None else bins
                # BUGFIX: same for p_weight — honor the caller's value.
                if p_weight is None:
                    if crop_shape == (4, 4):
                        # center regions weigh slightly more than the border
                        p_weight = np.array([[1., 1., 1., 1.],
                                             [1., 1.2, 1.2, 1.],
                                             [1., 1.2, 1.2, 1.],
                                             [1., 1., 1., 1.]])
                    else:
                        p_weight = np.ones(crop_shape)
                self.p_weight = p_weight
                self.crop_shape = crop_shape
                assert self.crop_shape == self.p_weight.shape, f"切割shape为 {crop_shape}, 权重shape为 {p_weight.shape}, 二者必须一致"

            def imgPIL_to_Vec(self, imgPIL):
                """
                Reshape the histograms so each region keeps all of its channels'
                bins together: shape = [row*col, channels*bins] (a nested vector).
                Averaging per-region similarities later works better than
                flattening every region's histogram into one big vector.
                """
                hist_features = StandardCV.get_hist(imgPIL, row=self.crop_shape[0], col=self.crop_shape[1], bins=self.bins,
                                                    weights=self.p_weight.flatten(), return_crops=False)
                return hist_features.reshape(self.crop_shape[0] * self.crop_shape[1], -1)

            def imgPIL2Vec(self, imgPIL):
                # alias kept for API symmetry with the NN vectorizers
                return self.imgPIL_to_Vec(imgPIL)

        class LBPHist:
            """Region-wise LBP-histogram vectorizer (texture features)."""

            def __init__(self, crop_shape=(4, 4), bins=None, p_weight=None, lbp_R=1, lbp_P=None):
                # BUGFIX: user-supplied bins/p_weight were previously dropped,
                # the same defect as in ColorHist.
                self.bins = [32] if bins is None else bins
                if p_weight is None:
                    if crop_shape == (4, 4):
                        p_weight = np.array([[1., 1., 1., 1.],
                                             [1., 1.2, 1.2, 1.],
                                             [1., 1.2, 1.2, 1.],
                                             [1., 1., 1., 1.]])
                    else:
                        p_weight = np.ones(crop_shape)
                self.p_weight = p_weight
                self.crop_shape = crop_shape
                assert self.crop_shape == self.p_weight.shape
                self.local_binary_pattern = _get_module("skimage.feature").local_binary_pattern
                self.lbp_R = lbp_R
                self.lbp_P = 8 * lbp_R if lbp_P is None else lbp_P

            def imgPIL_to_Vec(self, imgPIL):
                """LBP-transform the image, then histogram it region-by-region."""
                imgLBP = StandardCV.get_lbp_imgPIL(imgPIL, self.lbp_R, self.lbp_P)
                hist_features = StandardCV.get_hist(imgLBP, row=self.crop_shape[0], col=self.crop_shape[1], bins=self.bins,
                                                    weights=self.p_weight.flatten(), return_crops=False)
                return hist_features.reshape(self.crop_shape[0] * self.crop_shape[1], -1)

            def imgPIL2Vec(self, imgPIL):
                return self.imgPIL_to_Vec(imgPIL)

    class VectorFromThemeColor:
        class KMeansColor:
            """Dominant-color extractor: k-means cluster centers over all pixels."""

            def __init__(self, cluster=5):
                self.km = MiniBatchKMeans(n_clusters=cluster)

            def imgPIL2Vec(self, imgPIL):
                img_arr = np.array(imgPIL)
                return self.imgArr2Vec(img_arr)

            def imgArr2Vec(self, imgArr):
                h, w, c = imgArr.shape
                pixel = np.reshape(imgArr, (h*w, c))
                self.km.fit(pixel)
                return self.km.cluster_centers_
class ImageGenerator:
    """Minimal directory-based image batch generator (PIL + numpy).

    Expected layout: root_path/<class_name>/<image files>. Hidden entries
    (leading '.') are skipped; only extensions in `allow_type` are read.
    """
    allow_type = ['.jpg', '.png']
    MODE_CATEGORICAL = 'categorical'
    MODE_SPARSE = 'sparse'

    def __init__(self, rescale=1 / 255.0):
        self.rescale = rescale

    def process_img_pil(self, imgPIL):
        # currently the only preprocessing is rescaling pixels into [0, 1]
        return np.array(imgPIL) * self.rescale

    def flow_from_directory(self, root_path, classes=None, image_shape=None, batch_size=10, class_mode=None, verbose=True):
        """Yield (image_batch, label_batch) tuples.

        Labels are one-hot lists in 'categorical' mode and integer indices in
        'sparse' mode. Images are resized to `image_shape` and rescaled.
        """
        if class_mode is None:
            class_mode = self.MODE_CATEGORICAL
        if image_shape is None:
            image_shape = (224, 224)
        if classes is None:
            find_classes = [os.path.join(root_path, i) for i in os.listdir(root_path) if not i.startswith(".")]
        else:
            find_classes = [os.path.join(root_path, i) for i in os.listdir(root_path) if not i.startswith(".") and i in classes]
        if class_mode == self.MODE_CATEGORICAL:
            one_hot = [0] * len(find_classes)
            class_dict = {}
            for idx, class_ in enumerate(find_classes):
                one_hot_ = one_hot.copy()
                one_hot_[idx] = 1
                class_dict.update({class_: one_hot_})
        elif class_mode == self.MODE_SPARSE:
            class_dict = {class_: idx for idx, class_ in enumerate(find_classes)}
        else:
            assert False, f"class_mode should be supplied (currently is '{class_mode}')"
        allow_files = [(class_dir, filename) for class_dir in find_classes for filename in os.listdir(class_dir) if
                       os.path.splitext(filename)[-1] in self.allow_type]
        # BUGFIX: class_dir already contains root_path; joining root_path again
        # duplicated the prefix whenever root_path was a relative path.
        fp_list = [(os.path.join(class_dir, filename), class_dict[class_dir]) for (class_dir, filename) in allow_files]
        if verbose:
            print(f"Found {len(fp_list)} images belonging to {len(class_dict)} classes")
            for k, v in class_dict.items():
                print(v, len([cla for fp, cla in fp_list if cla == v]), k)
        for i in range(0, len(fp_list), batch_size):
            batch = [(self.process_img_pil(Image.open(fp).resize(image_shape)), cla) for fp, cla in fp_list[i:i + batch_size]]
            # BUGFIX: building one ragged np.array of (img, label) pairs relied on
            # implicit object arrays, which modern numpy rejects; stack images and
            # labels separately instead.
            imgs = np.stack([img for img, _ in batch])
            labels = np.array([cla for _, cla in batch])
            yield imgs, labels
if __name__ == '__main__':
    # manual smoke tests; each loads two sample images from URLs, so this
    # block needs network access. plt rendering is fairly slow.

    # verify that grid cropping works
    def test_case0():
        print(">>> 验证切图是否正常")
        cropsPIL_matrix = StandardCV.custom_cut_to_matrix(test_img, row=4, col=4)
        for i in range(cropsPIL_matrix.shape[0]):
            for j in range(cropsPIL_matrix.shape[1]):
                plt.subplot(4, 4, i * 4 + (j + 1))
                plt.imshow(cropsPIL_matrix[i, j])
        print(">>> 展示切割后各部分的直方图")
        hist, crops = StandardCV.get_hist(test_img, row=4, col=4, return_crops=True)
        PlotHist.plot_hist_crops(hist, crops)
        plt.show()

    # verify histogram-based similarity
    def test_case1():
        print(">>> 验证StandardCV的直方图方式相似度量")
        transformer = Vectorize.VectorFromHist.ColorHist()
        transformer_lbp = Vectorize.VectorFromHist.LBPHist()
        print("多个子区域的直方图独立计算cos_sim然后取平均: {:.4f}".format(cos_sim_nested(transformer.imgPIL_to_Vec(test_img), transformer.imgPIL_to_Vec(test_img2))))
        print("多个子区域的LBP直方图独立计算cos_sim然后取平均: {:.4f}".format(cos_sim_nested(transformer_lbp.imgPIL_to_Vec(test_img), transformer_lbp.imgPIL_to_Vec(test_img2))))

    # verify NN embedding similarity
    def test_case2():
        print(">>> 验证NN相似向量")
        transformer = Vectorize.VectorFromNN.InceptionV3()
        vec1 = transformer.imgPIL2vec(test_img)
        vec2 = transformer.imgPIL2vec(test_img2)
        print("NN取最后一层计算cos_sim: {:.4f}".format(cos_sim(vec1, vec2)))
        print(f"  向量一: {vec1.shape} {type(vec1)}\n", vec1)
        print(f"  向量二: {vec2.shape} {type(vec2)}\n", vec2)

    # verify the image generator works (uses a hard-coded local sample dir)
    def test_case3():
        root_path = "/Users/zac/Downloads/Image_samples"
        g = ImageGenerator()
        img_generator = g.flow_from_directory(root_path, classes=['cg_background', 'landscape'])
        for image_batch, label_batch in img_generator:
            print("Image batch shape: ", image_batch.shape)
            print("Label batch shape: ", label_batch.shape)
            break
    import matplotlib.pyplot as plt
    test_url = "http://www.kedo.gov.cn/upload/resources/image/2017/04/24/150703.png"
    test_url2 = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcThwIfzyp-Rv5zYM0fwPmoM5k1f9eW3ETYuPcL8j2I0TuG0tdb5&s"
    test_img = Load.image_by_pil_from(test_url).convert("YCbCr")
    test_img2 = Load.image_by_pil_from(test_url2).convert("YCbCr")
    test_img.show()
    test_img2.show()
    # test_case0()
    # test_case1()
    # test_case2()
    test_case3()
import threading
import multiprocessing
import time
import sys
import os
from queue import Queue,Empty
# Python 2 lacks the tracing/threading features this module relies on.
is_py2 = sys.version[0] == '2'
if is_py2:
    print("Timeout not available for py2")
class ThreadStopException(Exception):
    """Raised inside a traced thread to force it to terminate."""
    pass
class _BasicStopThread(threading.Thread):
    """A Thread that can be stopped from outside.

    A trace function is installed inside the thread; after ``self.stop()`` is
    called, the next traced event raises ThreadStopException in the thread,
    terminating it. NOTE: overrides CPython's private ``Thread._bootstrap``,
    and sys.settrace adds per-event overhead to the thread.
    """

    def _bootstrap(self, stop_thread=False):
        # `stop_thread` is the kill flag captured by both closures below;
        # stop() flips it to True from any other thread.
        def stop():
            nonlocal stop_thread
            stop_thread = True
        self.stop = stop

        def tracer(*_):
            if stop_thread:
                raise ThreadStopException()
            return tracer  # keep tracing nested frames

        # install the tracer in *this* thread, then run the normal bootstrap
        sys.settrace(tracer)
        super(_BasicStopThread, self)._bootstrap()
class TimeoutThread():
    """
    Run a target function in a stoppable thread under a wall-clock time limit.

    Usage:
        t2 = TimeoutThread(target=target_func, args=(30, 8), time_limit=3)
        res2 = t2.start()

    start() blocks until the target finishes or the limit expires and returns
    the target's result, or None on timeout.
    """
    def __init__(self, target, args=(), time_limit=1, verbose=False):
        self.resultQ = Queue()
        _target = self._put_res_in_result_queue(target)
        # thread that runs the target function
        self.target_thread = _BasicStopThread(target=_target, args=args)
        # .daemon attribute replaces the deprecated setDaemon() (Py3.10+)
        self.target_thread.daemon = True
        # watchdog thread that counts down the time limit
        self.timing_thread = _BasicStopThread(target=self.timing, args=(time_limit, verbose, ))
        self.timing_thread.daemon = True

    def timing(self, timeout, verbose=False):
        """Watchdog body: sleep `timeout` seconds, then stop the target thread."""
        pid = os.getpid()
        for i in range(timeout):
            time.sleep(1)
            if verbose:
                print("[pid]:{} [timing]:{}".format(pid, i))
        # wait 0.1s extra so prints issued right at the deadline can still run
        time.sleep(0.1)
        if verbose:
            print("[pid]: {} timing计时完毕,kill目标子线程..".format(pid))
        self.target_thread.stop()

    def start(self):
        """Start both threads, wait for target completion or timeout, return the result."""
        self.target_thread.start()
        self.timing_thread.start()
        # block until the target finishes or the watchdog fires.
        # Thread.isAlive() was removed in Python 3.9 — use is_alive().
        while True:
            if self.target_thread.is_alive() and self.timing_thread.is_alive():
                time.sleep(0.01)  # yield instead of busy-spinning a CPU core
                continue
            else:
                break
        self.target_thread.stop()
        self.timing_thread.stop()
        q = self.resultQ.queue
        res_ = q[0] if len(q) > 0 else None
        return res_

    def _put_res_in_result_queue(self, func):
        """
        Wrap the target so its return value is pushed onto self.resultQ.
        :param func: the target callable
        :return: the wrapped callable
        """
        def wraps(*args, **kwargs):
            res = func(*args, **kwargs)
            self.resultQ.put(res)
        return wraps
class TimeoutProcess():
    """Run `target` in a daemon subprocess, terminated after `time_limit` seconds.

    start() blocks until completion or timeout and returns the target's result
    (passed back through a multiprocessing.Queue), or None on timeout.
    NOTE(review): `delta` is stored but currently unused.
    """
    def __init__(self, target, args=(), time_limit=1, verbose=False, delta=0.5):
        self.time_limit = time_limit
        self.delta = delta
        self.verbose = verbose
        self.resultQ = multiprocessing.Queue()
        _target = self._put_res_in_result_queue(target)
        self.p = multiprocessing.Process(target=_target, args=args)
        self.p.daemon = True
    def start(self):
        """Start the process, poll until done or timed out, return the result or None."""
        self.p.start()
        b_time = time.time()
        while True:
            time.sleep(0.25)
            if time.time()-b_time >= self.time_limit or not self.p.is_alive():
                # poll every 0.25s; stop once timed out or the process exited
                break
        self.p.terminate()
        try:
            res_ = self.resultQ.get_nowait()  # ==> get(block=False)
        except Empty as e:
            # queue empty means the target never produced a result (timeout/kill)
            res_ = None
        return res_
    def _put_res_in_result_queue(self, func):
        """
        Wrap the target so its return value is pushed onto self.resultQ.
        :param func: the target callable
        :return: the wrapped callable
        """
        def wraps(*args, **kwargs):
            res = func(*args, **kwargs)
            self.resultQ.put(res)
            # print("func运行完毕,结果将要放入resultQ队列中")
        return wraps
import logging.handlers
import logging
import sys
import time
import datetime
import time
def _formatter(timezone=8):
    """Build a logging.Formatter whose timestamps are UTC shifted by `timezone` hours.

    NOTE: logging.Formatter.converter is a class attribute, so the shift
    applies process-wide; converter must be a function taking (sec, what),
    hence the odd signature of the local helper.
    """
    def _shifted(sec, what):
        shifted_time = datetime.datetime.utcnow() + datetime.timedelta(hours=timezone)
        return shifted_time.timetuple()

    logging.Formatter.converter = _shifted
    fmt = '|%(asctime)s| [%(levelname)s] [%(filename)s-%(lineno)d] %(message)s'
    return logging.Formatter(fmt, "%Y-%m-%d %H:%M:%S")
def get_console_logger(loglevel=logging.DEBUG, loggername=None, timezone=8):
    """Return a logger writing to stderr; the name defaults to the current unix timestamp."""
    name = loggername if loggername is not None else str(int(time.time()))
    logger = logging.getLogger(name)
    handler = logging.StreamHandler(sys.stderr)
    handler.setLevel(loglevel)
    handler.setFormatter(_formatter(timezone=timezone))
    logger.addHandler(handler)
    return logger
def get_file_logger(info_log_file, err_log_file, min_default_loglevel=logging.INFO, max_default_loglevel=logging.INFO,
                    loggername=None, timezone=8):
    """Return a logger with two daily-rotating file handlers (15 days retained).

    Records with a level in [min_default_loglevel, max_default_loglevel] go to
    info_log_file; ERROR records go to err_log_file.
    """
    name = loggername if loggername is not None else str(int(time.time()))
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # handlers: where records get written
    fh_default = logging.handlers.TimedRotatingFileHandler(
        info_log_file, when='D', interval=1, backupCount=15, encoding='utf-8')
    fh_err = logging.handlers.TimedRotatingFileHandler(
        err_log_file, when='D', interval=1, backupCount=15, encoding='utf-8')

    # filters: which levels each handler accepts
    filter_default = logging.Filter()
    filter_default.filter = lambda record: min_default_loglevel <= record.levelno <= max_default_loglevel
    filter_err = logging.Filter()
    filter_err.filter = lambda record: record.levelno == logging.ERROR

    # formatters: normalized output with timezone-shifted timestamps
    fh_default.setFormatter(_formatter(timezone=timezone))
    fh_default.addFilter(filter_default)
    fh_err.setFormatter(_formatter(timezone=timezone))
    fh_err.addFilter(filter_err)

    logger.addHandler(fh_default)
    logger.addHandler(fh_err)
    return logger
import sys
import os
def main():
    """CLI entry point: convert darknet .weights to a TF SavedModel, or run detection with one."""
    if sys.argv[1] in ['help','h','-h','--help']:
        print("""
    USAGE:
       [convert] will convert .weights to .pb file. default saved in same dir.
       [detect] will use .pb file to detect objects. labeled image saved in current dir.
       - cli:
         convert_yolo convert <classes_num:int> <darknet_weight_fp>
         convert_yolo detect <classes_names_fp> <tf_pb_fp> <img_fp>
       - py:
         python convert_yolo.py convert <classes_num:int> <darknet_weight_fp>
         python convert_yolo.py detect <classes_names_fp> <tf_pb_fp> <img_fp>
        """)
        sys.exit(0)
    print("sys.argv as follow:")
    print(sys.argv)
    try:
        opt=sys.argv[1]
    except Exception as e:
        print("USAGE: python convert_yolo.py {convert,detect}")
        sys.exit(0)
    # imports deferred to here because importing tensorflow is slow
    from .utils import load_darknet_weights,draw_outputs
    from ._Yolo_v3 import YoloV3
    import tensorflow as tf
    import numpy as np
    from PIL import Image
    if opt=="convert":
        try:
            classes=int(sys.argv[2])
            darknet_weight_fp=sys.argv[3]
        except:
            print("USAGE: python utils.py convert <classes_num> <darknet_weight_fp>")
            print("Example: python utils.py convert 4 ./custom_w/yolo_w_final.weights")
            sys.exit(0)
        # output SavedModel dir: weight path with extension replaced by "_pb"
        pb_fp=os.path.splitext(darknet_weight_fp)[0]+"_pb"
        print(f"opt:{opt}\nclasses:{classes}\ndarknet_weight_fp:{darknet_weight_fp}\npb_fp:{pb_fp}"+"\n"*3)
        yolo = YoloV3(classes=classes)
        load_darknet_weights(yolo,darknet_weight_fp)
        # sanity check: run a random input through the network
        img = np.random.random((1, 320, 320, 3)).astype(np.float32)
        output = yolo(img)
        print("available.")
        # yolo.save_weights(os.path.splitext(darknet_weight_fp)[0]+"_ckpt")
        tf.saved_model.save(yolo, pb_fp)
        print(".pb file saved at {}".format(pb_fp))
    elif opt=="detect":
        try:
            class_names_fp=sys.argv[2]
            tf_pb_fp=sys.argv[3]
            img_fp=sys.argv[4]
        except:
            print("USAGE: python utils.py detect <classes_names_fp> <tf_pb_fp> <img_fp>")
            sys.exit(0)
        with open(class_names_fp,"r") as fr:
            class_names=[i.strip() for i in fr.readlines()]
        classes=len(class_names)
        print(f"class_names: {','.join(class_names)}")
        # init yolo from the SavedModel produced by the convert branch
        # yolo = YoloV3(classes=classes)
        # load_darknet_weights(yolo,darknet_weight_fp)
        # yolo.load_weights(tf_weight_fp)
        yolo = tf.saved_model.load(tf_pb_fp)
        # detect: resize to 416x416, scale to [0,1], add the batch dimension
        img = np.array(Image.open(img_fp).resize((416,416)))/255
        img_batch = np.expand_dims(img,axis=0)
        img_inp = tf.cast(tf.convert_to_tensor(img_batch), tf.float32)
        boxes, scores, classes, nums = yolo(img_inp)
        img_draw = draw_outputs(img, (boxes[0], scores[0], classes[0], nums[0]), class_names)
        img_arr = (img_draw*255).astype(np.uint8)
        Image.fromarray(img_arr).save("./utils_detected.jpg")
        print("img saved at ./utils_detected.jpg")
    else:
        print("USAGE: python utils.py {convert,detect}")
if __name__ == "__main__":
    # real entry point is main() above; currently disabled in favor of a placeholder
    # main()
    print("abc")
import numpy as np
import tensorflow as tf
import cv2
import sys
import os
from PIL import Image
# Sub-model names of the full YOLOv3 keras graph, in darknet weight-file order.
YOLOV3_LAYER_LIST = [
    'yolo_darknet',
    'yolo_conv_0',
    'yolo_output_0',
    'yolo_conv_1',
    'yolo_output_1',
    'yolo_conv_2',
    'yolo_output_2',
]

# Sub-model names of the tiny variant, same ordering convention.
YOLOV3_TINY_LAYER_LIST = [
    'yolo_darknet',
    'yolo_conv_0',
    'yolo_output_0',
    'yolo_conv_1',
    'yolo_output_1',
]
def load_darknet_weights(model, weights_file, tiny=False):
    """Load a darknet-format .weights file into a keras YoloV3/YoloV3-tiny model.

    Walks the sub-models in darknet serialization order and fills each conv2d
    layer (and its batch-norm layer, when present) from the binary stream.
    Raises AssertionError if bytes remain after loading (layout mismatch).
    """
    # `with` guarantees the handle is closed even when the final assert fires
    # (the original leaked the handle on that path).
    with open(weights_file, 'rb') as wf:
        # darknet header: major, minor, revision, images-seen counter, padding
        major, minor, revision, seen, _ = np.fromfile(wf, dtype=np.int32, count=5)

        if tiny:
            layers = YOLOV3_TINY_LAYER_LIST
        else:
            layers = YOLOV3_LAYER_LIST

        for layer_name in layers:
            sub_model = model.get_layer(layer_name)
            for i, layer in enumerate(sub_model.layers):
                if not layer.name.startswith('conv2d'):
                    continue
                # a conv immediately followed by batch_norm has no serialized bias
                batch_norm = None
                if i + 1 < len(sub_model.layers) and \
                        sub_model.layers[i + 1].name.startswith('batch_norm'):
                    batch_norm = sub_model.layers[i + 1]

                print(f"{sub_model.name}/{layer.name} {'bn' if batch_norm else 'bias'}")

                filters = layer.filters
                size = layer.kernel_size[0]
                in_dim = layer.input_shape[-1]

                if batch_norm is None:
                    conv_bias = np.fromfile(wf, dtype=np.float32, count=filters)
                else:
                    # darknet stores [beta, gamma, mean, variance]
                    bn_weights = np.fromfile(
                        wf, dtype=np.float32, count=4 * filters)
                    # tf expects [gamma, beta, mean, variance]
                    bn_weights = bn_weights.reshape((4, filters))[[1, 0, 2, 3]]

                # darknet kernel shape: (out_dim, in_dim, height, width)
                conv_shape = (filters, in_dim, size, size)
                # np.product was removed in NumPy 2.0; np.prod is the supported name
                conv_weights = np.fromfile(
                    wf, dtype=np.float32, count=np.prod(conv_shape))
                # tf kernel shape: (height, width, in_dim, out_dim)
                conv_weights = conv_weights.reshape(
                    conv_shape).transpose([2, 3, 1, 0])

                if batch_norm is None:
                    layer.set_weights([conv_weights, conv_bias])
                else:
                    layer.set_weights([conv_weights])
                    batch_norm.set_weights(bn_weights)

        assert len(wf.read()) == 0, 'failed to read all data'
def draw_outputs(img_inp, outputs, class_names):
    """Draw detection boxes and '<class>=<score>' labels on a copy of img_inp."""
    boxes, objectness, classes, nums = outputs
    # boxes are normalized (x, y) coordinates; scale by (width, height)
    wh = np.flip(img_inp.shape[0:2])
    # pick a drawing intensity matching the image's value range ([0,1] vs [0,255])
    pixel_max_v = 255 if np.max(img_inp) > 1 else 1
    img = img_inp.copy()
    for i in range(nums):
        top_left = tuple((np.array(boxes[i][0:2]) * wh).astype(np.int32))
        bottom_right = tuple((np.array(boxes[i][2:4]) * wh).astype(np.int32))
        img = cv2.rectangle(img, top_left, bottom_right, (pixel_max_v, 0, 0), 2)
        label = '{}={:.4f}'.format(class_names[int(classes[i])], objectness[i])
        # cv2.putText args: image, text, bottom-left corner, font, scale, color, thickness
        img = cv2.putText(img, label, top_left, cv2.FONT_HERSHEY_COMPLEX,
                          1, (0, 0, pixel_max_v), 2)
    return img
def draw_labels(x, y, class_names):
    """Draw ground-truth boxes and class names on image tensor x."""
    img = x.numpy()
    boxes, classes = tf.split(y, (4, 1), axis=-1)
    classes = classes[..., 0]
    # normalized box coords are scaled by (width, height)
    wh = np.flip(img.shape[0:2])
    for box, cls in zip(boxes, classes):
        top_left = tuple((np.array(box[0:2]) * wh).astype(np.int32))
        bottom_right = tuple((np.array(box[2:4]) * wh).astype(np.int32))
        img = cv2.rectangle(img, top_left, bottom_right, (255, 0, 0), 2)
        img = cv2.putText(img, class_names[cls], top_left,
                          cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (0, 0, 255), 2)
    return img
def freeze_all(model, frozen=True):
    """Recursively set trainability: frozen=True disables training on the model/layer and its children."""
    model.trainable = not frozen
    if isinstance(model, tf.keras.Model):
        for sub_layer in model.layers:
            freeze_all(sub_layer, frozen)
def trace_model_call(model):
    """Build a tf.function wrapping `model` with an explicit input signature,
    usable as a SavedModel serving signature. Outputs are returned as a dict
    keyed '<output_name>_<index>'.

    NOTE(review): relies on TF-private modules (tensorflow.python.*) and may
    break across TF versions — confirm against the installed TF release.
    """
    from tensorflow.python.framework import tensor_spec
    from tensorflow.python.eager import def_function
    from tensorflow.python.util import nest
    inputs = model.inputs
    input_names = model.input_names
    # one TensorSpec per model input, preserving shape/dtype/name
    input_signature = []
    for input_tensor, input_name in zip(inputs, input_names):
        input_signature.append(tensor_spec.TensorSpec(
            shape=input_tensor.shape, dtype=input_tensor.dtype,
            name=input_name))

    @def_function.function(input_signature=input_signature, autograph=False)
    def _wrapped_model(*args):
        # single-input models take the tensor directly; multi-input take a list
        inputs = args[0] if len(input_signature) == 1 else list(args)
        outputs_list = nest.flatten(model(inputs=inputs))
        output_names = model.output_names
        return {"{}_{}".format(kv[0], i): kv[1] for i, kv in enumerate(
            zip(output_names, outputs_list))}

    return _wrapped_model
if __name__ == "__main__":
    # standalone CLI mirroring convert_yolo.main(): "convert" turns darknet
    # weights into a SavedModel; "detect" runs inference on one image.
    print(sys.argv)
    try:
        opt=sys.argv[1]
    except Exception as e:
        print("USAGE: python utils.py {convert,detect}")
        sys.exit(0)
    if opt=="convert":
        try:
            classes=int(sys.argv[2])
            darknet_weight_fp=sys.argv[3]
        except:
            print("USAGE: python utils.py convert <classes_num> <darknet_weight_fp>")
            print("Example: python utils.py convert 4 ./custom_w/yolo_w_final.weights")
            sys.exit(0)
        # output SavedModel dir: weight path with extension replaced by "_pb"
        pb_fp=os.path.splitext(darknet_weight_fp)[0]+"_pb"
        print(f"opt:{opt}\nclasses:{classes}\ndarknet_weight_fp:{darknet_weight_fp}\npb_fp:{pb_fp}")
        from _Yolo_v3 import YoloV3
        yolo = YoloV3(classes=classes)
        load_darknet_weights(yolo,darknet_weight_fp)
        # sanity check: run a random input through the network
        img = np.random.random((1, 320, 320, 3)).astype(np.float32)
        output = yolo(img)
        print("available.")
        # yolo.save_weights(os.path.splitext(darknet_weight_fp)[0]+"_ckpt")
        tf.saved_model.save(yolo, pb_fp, signatures=trace_model_call(yolo))
    elif opt=="detect":
        try:
            class_names_fp=sys.argv[2]
            tf_weight_fp=sys.argv[3]
            img_fp=sys.argv[4]
        except:
            print("USAGE: python utils.py detect <classes_names_fp> <tf_weight_fp> <img_fp>")
            sys.exit(0)
        with open(class_names_fp,"r") as fr:
            class_names=[i.strip() for i in fr.readlines()]
        classes=len(class_names)
        print(f"class_names: {','.join(class_names)}")
        # init yolo from keras checkpoint weights
        from _Yolo_v3 import YoloV3
        yolo = YoloV3(classes=classes)
        # load_darknet_weights(yolo,darknet_weight_fp)
        yolo.load_weights(tf_weight_fp)
        # detect img (NOTE(review): unlike convert_yolo.main, pixels are not
        # rescaled to [0,1] here — confirm intended)
        img = np.array(Image.open(img_fp).resize((416,416)))
        img = np.expand_dims(img,axis=0)
        boxes, scores, classes, nums = yolo(img)
        img = draw_outputs(img, (boxes[0], scores[0], classes[0], nums[0]), class_names)
        cv2.imwrite("./utils_detected.jpg", img)
        pass
    else:
        print("USAGE: python utils.py {convert,detect}")
import numpy as np
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import (
Add,
Concatenate,
Conv2D,
Input,
Lambda,
LeakyReLU,
MaxPool2D,
UpSampling2D,
ZeroPadding2D,
)
from tensorflow.keras.regularizers import l2
from tensorflow.keras.losses import (
binary_crossentropy,
sparse_categorical_crossentropy
)
class BatchNormalization(tf.keras.layers.BatchNormalization):
    """
    Make trainable=False freeze BN for real (the og version is sad):
    BN statistics only update when the layer is both in training mode AND trainable.
    """

    def call(self, x, training=False):
        # keras may pass training=None; treat that as inference
        if training is None:
            training = tf.constant(False)
        # gate training mode on the layer's trainable flag
        training = tf.logical_and(training, self.trainable)
        return super().call(x, training)
def broadcast_iou(box_1, box_2):
    """Pairwise IoU between every box in box_1 and every box in box_2.

    box_1: (..., (x1, y1, x2, y2))
    box_2: (N, (x1, y1, x2, y2))
    Returns an IoU tensor of shape (..., N).
    """
    # broadcast boxes against each other
    box_1 = tf.expand_dims(box_1, -2)
    box_2 = tf.expand_dims(box_2, 0)
    # new_shape: (..., N, (x1, y1, x2, y2))
    new_shape = tf.broadcast_dynamic_shape(tf.shape(box_1), tf.shape(box_2))
    box_1 = tf.broadcast_to(box_1, new_shape)
    box_2 = tf.broadcast_to(box_2, new_shape)

    # intersection width/height, clamped at 0 for disjoint boxes
    int_w = tf.maximum(tf.minimum(box_1[..., 2], box_2[..., 2]) -
                       tf.maximum(box_1[..., 0], box_2[..., 0]), 0)
    int_h = tf.maximum(tf.minimum(box_1[..., 3], box_2[..., 3]) -
                       tf.maximum(box_1[..., 1], box_2[..., 1]), 0)
    int_area = int_w * int_h
    box_1_area = (box_1[..., 2] - box_1[..., 0]) * \
        (box_1[..., 3] - box_1[..., 1])
    box_2_area = (box_2[..., 2] - box_2[..., 0]) * \
        (box_2[..., 3] - box_2[..., 1])
    # IoU = intersection / union
    return int_area / (box_1_area + box_2_area - int_area)
class FLAGS:
    """Static detection-time configuration (stands in for absl FLAGS)."""
    # maximum number of boxes per image
    yolo_max_boxes=100
    # iou threshold
    yolo_iou_threshold=0.5
    # score threshold
    yolo_score_threshold=0.5
# YOLOv3 anchor boxes (pixel sizes), normalized by the 416px training resolution.
yolo_anchors = np.array([(10, 13), (16, 30), (33, 23), (30, 61), (62, 45),
                         (59, 119), (116, 90), (156, 198), (373, 326)],
                        np.float32) / 416
# anchor indices assigned to each of the three output scales
yolo_anchor_masks = np.array([[6, 7, 8], [3, 4, 5], [0, 1, 2]])

# anchors for the tiny model (two output scales)
yolo_tiny_anchors = np.array([(10, 14), (23, 27), (37, 58),
                              (81, 82), (135, 169), (344, 319)],
                             np.float32) / 416
yolo_tiny_anchor_masks = np.array([[3, 4, 5], [0, 1, 2]])
def DarknetConv(x, filters, size, strides=1, batch_norm=True):
    """Darknet conv unit: Conv2D, optionally followed by BatchNorm + LeakyReLU."""
    if strides != 1:
        # strided ("downsample") convs use explicit top-left half padding
        x = ZeroPadding2D(((1, 0), (1, 0)))(x)
    padding = 'same' if strides == 1 else 'valid'
    x = Conv2D(filters=filters, kernel_size=size, strides=strides,
               padding=padding, use_bias=not batch_norm,
               kernel_regularizer=l2(0.0005))(x)
    if batch_norm:
        x = BatchNormalization()(x)
        x = LeakyReLU(alpha=0.1)(x)
    return x
def DarknetResidual(x, filters):
    """Residual unit: 1x1 bottleneck then 3x3 conv, added back to the input."""
    shortcut = x
    y = DarknetConv(x, filters // 2, 1)
    y = DarknetConv(y, filters, 3)
    return Add()([shortcut, y])
def DarknetBlock(x, filters, blocks):
    """A strided downsampling conv followed by `blocks` residual units."""
    y = DarknetConv(x, filters, 3, strides=2)
    for _ in range(blocks):
        y = DarknetResidual(y, filters)
    return y
def Darknet(name=None):
    """Darknet-53 backbone; returns the (x_36, x_61, final) feature taps used as FPN skips."""
    x = inputs = Input([None, None, 3])
    x = DarknetConv(x, 32, 3)
    x = DarknetBlock(x, 64, 1)
    x = DarknetBlock(x, 128, 2)  # skip connection
    x = x_36 = DarknetBlock(x, 256, 8)  # skip connection
    x = x_61 = DarknetBlock(x, 512, 8)
    x = DarknetBlock(x, 1024, 4)
    return tf.keras.Model(inputs, (x_36, x_61, x), name=name)
def DarknetTiny(name=None):
    """Tiny darknet backbone; returns the (x_8 skip tap, final features) pair."""
    x = inputs = Input([None, None, 3])
    x = DarknetConv(x, 16, 3)
    x = MaxPool2D(2, 2, 'same')(x)
    x = DarknetConv(x, 32, 3)
    x = MaxPool2D(2, 2, 'same')(x)
    x = DarknetConv(x, 64, 3)
    x = MaxPool2D(2, 2, 'same')(x)
    x = DarknetConv(x, 128, 3)
    x = MaxPool2D(2, 2, 'same')(x)
    x = x_8 = DarknetConv(x, 256, 3)  # skip connection
    x = MaxPool2D(2, 2, 'same')(x)
    x = DarknetConv(x, 512, 3)
    x = MaxPool2D(2, 1, 'same')(x)  # stride-1 pool: keeps the spatial size
    x = DarknetConv(x, 1024, 3)
    return tf.keras.Model(inputs, (x_8, x), name=name)
def YoloConv(filters, name=None):
    """FPN neck block: optional upsample + concat of a skip tap, then five
    alternating 1x1/3x3 DarknetConv layers. Accepts a tensor or a
    (features, skip) tuple."""
    def yolo_conv(x_in):
        if isinstance(x_in, tuple):
            inputs = Input(x_in[0].shape[1:]), Input(x_in[1].shape[1:])
            x, x_skip = inputs

            # concat with skip connection
            x = DarknetConv(x, filters, 1)
            x = UpSampling2D(2)(x)
            x = Concatenate()([x, x_skip])
        else:
            x = inputs = Input(x_in.shape[1:])

        x = DarknetConv(x, filters, 1)
        x = DarknetConv(x, filters * 2, 3)
        x = DarknetConv(x, filters, 1)
        x = DarknetConv(x, filters * 2, 3)
        x = DarknetConv(x, filters, 1)
        return Model(inputs, x, name=name)(x_in)
    return yolo_conv
def YoloConvTiny(filters, name=None):
    """Tiny-model neck block: same tuple/tensor handling as YoloConv but with a
    single 1x1 conv instead of the five-layer stack."""
    def yolo_conv(x_in):
        if isinstance(x_in, tuple):
            inputs = Input(x_in[0].shape[1:]), Input(x_in[1].shape[1:])
            x, x_skip = inputs

            # concat with skip connection
            x = DarknetConv(x, filters, 1)
            x = UpSampling2D(2)(x)
            x = Concatenate()([x, x_skip])
        else:
            x = inputs = Input(x_in.shape[1:])
            x = DarknetConv(x, filters, 1)
        return Model(inputs, x, name=name)(x_in)
    return yolo_conv
def YoloOutput(filters, anchors, classes, name=None):
    """Detection head: 3x3 conv then 1x1 conv, reshaped to
    (batch, grid, grid, anchors, classes + 5) raw predictions."""
    def yolo_output(x_in):
        x = inputs = Input(x_in.shape[1:])
        x = DarknetConv(x, filters * 2, 3)
        # no BN on the prediction conv; 5 = (x, y, w, h, objectness)
        x = DarknetConv(x, anchors * (classes + 5), 1, batch_norm=False)
        x = Lambda(lambda x: tf.reshape(x, (-1, tf.shape(x)[1], tf.shape(x)[2],
                                            anchors, classes + 5)))(x)
        return tf.keras.Model(inputs, x, name=name)(x_in)
    return yolo_output
def yolo_boxes(pred, anchors, classes):
    """Decode one scale's raw head output.

    Returns (bbox, objectness, class_probs, pred_box) where bbox is
    (x1, y1, x2, y2) normalized to [0, 1] and pred_box keeps the raw
    (x, y, w, h) used by the loss.
    """
    # pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...classes))
    grid_size = tf.shape(pred)[1]
    box_xy, box_wh, objectness, class_probs = tf.split(
        pred, (2, 2, 1, classes), axis=-1)

    box_xy = tf.sigmoid(box_xy)
    objectness = tf.sigmoid(objectness)
    class_probs = tf.sigmoid(class_probs)
    pred_box = tf.concat((box_xy, box_wh), axis=-1)  # original xywh for loss

    # !!! grid[x][y] == (y, x)
    grid = tf.meshgrid(tf.range(grid_size), tf.range(grid_size))
    grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2)  # [gx, gy, 1, 2]

    # add each cell's offset, then normalize by the grid size
    box_xy = (box_xy + tf.cast(grid, tf.float32)) / \
        tf.cast(grid_size, tf.float32)
    # width/height: exp of the raw value, scaled by the anchor priors
    box_wh = tf.exp(box_wh) * anchors

    box_x1y1 = box_xy - box_wh / 2
    box_x2y2 = box_xy + box_wh / 2
    bbox = tf.concat([box_x1y1, box_x2y2], axis=-1)

    return bbox, objectness, class_probs, pred_box
def yolo_nms(outputs, anchors, masks, classes):
    """Merge the per-scale predictions and run class-wise non-max suppression.

    Returns (boxes, scores, classes, valid_detections), padded to
    FLAGS.yolo_max_boxes entries per image.
    """
    # boxes, conf, type — one list entry per output scale
    b, c, t = [], [], []

    # flatten each scale's grid into one detection axis
    for o in outputs:
        b.append(tf.reshape(o[0], (tf.shape(o[0])[0], -1, tf.shape(o[0])[-1])))
        c.append(tf.reshape(o[1], (tf.shape(o[1])[0], -1, tf.shape(o[1])[-1])))
        t.append(tf.reshape(o[2], (tf.shape(o[2])[0], -1, tf.shape(o[2])[-1])))

    bbox = tf.concat(b, axis=1)
    confidence = tf.concat(c, axis=1)
    class_probs = tf.concat(t, axis=1)

    # per-class score = objectness * class probability
    scores = confidence * class_probs
    boxes, scores, classes, valid_detections = tf.image.combined_non_max_suppression(
        boxes=tf.reshape(bbox, (tf.shape(bbox)[0], -1, 1, 4)),
        scores=tf.reshape(
            scores, (tf.shape(scores)[0], -1, tf.shape(scores)[-1])),
        max_output_size_per_class=FLAGS.yolo_max_boxes,
        max_total_size=FLAGS.yolo_max_boxes,
        iou_threshold=FLAGS.yolo_iou_threshold,
        score_threshold=FLAGS.yolo_score_threshold
    )

    return boxes, scores, classes, valid_detections
def YoloV3(size=None, channels=3, anchors=yolo_anchors,
           masks=yolo_anchor_masks, classes=80, training=False):
    """Assemble the full three-scale YOLOv3 graph.

    In training mode the model emits the raw detection heads (which feed
    YoloLoss); otherwise each head is decoded with yolo_boxes and the
    scales are merged through yolo_nms.
    """
    inputs = Input([size, size, channels], name='input')
    # backbone, with the two skip connections used by the FPN-style neck
    skip_36, skip_61, features = Darknet(name='yolo_darknet')(inputs)
    # coarsest scale
    features = YoloConv(512, name='yolo_conv_0')(features)
    head_0 = YoloOutput(512, len(masks[0]), classes, name='yolo_output_0')(features)
    # middle scale (upsample + concat with skip_61 inside YoloConv)
    features = YoloConv(256, name='yolo_conv_1')((features, skip_61))
    head_1 = YoloOutput(256, len(masks[1]), classes, name='yolo_output_1')(features)
    # finest scale
    features = YoloConv(128, name='yolo_conv_2')((features, skip_36))
    head_2 = YoloOutput(128, len(masks[2]), classes, name='yolo_output_2')(features)
    if training:
        return Model(inputs, (head_0, head_1, head_2), name='yolov3')
    # inference: decode each head, then suppress duplicates across scales
    det_0 = Lambda(lambda t: yolo_boxes(t, anchors[masks[0]], classes),
                   name='yolo_boxes_0')(head_0)
    det_1 = Lambda(lambda t: yolo_boxes(t, anchors[masks[1]], classes),
                   name='yolo_boxes_1')(head_1)
    det_2 = Lambda(lambda t: yolo_boxes(t, anchors[masks[2]], classes),
                   name='yolo_boxes_2')(head_2)
    outputs = Lambda(lambda t: yolo_nms(t, anchors, masks, classes),
                     name='yolo_nms')((det_0[:3], det_1[:3], det_2[:3]))
    return Model(inputs, outputs, name='yolov3')
def YoloV3Tiny(size=None, channels=3, anchors=yolo_tiny_anchors,
               masks=yolo_tiny_anchor_masks, classes=80, training=False):
    """Assemble the tiny YOLOv3 graph (two detection scales).

    Mirrors YoloV3 but with the lighter DarknetTiny backbone and a single
    skip connection. (The training-mode model keeps the name 'yolov3',
    matching the original.)
    """
    inputs = Input([size, size, channels], name='input')
    skip_8, features = DarknetTiny(name='yolo_darknet')(inputs)
    # coarse scale
    features = YoloConvTiny(256, name='yolo_conv_0')(features)
    head_0 = YoloOutput(256, len(masks[0]), classes, name='yolo_output_0')(features)
    # fine scale, fused with the skip connection
    features = YoloConvTiny(128, name='yolo_conv_1')((features, skip_8))
    head_1 = YoloOutput(128, len(masks[1]), classes, name='yolo_output_1')(features)
    if training:
        return Model(inputs, (head_0, head_1), name='yolov3')
    # inference: decode both heads and merge through NMS
    det_0 = Lambda(lambda t: yolo_boxes(t, anchors[masks[0]], classes),
                   name='yolo_boxes_0')(head_0)
    det_1 = Lambda(lambda t: yolo_boxes(t, anchors[masks[1]], classes),
                   name='yolo_boxes_1')(head_1)
    outputs = Lambda(lambda t: yolo_nms(t, anchors, masks, classes),
                     name='yolo_nms')((det_0[:3], det_1[:3]))
    return Model(inputs, outputs, name='yolov3_tiny')
def YoloLoss(anchors, classes=80, ignore_thresh=0.5):
    """Build a Keras-compatible YOLO loss for one output scale.

    anchors: anchor boxes for this scale (same units as in yolo_boxes --
        presumably image-fraction sized; TODO confirm)
    classes: number of object classes
    ignore_thresh: predictions whose best IoU with any true box exceeds
        this are NOT penalized for their objectness (suppresses punishing
        plausible duplicate detections)

    Returns a closure yolo_loss(y_true, y_pred) -> per-sample scalar loss.
    """
    def yolo_loss(y_true, y_pred):
        # 1. transform all pred outputs
        # y_pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...cls))
        pred_box, pred_obj, pred_class, pred_xywh = yolo_boxes(
            y_pred, anchors, classes)
        # pred_xywh keeps sigmoid(xy) and raw wh, matching the space the
        # true boxes are inverted into below
        pred_xy = pred_xywh[..., 0:2]
        pred_wh = pred_xywh[..., 2:4]
        # 2. transform all true outputs
        # y_true: (batch_size, grid, grid, anchors, (x1, y1, x2, y2, obj, cls))
        true_box, true_obj, true_class_idx = tf.split(
            y_true, (4, 1, 1), axis=-1)
        # corner -> center/size representation
        true_xy = (true_box[..., 0:2] + true_box[..., 2:4]) / 2
        true_wh = true_box[..., 2:4] - true_box[..., 0:2]
        # give higher weights to small boxes
        # (2 - w*h ranges up to 2x weight as the box area shrinks)
        box_loss_scale = 2 - true_wh[..., 0] * true_wh[..., 1]
        # 3. inverting the pred box equations
        grid_size = tf.shape(y_true)[1]
        grid = tf.meshgrid(tf.range(grid_size), tf.range(grid_size))
        grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2)
        # map true xy into cell-relative offsets (inverse of yolo_boxes)
        true_xy = true_xy * tf.cast(grid_size, tf.float32) - \
            tf.cast(grid, tf.float32)
        # map true wh into anchor-relative log space; zero out the -inf
        # that log() produces for empty (zero-size) cells
        true_wh = tf.math.log(true_wh / anchors)
        true_wh = tf.where(tf.math.is_inf(true_wh),
                           tf.zeros_like(true_wh), true_wh)
        # 4. calculate all masks
        obj_mask = tf.squeeze(true_obj, -1)
        # ignore false positive when iou is over threshold
        # (per-sample: best IoU of each prediction against the sample's
        # set of real boxes, selected via the objectness mask)
        best_iou = tf.map_fn(
            lambda x: tf.reduce_max(broadcast_iou(x[0], tf.boolean_mask(
                x[1], tf.cast(x[2], tf.bool))), axis=-1),
            (pred_box, true_box, obj_mask),
            tf.float32)
        ignore_mask = tf.cast(best_iou < ignore_thresh, tf.float32)
        # 5. calculate all losses
        xy_loss = obj_mask * box_loss_scale * \
            tf.reduce_sum(tf.square(true_xy - pred_xy), axis=-1)
        wh_loss = obj_mask * box_loss_scale * \
            tf.reduce_sum(tf.square(true_wh - pred_wh), axis=-1)
        obj_loss = binary_crossentropy(true_obj, pred_obj)
        # positives always contribute; negatives only when not ignored
        obj_loss = obj_mask * obj_loss + \
            (1 - obj_mask) * ignore_mask * obj_loss
        # TODO: use binary_crossentropy instead
        class_loss = obj_mask * sparse_categorical_crossentropy(
            true_class_idx, pred_class)
        # 6. sum over (batch, gridx, gridy, anchors) => (batch, 1)
        xy_loss = tf.reduce_sum(xy_loss, axis=(1, 2, 3))
        wh_loss = tf.reduce_sum(wh_loss, axis=(1, 2, 3))
        obj_loss = tf.reduce_sum(obj_loss, axis=(1, 2, 3))
        class_loss = tf.reduce_sum(class_loss, axis=(1, 2, 3))
        return xy_loss + wh_loss + obj_loss + class_loss
    return yolo_loss
import colormath.color_objects
import colormath.color_conversions
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.colors as co
def name2color(name):
    """Look up a named matplotlib color and return it as an (r, g, b) tuple."""
    hexstring = co.cnames[name].lower()
    return co.hex2color(hexstring)
def nm2rgb(inputnm, intensity=1.0):
    '''Convert a wavelength (or uniform range of wavelengths) into RGB colors usable by Python.'''
    lo, hi = np.min(inputnm), np.max(inputnm)
    # outside the visible range there is nothing to render: return black
    if lo <= 350.0 or hi >= 800.0:
        return 0, 0, 0
    # build an SED sampled on the 10 nm grid that SpectralColor expects
    grid = np.arange(340.0, 840.0, 10.0)
    sed = np.zeros_like(grid)
    # snap the requested wavelength(s) onto that grid, and mark the band
    snapped = np.round(np.array(inputnm) / 10.0) * 10.0
    inband = (grid >= np.min(snapped)) & (grid <= np.max(snapped))
    # the units SpectralColor wants are unclear; this normalization was
    # tuned empirically -- total power is split evenly across the band
    sed[inband] = 5.0 / np.sum(inband) * intensity
    spectral = colormath.color_objects.SpectralColor(*sed)
    srgb = colormath.color_conversions.convert_color(
        spectral, colormath.color_objects.sRGBColor)
    return srgb.clamped_rgb_r, srgb.clamped_rgb_g, srgb.clamped_rgb_b
def monochromaticdemo():
    '''Test of nm2rgb, for a single wavelength.'''
    npoints = 1000
    wavelengths = np.linspace(340, 1000, npoints)
    rgb = [nm2rgb(w) for w in wavelengths]
    reds, greens, blues = zip(*rgb)
    plt.ion()
    fi, ax = plt.subplots(2, 1, sharex=True)
    # top panel: the three color-channel curves vs wavelength
    ax[0].plot(wavelengths, reds, color='red')
    ax[0].plot(wavelengths, greens, color='green')
    ax[0].plot(wavelengths, blues, color='blue')
    # bottom panel: random scatter painted with the computed colors
    ax[1].scatter(wavelengths, np.random.normal(0, 1, npoints), color=rgb, s=100)
    ax[1].set_xlim(min(wavelengths), max(wavelengths))
    ax[1].set_xlabel('Wavelength (nm)')
def broadbanddemo(width=50):
    '''Test of nm2rgb, for a range of wavelengths.'''
    npoints = 1000
    centers = np.linspace(340, 1000, npoints)
    # each sample is a band [center - width, center + width]
    rgb = [nm2rgb([c - width, c + width]) for c in centers]
    reds, greens, blues = zip(*rgb)
    plt.ion()
    plt.cla()
    fi, ax = plt.subplots(2, 1, sharex=True)
    # top panel: the three color-channel curves vs band center
    ax[0].plot(centers, reds, color='red')
    ax[0].plot(centers, greens, color='green')
    ax[0].plot(centers, blues, color='blue')
    # bottom panel: random scatter painted with the computed colors
    ax[1].scatter(centers, np.random.normal(0, 1, npoints), color=rgb, s=100)
    ax[1].set_xlim(min(centers), max(centers))
import numpy as np
import parse
# hour-angle scale factors (per hour of RA); 360 deg / 24 h = 15 deg/hr
# NOTE(review): the 'radians' entry repeats the degrees value 360/24 --
# a radians-per-hour scale would presumably be 2*pi/24; this dict is not
# used anywhere in this module, so confirm intent before relying on it.
possiblescales = dict(degrees=360.0/24.0, radians=360.0/24.0)
def format(delimiter='letters'):
    """Return the RA/Dec template string for the requested delimiter style.

    Known styles: 'letters' (e.g. 00h00m00.00s+00d00m00.00s) and ':'
    (e.g. 00:00:00.00 +00:00:00.00). Any other delimiter yields None,
    matching the original behavior.
    NOTE: this intentionally shadows the builtin format() in this module.
    """
    templates = {
        'letters': "{rah:02d}h{ram:02d}m{ras:05.2f}s{sign:1}{decd:02d}d{decm:02d}m{decs:05.2f}s",
        ':': "{rah:02d}:{ram:02d}:{ras:05.2f} {sign:1}{decd:02d}:{decm:02d}:{decs:05.2f}",
    }
    return templates.get(delimiter)
def test(n=100000, verbose=False, delimiter=':'):
    '''Test the conversion between decimals and strings, by generating lots of random positions.

    Round-trips n random (ra, dec) positions through clockify/unclockify
    and asserts the string is reproduced and the coordinates agree to
    within the 0.01s / 0.01" formatting precision.
    NOTE(review): r is always rebuilt with delimiter=':' while s uses the
    caller's delimiter, so assert(r == s) can only hold for delimiter=':'
    -- confirm before calling with delimiter='letters'.
    '''
    for i in range(n):
        # draw a uniformly random position on the (ra, dec) rectangle
        ra, dec = np.random.uniform(0.0, 360.0), np.random.uniform(-90,90)
        s = clockify(ra, dec, delimiter=delimiter)
        nra, ndec = unclockify(s)
        r = clockify(nra, ndec, delimiter=':')
        if verbose:
            print
            print "performing random test #{0}".format(i)
            print ra, dec
            print s
            print nra, ndec
            print r
            print '---------------'
            print ra - nra, dec - ndec
            print
        # the string must survive a round trip exactly...
        assert(r == s)
        # ...and the decimals must agree to the last printed digit
        # (0.01 s of RA = 0.01/3600*15 degrees; 0.01 arcsec for dec)
        assert(np.abs(nra - ra) <= 0.01/3600.0*15)
        assert(np.abs(ndec - dec) <= 0.01/3600.0)
        #print r
        # progress report every 1% of the way through
        if i % (n/100) == 0:
            print '{0}/{1}'.format(i,n)
    print 'clocking and unclocking worked on {0} random positions'.format(n)
def unclockify(s, delimiter='letters'):
    '''Convert a positional string to decimal RA and Dec values (in degrees).

    s : a position string as produced by clockify()
    delimiter : the expected style; note it is auto-overridden below
        whenever the string itself contains 'h' (letters style) or ':'.

    Returns (ra, dec) in decimal degrees.
    '''
    # auto-detect the delimiter style from the string itself
    if 'h' in s:
        delimiter='letters'
    if ':' in s:
        delimiter=':'
    # relax the fixed-width seconds specs so parse() accepts any precision
    d = parse.parse(format(delimiter).replace('{decs:05.2f}', '{decs}' ).replace('{ras:05.2f} ', '{ras} ' ), s)
    # use the builtins float/int rather than np.float/np.int: those numpy
    # aliases were deprecated in NumPy 1.20 and removed in 1.24, so the
    # original code crashes on modern numpy (behavior is otherwise identical)
    ra = 15.0*(d['rah'] + d['ram']/60.0 + float(d['ras'])/3600.0)
    # the sign character becomes +1 or -1 via int('+1') / int('-1')
    dec = int(d['sign']+'1')*(d['decd'] + d['decm']/60.0 + float(d['decs'])/3600.0)
    return ra, dec
def clockify(ra, dec, delimiter='letters'):
    '''Convert an RA and Dec position (in degrees) to a positional string.

    ra, dec : the position in decimal degrees
    delimiter : passed to format() to pick the output style

    Seconds fields are formatted to 0.01s (RA) / 0.01" (Dec); any value
    that would print as exactly "60.00" is wrapped into the next
    minute/hour/degree so the string stays well-formed.
    '''
    # calculate the RA
    # (.astype(int) rather than .astype(np.int): the np.int alias was
    #  deprecated in NumPy 1.20 and removed in 1.24; np.int was simply
    #  the builtin int, so behavior is unchanged)
    rah = np.floor(ra/15.0).astype(int)
    ram = np.floor((ra/15.0 - rah)*60.0).astype(int)
    ras = np.abs((ra/15.0 - rah - ram/60.0)*3600.0)
    # calculate sign
    if dec >= 0:
        sign = '+'
    else:
        sign = '-'
    # calculate the Dec (always on the magnitude; sign carried separately)
    decd = np.floor(np.abs(dec)).astype(int)
    decm = np.floor((np.abs(dec) - decd)*60.0).astype(int)
    decs = np.abs((np.abs(dec) - decd - decm/60.0)*3600.0)
    # wrap RA seconds that would print as "60.00" into the next minute
    if np.round(ras, decimals=2) == 60.00:
        ras = 0.0
        ram += 1
        if ram == 60:
            ram = 0
            rah += 1
            if rah == 24:
                rah = 0
    # wrap Dec arcseconds that would print as "60.00" into the next arcminute
    if np.round(decs, decimals=2) == 60.00:
        decs = 0.0
        decm += 1
        if decm == 60:
            decm = 0
            decd += 1
    return format(delimiter).format(**locals())
import numpy as np
import astropy.coordinates
import astropy.units as u
from astroquery.simbad import Simbad
# these are options for how the posstring can be represented
possible_delimiters = ['letters', ' ', ':']
class Star(object):
    """A star with coordinates, proper motions, and assorted metadata.

    The position is stored as an ICRS astropy SkyCoord in self.icrs, with
    the reference epoch stashed in the coordinate's obstime; proper motions
    live separately in self.pmra/self.pmdec (sky-projected mas/yr).
    Extra per-star metadata goes into the self.attributes dictionary.
    (Python 2 code: uses print statements and dict.iteritems.)
    """
    def __init__(self, name=None, ra=None, dec=None, **kw):
        '''Initialize the star, gathering its data from Simbad;
            if you supply...
                ra and dec (with astropy units attached)
                pmra and pmdec (in sky-projected mas/year) and epoch
            they will be used instead of the Simbad values.'''
        # decide how to initialize: use coordinates? or use simbad? or else?
        if ra is not None and dec is not None:
            # create the star from the input coordinates
            self.fromCoords(ra=ra, dec=dec, name=name, **kw)
        elif name is not None:
            # create the star from Simbad, searching on name
            self.fromSimbad(name, **kw)
        else:
            raise ValueError('please call {0}() either with "name" or "ra" + "dec" defined'.format(self.__class__.__name__))
    @property
    def n(self):
        '''return the length of the arrays (maybe multiple stars as one)'''
        return np.size(self.icrs)
    def __repr__(self):
        '''how should this star object be represented?'''
        return '<{0}>'.format(self.name)
    def fromCoords(self, ra=None, dec=None, name=None, unit=(None,None),
                    pmra=None, pmdec=None, epoch=None, **attributes):
        """
        Populate this star from explicit coordinates (no Simbad lookup).

        [ra] and [dec] must have units associated with them, either as
            ra=359.0*u.deg, dec=89.0*u.deg
        or
            ra="23:59:59",dec="+89:59:59",unit=(u.hourangle, u.deg)
        or
            ra='23h59m59s +89d59m59s'
            (this last one will figure out the dec, if you leave dec=None)
        [name] can be any name you want to give the star
        [pmra] and [pmdec] are the proper motions, in sky-projected mas/yr
        [epoch] is the time, in decimal years, at which the coordinates
            are reported. extrapolated positions at other epochs will
            use the time difference relative to this date.
        [unit] (optional) keyword passed to SkyCoord, for ra/dec
        [**attributes] other properties of the star to store (e.g. V mag.)
        """
        # set the star's name, and its attributes
        self.name = name
        self.attributes=attributes
        # we're going to store the epoch as the coordinate's "obstime"
        try:
            # if an epoch is set, convert it from (e.g. 2015.4) to a Time
            obstime=astropy.time.Time(epoch, format='decimalyear')
        except ValueError:
            # if no epoch is set, ignore it
            obstime=None
        # create a coordinate object, and assign the epoch as its "obstime"
        self.icrs = astropy.coordinates.SkyCoord(ra, dec,
                        obstime=obstime, unit=unit)
        # if you're supplying proper motions, you better supply an epoch too!
        if (pmra is not None) or (pmdec is not None):
            assert(epoch is not None)
        # keep track of the proper motions
        self.pmra, self.pmdec = pmra, pmdec
        # replace non-finite proper motions with zero; the array case is
        # handled elementwise, and the TypeError branch catches scalars
        # NOTE(review): the scalar branch zeroes BOTH components when
        # either is non-finite, and np.isfinite(None) raises TypeError,
        # so calling fromCoords with no proper motions at all appears to
        # fail here -- confirm against the intended call pattern.
        for pm in [self.pmra, self.pmdec]:
            bad = np.isfinite(pm) == False
            try:
                pm[bad] = 0.0
            except TypeError:
                if bad:
                    self.pmra, self.pmdec = 0.0, 0.0
        # make sure all the proper motions are finite!
        # NOTE(review): only pmra is asserted; pmdec is never checked
        assert(np.isfinite(self.pmra).all())
        print ' made {0} from custom values'.format(self)
    def fromSimbad(self, name, **attributes):
        '''search for a star name in Simbad, and pull down its data'''
        # add a few extra pieces of information to the search
        Simbad.reset_votable_fields()
        Simbad.add_votable_fields('pm')
        Simbad.add_votable_fields('sptype')
        Simbad.add_votable_fields('flux(V)')
        Simbad.add_votable_fields('flux(K)')
        # send the query to Simbad
        self.table = Simbad.query_object(name)
        # pull out the official Simbad name
        self.name = name
        self.simbadname = self.table['MAIN_ID'].data[0]
        # prefer the Simbad identifier when it is the shorter of the two
        if len(self.simbadname) < len(self.name):
            print "renaming {1} to {0}".format(self.simbadname, self.name)
            self.name = self.simbadname
        ra = self.table['RA'].data[0]
        dec = self.table['DEC'].data[0]
        # Simbad positions are reported at epoch J2000.0
        obstime = astropy.time.Time(2000.0, format='decimalyear')
        self.icrs = astropy.coordinates.SkyCoord(ra, dec,
                    unit=(u.hourangle, u.deg),
                    frame='icrs', obstime=obstime)
        self.pmra = self.table['PMRA'].data[0] # in (projected) mas/yr
        self.pmdec = self.table['PMDEC'].data[0] # in mas/yr
        # stash magnitude + spectral type, then any caller-supplied extras
        self.attributes = {}
        self.attributes['V'] = float(self.table['FLUX_V'].data[0])
        self.attributes['comment'] = self.table['SP_TYPE'].data[0]
        for k, v in attributes.iteritems():
            self.attributes[k] = v
        print ' made {0} from SIMBAD'.format(self)
    def atEpoch(self, epoch, format='decimalyear'):
        '''return the positions, at a particular epoch
            [epoch] is by default decimal years
                but other formats are OK if you change format'''
        # set the desired new time
        desiredtime = astropy.time.Time(epoch, format=format)
        # how much time has elapsed?
        # NOTE(review): this bare except also hides any unrelated error,
        # not just a missing obstime -- consider narrowing it
        try:
            timeelapsed = (desiredtime - self.icrs.obstime).to('year').value
        except:
            timeelapsed = 0.0
            print 'UH-OH! It looks like no proper motion epoch was defined.'
            print '    Returning input coordinates.'
        # at what rate do the coordinates change?
        # (pmra is sky-projected, so dividing by cos(dec) converts it
        #  into a rate of change of the RA coordinate itself)
        rarate = self.pmra/60.0/60.0/np.cos(self.icrs.dec.radian)/1000.0
                # in degress of RA/year
        decrate = self.pmdec/60.0/60.0/1000.0
                # in degrees/year
        # update ra and dec, based on the time elapsed
        raindegrees = self.icrs.ra.degree + timeelapsed*rarate
        decindegrees = self.icrs.dec.degree + timeelapsed*decrate
        # create new coordinate object, and return it
        extrapolated = astropy.coordinates.SkyCoord(raindegrees*u.deg,
                            decindegrees*u.deg, frame='icrs',
                            obstime=desiredtime)
        return extrapolated
    def posstring(self, epoch=2000.0, coord=None, delimiter='letters'):
        '''return a position string, at a particular epoch, for one star

        NOTE: the [coord] parameter is accepted but immediately
        overwritten by the extrapolated position below (dead parameter).
        NOTE(review): unlike strings.clockify, seconds that round up to
        exactly 60 are not wrapped here -- confirm that is acceptable.
        '''
        # make sure the delimiter is okay
        assert(delimiter in possible_delimiters)
        # the overall format
        overallform = '{ra:s} {dec:s} ({epoch:6.1f})'
        # extrapolate to the epoch
        coord = self.atEpoch(epoch)
        # pull out the RA and dec
        rah,ram,ras = coord.ra.hms
        decd,decm,decs = coord.dec.dms
        assert(np.isfinite(rah))
        # decide on the string formats
        if delimiter == 'letters':
            raform = "{h:02.0f}h{m:02.0f}m{s:05.2f}s"
            decform = "{d:+03.0f}d{m:02.0f}m{s:04.1f}s"
        else:
            raform = "{h:02.0f}:{m:02.0f}:{s:05.2f}".replace(':', delimiter)
            decform = "{d:+03.0f}:{m:02.0f}:{s:04.1f}".replace(':', delimiter)
        # create the strings, and return the combined one
        # (dms gives signed minutes/seconds for negative dec; the sign is
        #  carried by decd, so the abs() here prevents "--"-style output)
        ra = raform.format(h=rah,m=ram,s=ras)
        dec = decform.format(d=decd,m=np.abs(decm),s=np.abs(decs))
        return overallform.format(ra=ra, dec=dec, epoch=epoch)
import os
import numpy as np
import pyds9
import displays.regions as regions
from star import Star
import utils
from slit_mask_regions import slit_mask_regions
# all finder charts are written under $COSMOS/Finders/; the directory is
# created as a side effect of importing this module
finderdir = os.environ['COSMOS'] + 'Finders/'
utils.mkdir(finderdir)
class Camera(object):
    """Field-of-view parameters for a known instrument.

    Looks up the instrument by name and copies its parameters --
    chart ``size`` (arcmin) and ``inflate`` factor -- onto the instance
    as plain attributes.
    """
    def __init__(self, name):
        # catalog of known instruments and their chart parameters
        self.instruments = {
            'LDSS3C': {'size': 8.0, 'inflate': 1.8},
            'CHIRON': {'size': 3.0, 'inflate': 1.8},
            'MIKE': {'size': 3.0, 'inflate': 1.8},
            'PISCO': {'size': 9.0, 'inflate': 1.8},
        }
        self.setup(name)
    def setup(self, name):
        """Copy the named instrument's parameters onto this instance."""
        for key, value in self.instruments[name].items():
            self.__dict__[key] = value
class Finder(object):
    """Drive a ds9 session to build and save a finder chart for one star.

    Downloads a DSS image around the star's extrapolated position, overlays
    annotation regions (proper-motion arrow, field circles, compass, labels,
    optional slit mask), and saves PNGs. Requires a running/launchable ds9
    via pyds9. (Python 2 code: uses print statements.)
    """
    def __init__(self, star,
                    moment=2016.3,
                    instrument='LDSS3C',
                    npixels=500, **starkw):
        '''produce a finder chart for a given star, at a particular moment
            star = either a star object, or the name of star for Simbad
                    (**starkw will be passed to the star creation,
                    if you need custom RA, DEC, proper motions)
            moment = for what epoch (e.g. 2016.3) should the chart show?
            instrument = a string, indicating basic size of the chart
            npixels = how big the image can be; ds9 needs all on screen
        '''
        if type(star) == str:
            # if star is a string, use it (and starkw) to create star object
            self.name = star
            self.star = Star(self.name, **starkw)
        else:
            # if star isn't a string, it must be a zachopy.star.Star object'''
            self.star = star
            self.name = star.name
        # keep track
        self.instrument = instrument
        self.npixels = npixels
        # define the moment this finder should represent
        self.setMoment(moment)
        # use ds9 to create the finder chart
        self.createChart()
    def setMoment(self, moment):
        # record the epoch (decimal years) the chart should represent
        self.moment = moment
    def createChart(self):
        '''Run the whole pipeline: position, image, regions, tidy, save.'''
        # extrapolate the star to the requested epoch
        self.icrs = self.star.atEpoch(self.moment)
        self.ra, self.dec = self.icrs.ra, self.icrs.dec
        # look up chart size/inflation for this instrument
        self.camera = Camera(self.instrument)
        # turn "23h59m59s +89d59m59s" into "23:59:59 +89:59:59"
        # (replace h/m/d with ':' and strip the trailing 's' characters)
        self.coordstring = self.icrs.to_string('hmsdms')
        for letter in 'hmdm':
            self.coordstring = self.coordstring.replace(letter, ':')
        self.coordstring = self.coordstring.replace('s', '')
        # attach to (or launch) a ds9 window named 'finders'
        self.w = pyds9.DS9('finders')
        # strip the ds9 GUI down to just the image
        toremove=[ 'info','panner','magnifier','buttons']
        for what in toremove:
            self.w.set('view {0} no'.format(what))
        self.w.set("frame delete all")
        self.size = self.camera.size
        self.inflate = self.camera.inflate
        #try:
        #    self.addImage('poss1_red')
        #except:
        #    print "poss1 failed"
        #try:
        self.addImage('poss2ukstu_red')
        #except:
        #    print "poss2 failed"
        self.addRegions()
        # overlay slit regions, if this star carries a 'slits' attribute
        try:
            slit_mask_regions(self.star.attributes['slits'], 'slits')
            self.w.set("regions load {0}".format('slits.reg'))
        except KeyError:
            print "no slits found!"
        self.tidy()
        self.save()
    def tidy(self):
        '''Clean up the ds9 display (single frame, zoomed, WCS-matched).'''
        self.w.set("tile mode column")
        self.w.set("tile yes")
        self.w.set("single")
        self.w.set("zoom to fit")
        self.w.set("match frame wcs")
    def save(self):
        '''Save the chart PNG both globally ($COSMOS/Finders/) and locally.'''
        utils.mkdir('finders')
        for d in [finderdir, 'finders/']:
            print "saveimage " + d + self.name.replace(' ', '') + ".png"
            self.w.set("saveimage " + d + self.name.replace(' ', '') + ".png")
    def addImage(self, survey='poss2_red'):
        '''Download a DSS image of the field into a new ds9 frame.'''
        self.w.set("frame new")
        self.w.set('single')
        self.w.set( "dssstsci survey {0}".format(survey))
        # request an inflated field so annotations fit around the circle
        self.w.set("dssstsci size {0} {1} arcmin ".format(self.size*self.inflate, self.size*self.inflate))
        self.w.set("dssstsci coord {0} sexagesimal ".format(self.coordstring))
    def addRegions(self):
        '''Overlay the annotation regions (arrow, circles, compass, text).'''
        xpixels = self.w.get('''fits header keyword "'NAXIS1'" ''')
        # NOTE(review): this queries NAXIS1 again -- presumably it should
        # be NAXIS2; ypixels is also never used below. Confirm intent.
        ypixels = self.w.get('''fits header keyword "'NAXIS1'" ''')
        # shrink factor so fonts scale with the on-screen image size
        self.scale = np.minimum(int(xpixels), self.npixels)/float(xpixels)
        self.w.set("width {0:.0f}".format(self.npixels))
        self.w.set("height {0:.0f}".format(self.npixels))
        # add circles centered on the target position
        r = regions.Regions("LDSS3C", units="fk5", path=finderdir )
        # the image's epoch comes from the year in its DATE-OBS header
        imageepoch = float(self.w.get('''fits header keyword "'DATE-OBS'" ''').split('-')[0])
        # the star's position when the image was taken...
        old = self.star.atEpoch(imageepoch)
        print imageepoch
        print self.star.posstring(imageepoch)
        # ...and where it will be at the requested moment
        current = self.star.atEpoch(self.moment)
        print self.moment
        print self.star.posstring(self.moment)
        # arrow from the image-epoch position to the current position
        r.addLine(old.ra.degree, old.dec.degree, current.ra.degree, current.dec.degree, line='0 1', color='red')
        print old.ra.degree, old.dec.degree, current.ra.degree, current.dec.degree
        r.addCircle(current.ra.degree, current.dec.degree, "{0}'".format(self.size/2), text="{0:.1f}' diameter".format(self.size), font="bold {0:.0f}".format(np.round(self.scale*14.0)))
        r.addCircle(current.ra.degree, current.dec.degree, "{0}'".format(2.0/60.0))
        # add a compass
        # (radius here is half the chart size, converted from arcmin to deg)
        radius = self.size/60/2
        r.addCompass(current.ra.degree + 0.95*radius, current.dec.degree + 0.95*radius, "{0}'".format(self.size*self.inflate/10))
        r.addText(current.ra.degree, current.dec.degree - 1.1*radius, self.name + ', ' + self.star.posstring(self.moment), font='bold {0:.0f}'.format(np.round(self.scale*16.0)), color='red')
        r.addText(current.ra.degree - 1.04*radius, current.dec.degree + 1.02*radius, 'd(RA)={0:+3.0f}, d(Dec)={1:+3.0f} mas/yr'.format(self.star.pmra, self.star.pmdec), font='bold {0:.0f}'.format(np.round(self.scale*12.0)), color='red')
        r.addText(current.ra.degree - 1.04*radius, current.dec.degree + 0.95*radius, '(image from {0})'.format(imageepoch), font='bold {0:.0f}'.format(np.round(self.scale*10.0)), color='red')
        # load regions into image
        print(r)
        r.write()
        self.w.set("cmap invert yes")
        self.w.set("colorbar no")
        #self.w.set("rotate to {0}".format(int(np.round(90 -63 - self.star['rot'][0]))))
        self.w.set("regions load {0}".format(r.filename))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.