# Copyright 2023 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A class for running the forwarder server
"""
import os
import logging
import logging.handlers
import socket
from threading import Thread, Event
import zhmcclient
from .forwarded_lpars import ForwardedLpars
from .utils import logprint, PRINT_ALWAYS, PRINT_V, PRINT_VV, \
RETRY_TIMEOUT_CONFIG
class ForwarderServer:
"""
A forwarder server.
"""
def __init__(self, config_data, config_filename):
"""
Parameters:
config_data (dict): Content of forwarder config file.
config_filename (string): Path name of forwarder config file.
"""
self.config_data = config_data
self.config_filename = config_filename
self.thread = Thread(target=self.run) # forwarder thread
self.thread_started = False
        self.stop_event = Event()  # Set this event to stop the forwarder thread
self.session = None # zhmcclient.Session with the HMC
self.all_cpcs = None # List of all managed CPCs as zhmcclient.Cpc
self.all_lpars = None # List of all partitions/LPARs as zhmcclient obj
self.forwarded_lpars = None # ForwardedLpars object
self.receiver = None # NotificationReceiver
self.num_subscriptions = None
def startup(self):
"""
Set up the forwarder server and start the forwarder thread.
"""
hmc_data = self.config_data['hmc']
        # hmc data structure in config file:
        #
        #   hmc:
        #     host: 10.11.12.13
        #     userid: "myuser"
        #     password: "mypassword"
        #     verify_cert: false
verify_cert = hmc_data.get('verify_cert', True)
if isinstance(verify_cert, str):
if not os.path.isabs(verify_cert):
verify_cert = os.path.join(
os.path.dirname(self.config_filename), verify_cert)
logprint(logging.INFO, PRINT_ALWAYS,
"Opening session with HMC {h} "
"(user: {u}, certificate validation: {c})".
format(h=hmc_data['host'], u=hmc_data['userid'],
c=verify_cert))
self.session = zhmcclient.Session(
hmc_data['host'],
hmc_data['userid'],
hmc_data['password'],
verify_cert=verify_cert,
retry_timeout_config=RETRY_TIMEOUT_CONFIG)
client = zhmcclient.Client(self.session)
logprint(logging.INFO, PRINT_V,
"Gathering information about CPCs and LPARs to forward")
self.all_cpcs = client.cpcs.list()
self.all_lpars = []
for cpc in self.all_cpcs:
dpm = cpc.prop('dpm-enabled')
if dpm:
self.all_lpars.extend(cpc.partitions.list())
else:
self.all_lpars.extend(cpc.lpars.list())
self.forwarded_lpars = ForwardedLpars(
self.session, self.config_data, self.config_filename)
for lpar in self.all_lpars:
cpc = lpar.manager.parent
added = self.forwarded_lpars.add_if_matching(lpar)
if added:
logprint(logging.INFO, PRINT_V,
"LPAR {p!r} on CPC {c!r} will be forwarded".
format(p=lpar.name, c=cpc.name))
self.receiver = zhmcclient.NotificationReceiver(
            [],  # No initial topics; OS message topics are subscribed below.
                 # (Passing self.session.object_topic would add object
                 # notifications, which this forwarder ignores.)
hmc_data['host'],
hmc_data['userid'],
hmc_data['password'])
self.num_subscriptions = 0
logger_id = 0 # ID number used in Python logger name
for lpar_info in self.forwarded_lpars.forwarded_lpar_infos.values():
lpar = lpar_info.lpar
cpc = lpar.manager.parent
logprint(logging.INFO, PRINT_VV,
"Opening OS message channel for LPAR {p!r} on CPC {c!r}".
format(p=lpar.name, c=cpc.name))
try:
os_topic = lpar.open_os_message_channel(
include_refresh_messages=True)
except zhmcclient.HTTPError as exc:
if exc.http_status == 409 and exc.reason == 331:
# OS message channel is already open for this session,
# reuse its notification topic.
topic_dicts = self.session.get_notification_topics()
os_topic = None
for topic_dict in topic_dicts:
if topic_dict['topic-type'] != \
'os-message-notification':
continue
obj_uri = topic_dict['object-uri']
if lpar.uri == obj_uri:
os_topic = topic_dict['topic-name']
logprint(logging.INFO, PRINT_VV,
"Using existing OS message notification "
"topic {t!r} for LPAR {p!r} on CPC {c!r}".
format(t=os_topic, p=lpar.name,
c=cpc.name))
break
if os_topic is None:
raise RuntimeError(
"An OS message notification topic for LPAR {p!r} "
"on CPC {c!r} supposedly exists, but cannot be "
"found in the existing topics for this session: "
"{t}".
format(p=lpar.name, c=cpc.name, t=topic_dicts))
elif exc.http_status == 409 and exc.reason == 332:
# The OS does not support OS messages.
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning: The OS in LPAR {p!r} on CPC {c!r} does "
"not support OS messages - ignoring the LPAR".
format(p=lpar.name, c=cpc.name))
os_topic = None
else:
raise
if os_topic:
logprint(logging.INFO, PRINT_VV,
"Subscribing for OS message notifications for LPAR "
"{p!r} on CPC {c!r} (topic: {t})".
format(p=lpar.name, c=cpc.name, t=os_topic))
self.receiver.subscribe(os_topic)
lpar_info.topic = os_topic
self.num_subscriptions += 1
# Prepare sending to syslogs by creating Python loggers
for syslog in self.forwarded_lpars.get_syslogs(lpar):
try:
logger = self._create_logger(syslog, logger_id)
except ConnectionError as exc:
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning: Skipping syslog server: {}".format(exc))
continue
logger_id += 1
syslog.logger = logger
self._start()
self.thread_started = True
@staticmethod
def _create_logger(syslog, logger_id):
facility_code = logging.handlers.SysLogHandler.facility_names[
syslog.facility]
if syslog.port_type == 'tcp':
# Newer syslog protocols, e.g. rsyslog
socktype = socket.SOCK_STREAM
else:
assert syslog.port_type == 'udp'
# Older syslog protocols, e.g. BSD
socktype = socket.SOCK_DGRAM
try:
handler = logging.handlers.SysLogHandler(
(syslog.host, syslog.port), facility_code,
socktype=socktype)
# pylint: disable=broad-exception-caught
except Exception as exc:
raise ConnectionError(
"Cannot create log handler for syslog server at "
"{host}, port {port}/{port_type}: {msg}".
format(host=syslog.host, port=syslog.port,
port_type=syslog.port_type, msg=str(exc)))
handler.setFormatter(logging.Formatter('%(message)s'))
logger_name = 'zhmcosfwd_syslog_{}'.format(logger_id)
logger = logging.getLogger(logger_name)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
return logger
def shutdown(self):
"""
Stop the forwarder thread and clean up the forwarder server.
"""
if self.forwarded_lpars:
for lpar_info in self.forwarded_lpars.forwarded_lpar_infos.values():
lpar = lpar_info.lpar
cpc = lpar.manager.parent
if lpar_info.topic:
logprint(logging.INFO, PRINT_VV,
"Unsubscribing OS message channel for LPAR {p!r} "
"on CPC {c!r} (topic: {t})".
format(p=lpar.name, c=cpc.name, t=lpar_info.topic))
try:
self.receiver.unsubscribe(lpar_info.topic)
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error unsubscribing OS message channel for "
"LPAR {p!r} on CPC {c!r} (topic: {t}): {m}".
format(p=lpar.name, c=cpc.name,
t=lpar_info.topic, m=exc))
if self.receiver:
try:
logprint(logging.INFO, PRINT_ALWAYS,
"Closing notification receiver")
self.receiver.close()
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error closing notification receiver: {m}".
format(m=exc))
if self.thread_started:
try:
logprint(logging.INFO, PRINT_ALWAYS,
"Stopping forwarder thread")
self._stop()
# pylint: disable=broad-exception-caught
except Exception as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error stopping forwarder thread: {m}".
format(m=exc))
self.thread_started = False
# logprint(logging.INFO, PRINT_ALWAYS,
# "Cleaning up partition notifications on HMC")
# for lpar_tuple in self.forwarded_lpars.values():
# lpar = lpar_tuple[0]
# try:
# lpar.disable_auto_update()
# except zhmcclient.HTTPError as exc:
# if exc.http_status == 403:
# # The session does not exist anymore
# pass
if self.session:
logprint(logging.INFO, PRINT_ALWAYS,
"Closing session with HMC")
try:
self.session.logoff()
except zhmcclient.HTTPError as exc:
if exc.http_status == 403:
# The session does not exist anymore
pass
else:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error closing session with HMC: {m}".
format(m=exc))
self.session = None
def _start(self):
"""
Start the forwarder thread.
"""
self.stop_event.clear()
self.thread.start()
def _stop(self):
"""
Stop the forwarder thread.
"""
self.stop_event.set()
self.thread.join()
def run(self):
"""
The method running as the forwarder server thread.
"""
logprint(logging.INFO, PRINT_V,
"Entering forwarder thread")
while True:
if self.stop_event.is_set():
break
try:
# pylint: disable=unused-variable
for headers, message in self.receiver.notifications():
self.handle_notification(headers, message)
except zhmcclient.NotificationJMSError as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error receiving notifications {}: {}".
format(exc.__class__.__name__, exc))
logprint(logging.ERROR, PRINT_ALWAYS,
"Receiving notifications again")
logprint(logging.INFO, PRINT_V,
"Leaving forwarder thread")
def handle_notification(self, headers, message):
"""
Handle a received notification.
"""
noti_type = headers['notification-type']
if noti_type == 'os-message':
for msg_info in message['os-messages']:
lpar_uri = headers['object-uri']
lpar_infos = self.forwarded_lpars.forwarded_lpar_infos
lpar_info = lpar_infos[lpar_uri]
lpar = lpar_info.lpar
seq_no = msg_info['sequence-number']
msg_txt = msg_info['message-text'].strip('\n')
self.send_to_syslogs(lpar, seq_no, msg_txt)
else:
dest = headers['destination']
sub_id = headers['subscription']
obj_class = headers['class']
obj_name = headers['name']
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning: Ignoring {nt!r} notification for {c} {n!r} "
"(subscription: {s}, destination: {d})".
format(nt=noti_type, c=obj_class, n=obj_name, s=sub_id,
d=dest))
def send_to_syslogs(self, lpar, seq_no, msg_txt):
"""
Send a single OS message to the configured syslogs for its LPAR.
"""
cpc = lpar.manager.parent
for syslog in self.forwarded_lpars.get_syslogs(lpar):
if syslog.logger:
syslog_txt = ('{c} {p} {s}: {m}'.
format(c=cpc.name, p=lpar.name, s=seq_no,
m=msg_txt))
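                # Example resulting syslog line (names and message are
                # illustrative):
                #   "MYCPC dal1-lpar1 42: Some OS console message"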
try:
syslog.logger.info(syslog_txt)
# pylint: disable=broad-exception-caught
except Exception as exc:
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning: Cannot send seq_no {s} from LPAR {p!r} "
"on CPC {c!r} to syslog host {h}: {m}".
format(s=seq_no, p=lpar.name, c=cpc.name,
h=syslog.host, m=exc))
                    continue
| zhmc-os-forwarder | /zhmc_os_forwarder-0.2.0.tar.gz/zhmc_os_forwarder-0.2.0/zhmc_os_forwarder/forwarder_server.py | forwarder_server.py |
# Copyright 2023 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A class for storing forwarded LPARs and their syslog servers
"""
from .forwarder_config import ForwarderConfig
# pylint: disable=too-few-public-methods
class ForwardedLparInfo:
"""
Info for a single forwarded LPAR
"""
def __init__(self, lpar, syslogs=None, topic=None):
self.lpar = lpar
if not syslogs:
syslogs = []
self.syslogs = syslogs
self.topic = topic
class ForwardedLpars:
"""
A data structure to maintain forwarded LPARs and their syslog servers,
based on the forwarder config.
"""
def __init__(self, session, config_data, config_filename):
"""
Parameters:
session (zhmcclient.Session): Session with the HMC.
config_data (dict): Content of forwarder config file.
config_filename (string): Path name of forwarder config file.
"""
self.session = session
self.config_data = config_data
self.config_filename = config_filename
# Forwarder config for fast lookup
self.config = ForwarderConfig(config_data, config_filename)
# Representation of forwarded LPARs
# - key: LPAR URI
# - value: ForwardedLparInfo
self.forwarded_lpar_infos = {}
def __str__(self):
return ("{s.__class__.__name__}("
"config_filename={s.config_filename!r}"
")".format(s=self))
def __repr__(self):
return ("{s.__class__.__name__}("
"config_filename={s.config_filename!r}, "
"config={s.config!r}, "
"forwarded_lpar_infos={s.forwarded_lpar_infos!r}"
")".format(s=self))
def add_if_matching(self, lpar):
"""
Add an LPAR to be forwarded if it matches a forwarding definition
in the forwarder config.
If the LPAR is already being forwarded, its syslog servers are changed
to the syslog servers from the forwarder definition.
Parameters:
lpar (zhmcclient.Partition/Lpar or string): The LPAR, as a zhmcclient
resource object or as a URI string.
Returns:
bool: Indicates whether the LPAR was added.
"""
syslogs = self.config.get_syslogs(lpar)
if syslogs:
if lpar.uri not in self.forwarded_lpar_infos:
self.forwarded_lpar_infos[lpar.uri] = ForwardedLparInfo(lpar)
self.forwarded_lpar_infos[lpar.uri].syslogs = syslogs
return True
return False
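    # Typical use during startup (a sketch; see ForwarderServer.startup()):
    #   forwarded_lpars = ForwardedLpars(session, config_data, config_filename)
    #   for lpar in all_lpars:
    #       forwarded_lpars.add_if_matching(lpar)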
def remove(self, lpar):
"""
Remove an LPAR from being forwarded.
        If the LPAR is not currently being forwarded, nothing is done.
Parameters:
lpar (zhmcclient.Partition/Lpar or string): The LPAR, as a zhmcclient
resource object or as a URI string.
"""
if lpar.uri in self.forwarded_lpar_infos:
del self.forwarded_lpar_infos[lpar.uri]
def is_forwarding(self, lpar):
"""
Return whether the LPAR is currently being forwarded.
Parameters:
lpar (zhmcclient.Partition/Lpar or string): The LPAR, as a zhmcclient
resource object or as a URI string.
Returns:
bool: Indicates whether the LPAR is currently being forwarded.
"""
return lpar.uri in self.forwarded_lpar_infos
def get_syslogs(self, lpar):
"""
Get the syslogs from the forwarder config for a forwarded LPAR.
If the LPAR is not currently forwarded, returns None.
Parameters:
lpar (zhmcclient.Partition/Lpar or string): The LPAR, as a zhmcclient
resource object or as a URI string.
Returns:
list of ConfigSyslogInfo: The syslogs for the LPAR, or None.
"""
try:
lpar_info = self.forwarded_lpar_infos[lpar.uri]
except KeyError:
return None
        return lpar_info.syslogs
| zhmc-os-forwarder | /zhmc_os_forwarder-0.2.0.tar.gz/zhmc_os_forwarder-0.2.0/zhmc_os_forwarder/forwarded_lpars.py | forwarded_lpars.py |
# Copyright 2023 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Classes for storing the forwarder config in an optimized form
"""
import re
from collections import namedtuple
# Default syslog properties, if not specified in forwarder config
DEFAULT_SYSLOG_PORT = 514
DEFAULT_SYSLOG_PORT_TYPE = 'tcp'
DEFAULT_SYSLOG_FACILITY = 'user'
# Info for a single CPC pattern in the forwarder config
ConfigCpcInfo = namedtuple(
'ConfigCpcInfo',
[
        'cpc_pattern', # re.Pattern: Compiled pattern for CPC name
'lpar_infos', # List of ConfigLparInfo items
]
)
# Info for a single LPAR pattern in the forwarder config
# In context of a CPC pattern.
ConfigLparInfo = namedtuple(
'ConfigLparInfo',
[
        'lpar_pattern', # re.Pattern: Compiled pattern for LPAR name
'syslogs', # list of ConfigSyslogInfo: Syslogs for the LPAR
]
)
# pylint: disable=too-few-public-methods
class ConfigSyslogInfo:
"""
Info for a single syslog in the forwarder config
"""
def __init__(self, host, port, port_type, facility):
self.host = host # string: Syslog IP address or hostname
self.port = port # int: Syslog port number
        self.port_type = port_type # string: Syslog port type ('tcp', 'udp')
self.facility = facility # string: Syslog facility (e.g. 'user')
self.logger = None # logging.Logger: Python logger for syslog
class ForwarderConfig:
"""
A data structure to keep the forwarder config in an optimized way.
"""
def __init__(self, config_data, config_filename):
"""
Parameters:
config_data (dict): Content of forwarder config file.
config_filename (string): Path name of forwarder config file.
"""
self.config_data = config_data
self.config_filename = config_filename
# Data for storing the config
# Representation of forwarder config for fast lookup
# - items: namedtuple ConfigCpcInfo
self.config_cpc_infos = []
forwarding = self.config_data['forwarding']
        # forwarding data structure in config file:
        #
        #   forwarding:
        #     - syslogs:
        #         - host: 10.11.12.14
        #       cpcs:
        #         - cpc: "CPC.*"
        #           partitions:
        #             - partition: "dal1-.*"
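        # For the example above, the loop below produces (illustrative values,
        # with the syslog defaults applied):
        #
        #   config_cpc_infos = [
        #       ConfigCpcInfo(
        #           cpc_pattern=re.compile('^CPC.*$'),
        #           lpar_infos=[
        #               ConfigLparInfo(
        #                   lpar_pattern=re.compile('^dal1-.*$'),
        #                   syslogs=[ConfigSyslogInfo(
        #                       '10.11.12.14', 514, 'tcp', 'user')]),
        #           ]),
        #   ]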
for fwd_item in forwarding:
syslogs = []
for sl_item in fwd_item['syslogs']:
sl_host = sl_item['host']
sl_port = sl_item.get('port', DEFAULT_SYSLOG_PORT)
sl_port_type = sl_item.get('port_type',
DEFAULT_SYSLOG_PORT_TYPE)
sl_facility = sl_item.get('facility', DEFAULT_SYSLOG_FACILITY)
syslog_info = ConfigSyslogInfo(
sl_host, sl_port, sl_port_type, sl_facility)
syslogs.append(syslog_info)
for cpc_item in fwd_item['cpcs']:
cpc_pattern = re.compile('^{}$'.format(cpc_item['cpc']))
cpc_info = ConfigCpcInfo(cpc_pattern, [])
for lpar_item in cpc_item['partitions']:
lpar_pattern = re.compile(
'^{}$'.format(lpar_item['partition']))
lpar_info = ConfigLparInfo(lpar_pattern, syslogs)
cpc_info.lpar_infos.append(lpar_info)
self.config_cpc_infos.append(cpc_info)
def __str__(self):
return ("{s.__class__.__name__}("
"config_filename={s.config_filename!r}"
")".format(s=self))
def __repr__(self):
return ("{s.__class__.__name__}("
"config_filename={s.config_filename!r}, "
"config_cpc_infos={s.config_cpc_infos!r}"
")".format(s=self))
def get_syslogs(self, lpar):
"""
Get the syslogs for an LPAR if it matches the forwarder config.
If it does not match the forwarder config, None is returned.
Parameters:
lpar (zhmcclient.Partition/Lpar): The LPAR, as a zhmcclient
resource object.
Returns:
list of ConfigSyslogInfo: List of syslogs if matching, or None
otherwise.
"""
cpc = lpar.manager.parent
for cpc_info in self.config_cpc_infos:
if cpc_info.cpc_pattern.match(cpc.name):
for lpar_info in cpc_info.lpar_infos:
if lpar_info.lpar_pattern.match(lpar.name):
return lpar_info.syslogs
        return None
| zhmc-os-forwarder | /zhmc_os_forwarder-0.2.0.tar.gz/zhmc_os_forwarder-0.2.0/zhmc_os_forwarder/forwarder_config.py | forwarder_config.py |
# Copyright 2023 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
IBM Z HMC OS Message Forwarder
"""
import argparse
import sys
import time
import logging
import logging.handlers
import urllib3
import zhmcclient
from ._version import __version__
from .forwarder_server import ForwarderServer
from . import utils # for global variable VERBOSE_LEVEL
from .utils import DEFAULT_CONFIG_FILE, VALID_LOG_DESTINATIONS, \
VALID_LOG_LEVELS, VALID_LOG_COMPONENTS, DEFAULT_LOG_LEVEL, \
DEFAULT_LOG_COMP, DEFAULT_SYSLOG_FACILITY, VALID_SYSLOG_FACILITIES, \
PRINT_ALWAYS, PRINT_V, RETRY_TIMEOUT_CONFIG, \
ProperExit, ImproperExit, EarlyExit, \
parse_yaml_file, logprint, setup_logging
def parse_args(args):
"""
Parses the CLI arguments.
"""
parser = argparse.ArgumentParser(
description="IBM Z HMC OS Message Forwarder")
parser.add_argument("-c", metavar="CONFIG_FILE",
default=DEFAULT_CONFIG_FILE,
help="path name of config file. "
"Use --help-config for details. "
"Default: {}".format(DEFAULT_CONFIG_FILE))
parser.add_argument("--log", dest='log_dest', metavar="DEST", default=None,
help="enable logging and set a log destination "
"({dests}). Default: no logging".
format(dests=', '.join(VALID_LOG_DESTINATIONS)))
parser.add_argument("--log-comp", dest='log_complevels', action='append',
metavar="COMP[=LEVEL]", default=None,
help="set a logging level ({levels}, default: "
"{def_level}) for a component ({comps}). May be "
"specified multiple times; options add to the default "
"of: {def_comp}".
format(levels=', '.join(VALID_LOG_LEVELS),
comps=', '.join(VALID_LOG_COMPONENTS),
def_level=DEFAULT_LOG_LEVEL,
def_comp=DEFAULT_LOG_COMP))
parser.add_argument("--syslog-facility", metavar="TEXT",
default=DEFAULT_SYSLOG_FACILITY,
help="syslog facility ({slfs}) when logging to the "
"system log. Default: {def_slf}".
format(slfs=', '.join(VALID_SYSLOG_FACILITIES),
def_slf=DEFAULT_SYSLOG_FACILITY))
parser.add_argument("--verbose", "-v", action='count', default=0,
help="increase the verbosity level (max: 2)")
parser.add_argument("--version", action='store_true',
help="show versions of forwarder and zhmcclient "
"library and exit")
parser.add_argument("--help-config", action='store_true',
help="show help for forwarder config file and exit")
return parser.parse_args(args)
def print_version():
"""
Print the version of this program and the zhmcclient library.
"""
# pylint: disable=no-member
print("zhmc_os_forwarder version: {}\n"
"zhmcclient version: {}".
format(__version__, zhmcclient.__version__))
def help_config():
"""
Print help for the forwarder config file.
"""
print("""
Help for forwarder config file
The forwarder config file is a YAML file that defines which HMC to talk to,
and the forwarding, i.e. which partition is forwarded to which syslog server.
The following example shows a complete forwarder config file. For more details,
see the documentation at https://zhmc-os-forwarder.readthedocs.io/.
---
hmc:
  host: 10.11.12.13
  userid: "myuser"
  password: "mypassword"
  verify_cert: false
forwarding:
  - syslogs:
      - host: 10.11.12.14
    cpcs:
      - cpc: MYCPC
        partitions:
          - partition: ".*"
""")
def main():
"""
Main function for the script.
"""
args = parse_args(sys.argv[1:])
if args.version:
print_version()
sys.exit(0)
if args.help_config:
help_config()
sys.exit(0)
utils.VERBOSE_LEVEL = args.verbose
urllib3.disable_warnings()
forwarder_server = None
try:
setup_logging(args.log_dest, args.log_complevels, args.syslog_facility)
logprint(logging.WARNING, None,
"---------------- "
"zhmc_os_forwarder started "
"----------------")
logprint(logging.INFO, PRINT_ALWAYS,
"zhmc_os_forwarder version: {}".format(__version__))
# pylint: disable=no-member
logprint(logging.INFO, PRINT_ALWAYS,
"zhmcclient version: {}".format(zhmcclient.__version__))
logprint(logging.INFO, PRINT_ALWAYS,
"Verbosity level: {}".format(utils.VERBOSE_LEVEL))
config_filename = args.c
logprint(logging.INFO, PRINT_V,
"Parsing forwarder config file: {}".format(config_filename))
config_data = parse_yaml_file(
config_filename, 'forwarder config file', 'config_schema.yaml')
logprint(logging.INFO, PRINT_V,
"Timeout/retry configuration: "
"connect: {r.connect_timeout} sec / {r.connect_retries} "
"retries, read: {r.read_timeout} sec / {r.read_retries} "
"retries.".format(r=RETRY_TIMEOUT_CONFIG))
forwarder_server = ForwarderServer(config_data, config_filename)
try:
forwarder_server.startup()
except zhmcclient.Error as exc:
new_exc = ImproperExit(
"{}: {}".format(exc.__class__.__name__, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
logprint(logging.INFO, PRINT_V,
"Current number of subscriptions for OS message "
"notifications: {}".
format(forwarder_server.num_subscriptions))
logprint(logging.INFO, PRINT_ALWAYS,
"Forwarder is up and running (Press Ctrl-C to shut down)")
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
raise ProperExit
except KeyboardInterrupt:
logprint(logging.WARNING, PRINT_ALWAYS,
"Forwarder interrupted before server start")
exit_rc(1)
except EarlyExit as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error: {}".format(exc))
exit_rc(1)
except ImproperExit as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error: {}".format(exc))
if forwarder_server:
forwarder_server.shutdown()
exit_rc(1)
except ProperExit:
logprint(logging.WARNING, PRINT_ALWAYS,
"Forwarder shutdown requested")
if forwarder_server:
forwarder_server.shutdown()
exit_rc(0)
def exit_rc(rc):
"""Exit the script"""
logprint(logging.WARNING, None,
"---------------- "
"zhmc_os_forwarder terminated "
"----------------")
sys.exit(rc)
if __name__ == "__main__":
    main()
| zhmc-os-forwarder | /zhmc_os_forwarder-0.2.0.tar.gz/zhmc_os_forwarder-0.2.0/zhmc_os_forwarder/zhmc_os_forwarder.py | zhmc_os_forwarder.py |
import sys
import os
import types
import platform
import time
import logging
from contextlib import contextmanager
import yaml
import jsonschema
import zhmcclient
#
# Global variables that will be set after command line parsing and will
# then be used throughout the project.
#
# Verbosity level from the command line
VERBOSE_LEVEL = 0
# Indicates that logging was enabled on the command line
LOGGING_ENABLED = False
#
# Logging
#
LOGGER_NAME = 'zhmcosforwarder'
# Logger names by log component
LOGGER_NAMES = {
'forwarder': LOGGER_NAME,
'hmc': zhmcclient.HMC_LOGGER_NAME,
'jms': zhmcclient.JMS_LOGGER_NAME,
}
VALID_LOG_COMPONENTS = list(LOGGER_NAMES.keys()) + ['all']
# Log levels by their CLI names
LOG_LEVELS = {
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'off': logging.NOTSET,
}
VALID_LOG_LEVELS = list(LOG_LEVELS.keys())
# Defaults for --log-comp option.
DEFAULT_LOG_LEVEL = 'warning'
DEFAULT_LOG_COMP = 'all=warning'
# Values for printing messages dependent on verbosity level in command line
PRINT_ALWAYS = 0
PRINT_V = 1
PRINT_VV = 2
VALID_LOG_DESTINATIONS = ['stderr', 'syslog', 'FILE']
# Syslog facilities
VALID_SYSLOG_FACILITIES = [
'user', 'local0', 'local1', 'local2', 'local3', 'local4', 'local5',
'local6', 'local7'
]
DEFAULT_SYSLOG_FACILITY = 'user'
# Values to use for the 'address' parameter when creating a SysLogHandler.
# Key: Operating system type, as returned by platform.system(). For CygWin,
# the returned value is 'CYGWIN_NT-6.1', which is special-cased to 'CYGWIN_NT'.
# Value: Value for the 'address' parameter.
SYSLOG_ADDRESS = {
'Linux': '/dev/log',
'Darwin': '/var/run/syslog', # macOS / OS-X
'Windows': ('localhost', 514),
'CYGWIN_NT': '/dev/log', # Requires syslog-ng pkg
'other': ('localhost', 514), # used if no key matches
}
#
# Defaults for other command line options
#
DEFAULT_CONFIG_FILE = '/etc/zhmc-os-forwarder/config.yaml'
#
# Retry
#
# Sleep time in seconds when retrying HMC connections
RETRY_SLEEP_TIME = 10
# Retry / timeout configuration for zhmcclient (used at the socket level)
RETRY_TIMEOUT_CONFIG = zhmcclient.RetryTimeoutConfig(
connect_timeout=10,
connect_retries=2,
read_timeout=300,
read_retries=2,
max_redirects=zhmcclient.DEFAULT_MAX_REDIRECTS,
operation_timeout=zhmcclient.DEFAULT_OPERATION_TIMEOUT,
status_timeout=zhmcclient.DEFAULT_STATUS_TIMEOUT,
name_uri_cache_timetolive=zhmcclient.DEFAULT_NAME_URI_CACHE_TIMETOLIVE,
)
#
# Exceptions
#
class ConfigFileError(Exception):
"""
An error was found in the config file.
"""
pass
class ConnectionError(Exception):
# pylint: disable=redefined-builtin
"""
Connection error with the HMC.
"""
pass
class AuthError(Exception):
"""
Authentication error with the HMC.
"""
pass
class OtherError(Exception):
"""
Other error with the HMC.
"""
pass
class ProperExit(Exception):
"""
Terminating while the server was running.
"""
pass
class ImproperExit(Exception):
"""
Terminating because something went wrong.
"""
pass
class EarlyExit(Exception):
"""
Terminating before the server was started.
"""
pass
@contextmanager
def zhmc_exceptions(session, config_filename):
# pylint: disable=invalid-name
"""
Context manager that handles zhmcclient exceptions by raising the
appropriate forwarder exceptions.
Example::
with zhmc_exceptions(session, config_filename):
client = zhmcclient.Client(session)
version_info = client.version_info()
"""
try:
yield
except zhmcclient.ConnectionError as exc:
new_exc = ConnectionError(
"Connection error using IP address {} defined in forwarder config "
"file {}: {}".format(session.host, config_filename, exc))
new_exc.__cause__ = None
raise new_exc # ConnectionError
except zhmcclient.ClientAuthError as exc:
new_exc = AuthError(
"Client authentication error for the HMC at {h} using "
"userid '{u}' defined in forwarder config file {f}: {m}".
format(h=session.host, u=session.userid, f=config_filename,
m=exc))
new_exc.__cause__ = None
raise new_exc # AuthError
except zhmcclient.ServerAuthError as exc:
http_exc = exc.details # zhmcclient.HTTPError
new_exc = AuthError(
"Authentication error returned from the HMC at {h} using "
"userid '{u}' defined in forwarder config file {f}: {m} "
"(HMC operation {hm} {hu}, HTTP status {hs}.{hr})".
format(h=session.host, u=session.userid, f=config_filename,
m=exc, hm=http_exc.request_method, hu=http_exc.request_uri,
hs=http_exc.http_status, hr=http_exc.reason))
new_exc.__cause__ = None
raise new_exc # AuthError
except (IOError, OSError) as exc:
new_exc = OtherError(str(exc))
new_exc.__cause__ = None
raise new_exc # OtherError
except zhmcclient.Error as exc:
new_exc = OtherError(
"Error returned from HMC at {}: {}".format(session.host, exc))
new_exc.__cause__ = None
raise new_exc # OtherError
def validate_option(option_name, option_value, allowed_values):
"""
Validate the option value against the allowed option values
    and return the value if it passes; raise EarlyExit otherwise.
Raises:
EarlyExit: Invalid command line usage.
"""
if option_value not in allowed_values:
raise EarlyExit(
"Invalid value {val} for {opt} option. Allowed are: {allowed}".
format(opt=option_name, val=option_value,
allowed=', '.join(allowed_values)))
return option_value
def parse_yaml_file(yamlfile, name, schemafilename=None):
"""
Returns the parsed content of a YAML file as a Python object.
Optionally validates against a specified JSON schema file in YAML format.
Raises:
ImproperExit
"""
try:
with open(yamlfile, "r", encoding='utf-8') as fp:
yaml_obj = yaml.safe_load(fp)
except FileNotFoundError as exc:
new_exc = ImproperExit(
"Cannot find {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except PermissionError as exc:
new_exc = ImproperExit(
"Permission error reading {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except yaml.YAMLError as exc:
new_exc = ImproperExit(
"YAML error reading {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
if schemafilename:
schemafile = os.path.join(
os.path.dirname(__file__), 'schemas', schemafilename)
try:
with open(schemafile, 'r', encoding='utf-8') as fp:
schema = yaml.safe_load(fp)
except FileNotFoundError as exc:
new_exc = ImproperExit(
"Internal error: Cannot find schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except PermissionError as exc:
new_exc = ImproperExit(
"Internal error: Permission error reading schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except yaml.YAMLError as exc:
new_exc = ImproperExit(
"Internal error: YAML error reading schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
try:
jsonschema.validate(yaml_obj, schema)
except jsonschema.exceptions.SchemaError as exc:
new_exc = ImproperExit(
"Internal error: Invalid JSON schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None
raise new_exc
except jsonschema.exceptions.ValidationError as exc:
element_str = json_path_str(exc.absolute_path)
new_exc = ImproperExit(
"Validation of {} {} failed on {}: {}".
format(name, yamlfile, element_str, exc.message))
new_exc.__cause__ = None
raise new_exc
return yaml_obj
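# Typical use (a sketch; these are the arguments that main() passes in):
#   config_data = parse_yaml_file(
#       config_filename, 'forwarder config file', 'config_schema.yaml')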
def json_path_str(path_list):
"""
Return a string with the path list in JSON path notation, except that
the root element is not '$' but verbally expressed.
"""
if not path_list:
return "root elements"
path_str = ""
for p in path_list:
if isinstance(p, int):
path_str += "[{}]".format(p)
else:
path_str += ".{}".format(p)
if path_str.startswith('.'):
path_str = path_str[1:]
return "element '{}'".format(path_str)
def get_hmc_info(session):
"""
Return the result of the 'Query API Version' operation. This includes
the HMC version, HMC name and other data. For details, see the operation's
result description in the HMC WS API book.
Raises: zhmccclient exceptions
"""
client = zhmcclient.Client(session)
hmc_info = client.query_api_version()
return hmc_info
def logprint(log_level, print_level, message):
"""
Log a message at the specified log level, and print the message at
the specified verbosity level
Parameters:
log_level (int): Python logging level at which the message should be
logged (logging.DEBUG, etc.), or None for no logging.
print_level (int): Verbosity level at which the message should be
printed (1, 2), or None for no printing.
message (string): The message.
"""
if print_level is not None and VERBOSE_LEVEL >= print_level:
print(message)
if log_level is not None and LOGGING_ENABLED:
logger = logging.getLogger(LOGGER_NAME)
# Note: This method never raises an exception. Errors during logging
# are handled by calling handler.handleError().
logger.log(log_level, message)
def setup_logging(log_dest, log_complevels, syslog_facility):
"""
Set up Python logging as specified in the command line.
Raises:
EarlyExit
"""
global LOGGING_ENABLED # pylint: disable=global-statement
if log_dest is None:
logprint(None, PRINT_V, "Logging is disabled")
handler = None
dest_str = None
elif log_dest == 'stderr':
dest_str = "the Standard Error stream"
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
handler = logging.StreamHandler(stream=sys.stderr)
elif log_dest == 'syslog':
system = platform.system()
if system.startswith('CYGWIN_NT'):
# Value is 'CYGWIN_NT-6.1'; strip off trailing version:
system = 'CYGWIN_NT'
try:
address = SYSLOG_ADDRESS[system]
except KeyError:
address = SYSLOG_ADDRESS['other']
dest_str = ("the System Log at address {a!r} with syslog facility "
"{slf!r}".format(a=address, slf=syslog_facility))
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
try:
facility = logging.handlers.SysLogHandler.facility_names[
syslog_facility]
except KeyError:
valid_slfs = ', '.join(
logging.handlers.SysLogHandler.facility_names.keys())
raise EarlyExit(
"This system ({sys}) does not support syslog facility {slf}. "
"Supported are: {slfs}.".
format(sys=system, slf=syslog_facility, slfs=valid_slfs))
# The following does not raise any exception if the syslog address
# cannot be opened. In that case, the first attempt to log something
# will fail.
handler = logging.handlers.SysLogHandler(
address=address, facility=facility)
else:
dest_str = "file {fn}".format(fn=log_dest)
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
try:
handler = logging.FileHandler(log_dest)
except OSError as exc:
raise EarlyExit(
"Cannot log to file {fn}: {exc}: {msg}".
format(fn=log_dest, exc=exc.__class__.__name__, msg=exc))
if not handler and log_complevels:
raise EarlyExit(
"--log-comp option cannot be used when logging is disabled; "
"use --log option to enable logging.")
if handler:
def handleError(self, record):
"""
Replacement for built-in method on logging.Handler class.
This is needed because the SysLogHandler class does not raise
an exception when creating the handler object, but only when
logging something to it.
"""
_, exc, _ = sys.exc_info()
f_record = self.format(record)
print("Error: Logging to {d} failed with: {exc}: {msg}. Formatted "
"log record: {r!r}".
format(d=dest_str, exc=exc.__class__.__name__, msg=exc,
r=f_record),
file=sys.stderr)
sys.exit(1)
handler.handleError = types.MethodType(handleError, handler)
logger_level_dict = {} # key: logger_name, value: level
if not log_complevels:
log_complevels = [DEFAULT_LOG_COMP]
for complevel in log_complevels:
if '=' in complevel:
            comp, level = complevel.split('=', 1)
else:
comp = complevel
level = DEFAULT_LOG_LEVEL
if level not in LOG_LEVELS:
raise EarlyExit(
"Invalid log level {level!r} in --log-comp option. "
"Allowed are: {allowed}".
format(level=level, allowed=', '.join(VALID_LOG_LEVELS)))
if comp == 'all':
for logger_name in LOGGER_NAMES.values():
logger_level_dict[logger_name] = level
else:
try:
logger_name = LOGGER_NAMES[comp]
except KeyError:
raise EarlyExit(
"Invalid component {comp!r} in --log-comp option. "
"Allowed are: {allowed}".
format(comp=comp,
allowed=', '.join(VALID_LOG_COMPONENTS)))
logger_level_dict[logger_name] = level
complevels = ', '.join(
["{name}={level}".format(name=name, level=level)
for name, level in logger_level_dict.items()])
logprint(None, PRINT_V,
"Logging components: {complevels}".
format(complevels=complevels))
if isinstance(handler, logging.handlers.SysLogHandler):
# Most syslog implementations fail when the message is longer
# than a limit. We use a hard coded limit for now:
# * 2048 is the typical maximum length of a syslog message,
# including its headers
# * 41 is the max length of the syslog message parts before MESSAGE
# * 47 is the max length of the Python format string before message
# Example syslog message:
# <34>1 2003-10-11T22:14:15.003Z localhost MESSAGE
# where MESSAGE is the formatted Python log message.
max_msg = '.{}'.format(2048 - 41 - 47)
else:
max_msg = ''
fs = ('%(asctime)s %(levelname)s %(name)s: %(message){m}s'.
format(m=max_msg))
# Set the formatter to always log times in UTC. Since the %z
# formatting string does not get adjusted for that, set the timezone
# offset always to '+0000'.
dfs = '%Y-%m-%d %H:%M:%S+0000'
logging.Formatter.converter = time.gmtime # log times in UTC
handler.setFormatter(logging.Formatter(fmt=fs, datefmt=dfs))
for logger_name in LOGGER_NAMES.values():
logger = logging.getLogger(logger_name)
if logger_name in logger_level_dict:
level = logger_level_dict[logger_name]
level_int = LOG_LEVELS[level]
if level_int != logging.NOTSET:
logger.addHandler(handler)
logger.setLevel(level_int)
else:
logger.setLevel(logging.NOTSET)
    LOGGING_ENABLED = True
| zhmc-os-forwarder | /zhmc_os_forwarder-0.2.0.tar.gz/zhmc_os_forwarder-0.2.0/zhmc_os_forwarder/utils.py | utils.py |
.. Copyright 2018 IBM Corp. All Rights Reserved.
..
.. Licensed under the Apache License, Version 2.0 (the "License");
.. you may not use this file except in compliance with the License.
.. You may obtain a copy of the License at
..
.. http://www.apache.org/licenses/LICENSE-2.0
..
.. Unless required by applicable law or agreed to in writing, software
.. distributed under the License is distributed on an "AS IS" BASIS,
.. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
.. See the License for the specific language governing permissions and
.. limitations under the License.
IBM Z HMC Prometheus Exporter
=============================
.. image:: https://img.shields.io/pypi/v/zhmc-prometheus-exporter.svg
:target: https://pypi.python.org/pypi/zhmc-prometheus-exporter/
:alt: Version on Pypi
.. image:: https://github.com/zhmcclient/zhmc-prometheus-exporter/workflows/test/badge.svg?branch=master
:target: https://github.com/zhmcclient/zhmc-prometheus-exporter/actions?query=branch%3Amaster
:alt: Test status (master)
.. image:: https://readthedocs.org/projects/zhmc-prometheus-exporter/badge/?version=latest
:target: https://readthedocs.org/projects/zhmc-prometheus-exporter/builds/
:alt: Docs status (master)
.. image:: https://coveralls.io/repos/github/zhmcclient/zhmc-prometheus-exporter/badge.svg?branch=master
:target: https://coveralls.io/github/zhmcclient/zhmc-prometheus-exporter?branch=master
:alt: Test coverage (master)
The **IBM Z HMC Prometheus Exporter** is a `Prometheus exporter`_ written in
Python that retrieves metrics from the `IBM Z`_ Hardware Management Console (HMC)
and exports them to the `Prometheus`_ monitoring system.
The exporter attempts to stay up as much as possible, for example it performs
automatic session renewals with the HMC if the logon session expires, and it
survives HMC reboots and automatically picks up metrics collection again once
the HMC comes back up.
.. _IBM Z: https://www.ibm.com/it-infrastructure/z
.. _Prometheus exporter: https://prometheus.io/docs/instrumenting/exporters/
.. _Prometheus: https://prometheus.io
Documentation
-------------
* `Documentation`_
* `Change log`_
.. _Documentation: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/
.. _Change log: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/changes.html
Quickstart
----------
* Install the exporter and all of its Python dependencies as follows:
.. code-block:: bash
$ pip install zhmc-prometheus-exporter
* Provide an *HMC credentials file* for use by the exporter.
The HMC credentials file tells the exporter which HMC to talk to for
obtaining metrics, and which userid and password to use for logging on to
the HMC.
Download the `sample HMC credentials file`_ as ``hmccreds.yaml`` and edit
that copy accordingly.
For details, see `HMC credentials file`_.
.. _HMC credentials file: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/usage.html#hmc-credentials-file
.. _sample HMC credentials file: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/usage.html#sample-hmc-credentials-file
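  A minimal credentials file might look as follows (a sketch based on the
  exporter's built-in ``--help-creds`` output; the address and logon values
  are placeholders):

  .. code-block:: yaml

      metrics:
        hmc: 1.2.3.4
        userid: myuser
        password: mypassword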
* Provide a *metric definition file* for use by the exporter.
The metric definition file maps the metrics returned by the HMC to metrics
exported to Prometheus.
Furthermore, the metric definition file allows optimizing the access time to
the HMC by disabling the fetching of metrics that are not needed.
Download the `sample metric definition file`_ as ``metrics.yaml``. It can
be used as it is and will have all metrics enabled and mapped properly. You
only need to edit the file if you want to adjust the metric names, labels, or
metric descriptions, or if you want to optimize access time by disabling
metrics not needed.
For details, see `Metric definition file`_.
.. _Metric definition file: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/usage.html#metric-definition-file
.. _sample metric definition file: https://zhmc-prometheus-exporter.readthedocs.io/en/stable/usage.html#sample-metric-definition-file
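  A short excerpt from such a file might look as follows (a sketch based on
  the exporter's built-in ``--help-metrics`` output; names shown are
  examples):

  .. code-block:: yaml

      metric_groups:
        partition-usage:
          prefix: partition
          fetch: true
      metrics:
        partition-usage:
          processor-usage:
            percent: true
            exporter_name: processor_usage_ratio
            exporter_desc: Usage ratio across all processors of the partition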
* Run the exporter as follows:
.. code-block:: bash
$ zhmc_prometheus_exporter -c hmccreds.yaml -m metrics.yaml
Exporter is up and running on port 9291
  Depending on the number of CPCs managed by your HMC and on how many metrics
  are enabled, it can take some time until the exporter reports that it is up
  and running. You can see what it does in the meantime by using the ``-v``
  option. Subsequent requests to the exporter will be sub-second.
* Direct your web browser at http://localhost:9291 to see the exported
Prometheus metrics. Refreshing the browser will update the metrics.
Reporting issues
----------------
If you encounter a problem, please report it as an `issue on GitHub`_.
.. _issue on GitHub: https://github.com/zhmcclient/zhmc-prometheus-exporter/issues
License
-------
This package is licensed under the `Apache 2.0 License`_.
.. _Apache 2.0 License: http://apache.org/licenses/LICENSE-2.0
| zhmc-prometheus-exporter | /zhmc_prometheus_exporter-1.4.2.tar.gz/zhmc_prometheus_exporter-1.4.2/README.rst | README.rst |
# Copyright 2018 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
IBM Z HMC Prometheus Exporter
"""
import argparse
import sys
import os
import types
import platform
import re
import time
import warnings
import logging
import logging.handlers
from contextlib import contextmanager
import jinja2
import six
import urllib3
import yaml
import jsonschema
import zhmcclient
from prometheus_client import start_http_server
from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily, \
REGISTRY
from ._version import __version__
DEFAULT_CREDS_FILE = '/etc/zhmc-prometheus-exporter/hmccreds.yaml'
DEFAULT_METRICS_FILE = '/etc/zhmc-prometheus-exporter/metrics.yaml'
EXPORTER_LOGGER_NAME = 'zhmcexporter'
# Logger names by log component
LOGGER_NAMES = {
'exporter': EXPORTER_LOGGER_NAME,
'hmc': zhmcclient.HMC_LOGGER_NAME,
'jms': zhmcclient.JMS_LOGGER_NAME,
}
VALID_LOG_COMPONENTS = list(LOGGER_NAMES.keys()) + ['all']
# Log levels by their CLI names
LOG_LEVELS = {
'error': logging.ERROR,
'warning': logging.WARNING,
'info': logging.INFO,
'debug': logging.DEBUG,
'off': logging.NOTSET,
}
VALID_LOG_LEVELS = list(LOG_LEVELS.keys())
# Defaults for --log-comp option.
DEFAULT_LOG_LEVEL = 'warning'
DEFAULT_LOG_COMP = 'all=warning'
# Values for printing messages dependent on verbosity level in command line
PRINT_ALWAYS = 0
PRINT_V = 1
PRINT_VV = 2
VALID_LOG_DESTINATIONS = ['stderr', 'syslog', 'FILE']
# Syslog facilities
VALID_SYSLOG_FACILITIES = [
'user', 'local0', 'local1', 'local2', 'local3', 'local4', 'local5',
'local6', 'local7'
]
DEFAULT_SYSLOG_FACILITY = 'user'
# Values to use for the 'address' parameter when creating a SysLogHandler.
# Key: Operating system type, as returned by platform.system(). For CygWin,
# the returned value is 'CYGWIN_NT-6.1', which is special-cased to 'CYGWIN_NT'.
# Value: Value for the 'address' parameter.
SYSLOG_ADDRESS = {
'Linux': '/dev/log',
'Darwin': '/var/run/syslog', # macOS / OS-X
'Windows': ('localhost', 514),
'CYGWIN_NT': '/dev/log', # Requires syslog-ng pkg
'other': ('localhost', 514), # used if no key matches
}
# Sleep time in seconds when retrying metrics retrieval
RETRY_SLEEP_TIME = 10
# Retry / timeout configuration for zhmcclient (used at the socket level)
RETRY_TIMEOUT_CONFIG = zhmcclient.RetryTimeoutConfig(
connect_timeout=10,
connect_retries=2,
read_timeout=300,
read_retries=2,
max_redirects=zhmcclient.DEFAULT_MAX_REDIRECTS,
operation_timeout=zhmcclient.DEFAULT_OPERATION_TIMEOUT,
status_timeout=zhmcclient.DEFAULT_STATUS_TIMEOUT,
name_uri_cache_timetolive=zhmcclient.DEFAULT_NAME_URI_CACHE_TIMETOLIVE,
)
class YAMLInfoNotFoundError(Exception):
"""A custom error that is raised when something that was expected in a
    YAML file cannot be found.
"""
pass
class ConnectionError(Exception):
# pylint: disable=redefined-builtin
"""Unwrapped from zhmcclient"""
pass
class AuthError(Exception):
"""Unwrapped from zhmcclient"""
pass
class OtherError(Exception):
"""Other exceptions raised by zhmcclient"""
pass
class ProperExit(Exception):
"""Terminating while the server was running"""
pass
class ImproperExit(Exception):
"""Terminating because something went wrong"""
pass
class EarlyExit(Exception):
"""Terminating before the server was started"""
pass
@contextmanager
def zhmc_exceptions(session, hmccreds_filename):
# pylint: disable=invalid-name
"""
Context manager that handles zhmcclient exceptions by raising the
appropriate exporter exceptions.
Example::
with zhmc_exceptions(session, hmccreds_filename):
client = zhmcclient.Client(session)
version_info = client.version_info()
"""
try:
yield
except zhmcclient.ConnectionError as exc:
new_exc = ConnectionError(
"Connection error using IP address {} defined in HMC credentials "
"file {}: {}".format(session.host, hmccreds_filename, exc))
new_exc.__cause__ = None
raise new_exc # ConnectionError
except zhmcclient.ClientAuthError as exc:
new_exc = AuthError(
"Client authentication error for the HMC at {h} using "
"userid '{u}' defined in HMC credentials file {f}: {m}".
format(h=session.host, u=session.userid, f=hmccreds_filename,
m=exc))
new_exc.__cause__ = None
raise new_exc # AuthError
except zhmcclient.ServerAuthError as exc:
http_exc = exc.details # zhmcclient.HTTPError
new_exc = AuthError(
"Authentication error returned from the HMC at {h} using "
"userid '{u}' defined in HMC credentials file {f}: {m} "
"(HMC operation {hm} {hu}, HTTP status {hs}.{hr})".
format(h=session.host, u=session.userid, f=hmccreds_filename,
m=exc, hm=http_exc.request_method, hu=http_exc.request_uri,
hs=http_exc.http_status, hr=http_exc.reason))
new_exc.__cause__ = None
raise new_exc # AuthError
except (IOError, OSError) as exc:
new_exc = OtherError(str(exc))
new_exc.__cause__ = None
raise new_exc # OtherError
except zhmcclient.Error as exc:
new_exc = OtherError(
"Error returned from HMC at {}: {}".format(session.host, exc))
new_exc.__cause__ = None
raise new_exc # OtherError
def parse_args(args):
"""Parses the CLI arguments."""
parser = argparse.ArgumentParser(
description="IBM Z HMC Exporter - a Prometheus exporter for metrics "
"from the IBM Z HMC")
parser.add_argument("-c", metavar="CREDS_FILE",
default=DEFAULT_CREDS_FILE,
help="path name of HMC credentials file. "
"Use --help-creds for details. "
"Default: {}".format(DEFAULT_CREDS_FILE))
parser.add_argument("-m", metavar="METRICS_FILE",
default=DEFAULT_METRICS_FILE,
help="path name of metric definition file. "
"Use --help-metrics for details. "
"Default: {}".format(DEFAULT_METRICS_FILE))
parser.add_argument("-p", metavar="PORT",
default="9291",
help="port for exporting. Default: 9291")
parser.add_argument("--log", dest='log_dest', metavar="DEST", default=None,
help="enable logging and set a log destination "
"({dests}). Default: no logging".
format(dests=', '.join(VALID_LOG_DESTINATIONS)))
parser.add_argument("--log-comp", dest='log_complevels', action='append',
metavar="COMP[=LEVEL]", default=None,
help="set a logging level ({levels}, default: "
"{def_level}) for a component ({comps}). May be "
"specified multiple times; options add to the default "
"of: {def_comp}".
format(levels=', '.join(VALID_LOG_LEVELS),
comps=', '.join(VALID_LOG_COMPONENTS),
def_level=DEFAULT_LOG_LEVEL,
def_comp=DEFAULT_LOG_COMP))
parser.add_argument("--syslog-facility", metavar="TEXT",
default=DEFAULT_SYSLOG_FACILITY,
help="syslog facility ({slfs}) when logging to the "
"system log. Default: {def_slf}".
format(slfs=', '.join(VALID_SYSLOG_FACILITIES),
def_slf=DEFAULT_SYSLOG_FACILITY))
parser.add_argument("--verbose", "-v", action='count', default=0,
help="increase the verbosity level (max: 2)")
parser.add_argument("--help-creds", action='store_true',
help="show help for HMC credentials file and exit")
parser.add_argument("--help-metrics", action='store_true',
help="show help for metric definition file and exit")
return parser.parse_args(args)
def help_creds():
"""
Print help for HMC credentials file.
"""
print("""
Help for HMC credentials file
The HMC credentials file is a YAML file that defines the IP address of the HMC
and the userid and password for logging on to the HMC.
The HMC userid must be authorized for object access permission to the resources
for which metrics are to be returned. Metrics of resources for which the userid
does not have object access permission will not be included in the result,
without raising an error.
The following example shows a complete HMC credentials file. For more details,
see the documentation at https://zhmc-prometheus-exporter.readthedocs.io/.
---
metrics:
  hmc: 1.2.3.4
  userid: myuser
  password: mypassword

extra_labels:
  - name: pod
    value: mypod
""")
def help_metrics():
"""
Print help for metric definition file.
"""
print("""
Help for metric definition file
The metric definition file is a YAML file that defines which metrics are
exported to prometheus and under which names.
The following example shows a valid metric definition file that defines
a small subset of metrics and metric groups for DPM mode to be exported. For
more details and a full list of metrics and metric groups, see the
documentation at https://zhmc-prometheus-exporter.readthedocs.io/.
---
metric_groups:
  partition-usage:
    prefix: partition
    fetch: true
    labels:
      - name: cpc
        value: resource.parent
      - name: partition
        value: resource
  # ...
metrics:
  partition-usage:
    processor-usage:
      percent: true
      exporter_name: processor_usage_ratio
      exporter_desc: Usage ratio across all processors of the partition
    # ...
  # ...
""")
def validate_option(option_name, option_value, allowed_values):
"""
Validate the option value against the allowed option values
    and return the value if it passes; raise EarlyExit otherwise.
Raises:
EarlyExit: Invalid command line usage.
"""
if option_value not in allowed_values:
raise EarlyExit(
"Invalid value {val} for {opt} option. Allowed are: {allowed}".
format(opt=option_name, val=option_value,
allowed=', '.join(allowed_values)))
return option_value
def parse_yaml_file(yamlfile, name, schemafilename=None):
"""
Returns the parsed content of a YAML file as a Python object.
Optionally validates against a specified JSON schema file in YAML format.
Raises:
ImproperExit
"""
try:
with open(yamlfile, "r", encoding='utf-8') as fp:
yaml_obj = yaml.safe_load(fp)
except FileNotFoundError as exc:
new_exc = ImproperExit(
"Cannot find {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except PermissionError as exc:
new_exc = ImproperExit(
"Permission error reading {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except yaml.YAMLError as exc:
new_exc = ImproperExit(
"YAML error reading {} {}: {}".
format(name, yamlfile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
if schemafilename:
schemafile = os.path.join(
os.path.dirname(__file__), 'schemas', schemafilename)
try:
with open(schemafile, 'r', encoding='utf-8') as fp:
schema = yaml.safe_load(fp)
except FileNotFoundError as exc:
new_exc = ImproperExit(
"Internal error: Cannot find schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except PermissionError as exc:
new_exc = ImproperExit(
"Internal error: Permission error reading schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
except yaml.YAMLError as exc:
new_exc = ImproperExit(
"Internal error: YAML error reading schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
try:
jsonschema.validate(yaml_obj, schema)
except jsonschema.exceptions.SchemaError as exc:
new_exc = ImproperExit(
"Internal error: Invalid JSON schema file {}: {}".
format(schemafile, exc))
new_exc.__cause__ = None
raise new_exc
except jsonschema.exceptions.ValidationError as exc:
element_str = json_path_str(exc.absolute_path)
new_exc = ImproperExit(
"Validation of {} {} failed on {}: {}".
format(name, yamlfile, element_str, exc.message))
new_exc.__cause__ = None
raise new_exc
return yaml_obj
def json_path_str(path_list):
"""
Return a string with the path list in JSON path notation, except that
the root element is not '$' but verbally expressed.
"""
if not path_list:
return "root elements"
path_str = ""
for p in path_list:
if isinstance(p, int):
path_str += "[{}]".format(p)
else:
path_str += ".{}".format(p)
if path_str.startswith('.'):
path_str = path_str[1:]
return "element '{}'".format(path_str)
def split_version(version_str, pad_to):
"""
Return a tuple with the version parts as integers.
Parameters:
version_str (string): Version string, where the parts are separated by
dot. The version parts must be decimal numbers. Example: '2.14'
pad_to (int): Minimum number of version parts to return, padding the
least significant version parts with 0. Example: '2.14' padded to
3 results in (2, 14, 0).
Returns:
tuple(int, ...): Tuple of version parts, as integers.
"""
version_info = []
for v in version_str.strip('"\'').split('.'):
if v == '':
v = 0
vint = int(v) # May raise ValueError
version_info.append(vint)
    while len(version_info) < pad_to:
        version_info.append(0)
return tuple(version_info)
MNU_PATTERN = r'\d+(?:\.\d+(?:\.\d+)?)?' # M.N.U
COND_PATTERN = '^(.*?)("{mnu}"|\'{mnu}\')(.*)$'.format(mnu=MNU_PATTERN)
COND_PATTERN = re.compile(COND_PATTERN)
def resource_str(resource_obj):
"""
Return a human readable string identifying the resource object, for
messages.
"""
res_class = resource_obj.properties['class']
if res_class == 'cpc':
res_str = "CPC '{}'".format(resource_obj.name)
elif res_class in ('partition', 'logical-partition'):
res_str = "partition '{}' on CPC '{}'". \
format(resource_obj.name, resource_obj.manager.parent.name)
else:
        raise ValueError(
            "Resource class {} is not supported".format(res_class))
return res_str
def eval_condition(condition, hmc_version, se_version):
"""
Evaluate a Python expression as a condition and return a boolean indicating
whether the condition is true.
Any M.N.U version strings in the condition expression are converted to a
tuple of integers before evaluating the expression.
Parameters:
condition (string): Python expression to evaluate as a condition. The
remaining parameters are valid variables to use in the expression.
hmc_version (string): Expression variable: HMC version as a string.
se_version (string): Expression variable: SE/CPC version as a string.
Returns:
bool: Evaluated condition
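
    Example:
        eval_condition("hmc_version >= '2.14'", '2.14.1', None) returns
        True, because (2, 14, 1) >= (2, 14, 0).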
"""
hmc_version = split_version(hmc_version, 3)
if se_version:
se_version = split_version(se_version, 3)
while True:
m = COND_PATTERN.match(condition)
if m is None:
break
condition = "{}{}{}".format(
m.group(1), split_version(m.group(2), 3), m.group(3))
# pylint: disable=eval-used
condition = eval(condition, None,
dict(hmc_version=hmc_version, se_version=se_version))
return condition
# Metrics context creation & deletion and retrieval derived from
# github.com/zhmcclient/python-zhmcclient/examples/metrics.py
def create_session(cred_dict, hmccreds_filename):
"""
    Create a zhmcclient session with the HMC. A session is needed before a
    metrics context can be created.
Parameters:
cred_dict (dict): 'metric' object from the HMC credentials file,
specifying items: hmc, userid, password, verify_cert.
hmccreds_filename (string): Path name of HMC credentials file.
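
    Example cred_dict (illustrative values):
        {'hmc': '10.11.12.13', 'userid': 'myuser',
         'password': 'mypassword', 'verify_cert': False}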
Returns:
zhmcclient.Session
"""
# These warnings do not concern us
urllib3.disable_warnings()
logprint(logging.INFO, PRINT_V,
"HMC host: {}".format(cred_dict["hmc"]))
logprint(logging.INFO, PRINT_V,
"HMC userid: {}".format(cred_dict["userid"]))
verify_cert = cred_dict.get("verify_cert", True)
if isinstance(verify_cert, six.string_types):
if not os.path.isabs(verify_cert):
verify_cert = os.path.join(
os.path.dirname(hmccreds_filename), verify_cert)
logprint(logging.INFO, PRINT_V,
"HMC certificate validation: {}".format(verify_cert))
session = zhmcclient.Session(cred_dict["hmc"],
cred_dict["userid"],
cred_dict["password"],
verify_cert=verify_cert,
retry_timeout_config=RETRY_TIMEOUT_CONFIG)
return session
def get_hmc_info(session):
"""
Return the result of the 'Query API Version' operation. This includes
the HMC version, HMC name and other data. For details, see the operation's
result description in the HMC WS API book.
    Raises: zhmcclient exceptions
"""
client = zhmcclient.Client(session)
hmc_info = client.query_api_version()
return hmc_info
def create_metrics_context(session, yaml_metric_groups, hmc_version):
"""
Creating a context is mandatory for reading metrics from the Z HMC.
    Takes the session, the metric_groups dictionary from the metrics YAML
    file (for fetch/do-not-fetch information), and the HMC version (for
    evaluating fetch conditions).
Returns a tuple(context, resources), where context is the metric context
and resources is a dict(key: metric group name, value: list of
auto-enabled resource objects for the metric group).
    Raises: zhmcclient exceptions
"""
fetched_hmc_metric_groups = []
fetched_res_metric_groups = []
for metric_group in yaml_metric_groups:
mg_dict = yaml_metric_groups[metric_group]
mg_type = mg_dict.get("type", 'hmc')
# fetch is required in the metrics schema:
fetch = mg_dict["fetch"]
# if is optional in the metrics schema:
if fetch and "if" in mg_dict:
fetch = eval_condition(mg_dict["if"], hmc_version, None)
if fetch:
if mg_type == 'hmc':
fetched_hmc_metric_groups.append(metric_group)
else:
assert mg_type == 'resource' # ensured by enum
fetched_res_metric_groups.append(metric_group)
client = zhmcclient.Client(session)
logprint(logging.INFO, PRINT_V,
"Creating a metrics context on the HMC for HMC metric "
"groups: {}".format(', '.join(fetched_hmc_metric_groups)))
context = client.metrics_contexts.create(
{"anticipated-frequency-seconds": 15,
"metric-groups": fetched_hmc_metric_groups})
resources = {}
for metric_group in fetched_res_metric_groups:
logprint(logging.INFO, PRINT_V,
"Retrieving resources from the HMC for resource metric "
"group {}".format(metric_group))
try:
resource_path = yaml_metric_groups[metric_group]['resource']
except KeyError:
new_exc = ImproperExit(
"Missing 'resource' item in resource metric group {} in "
"metrics file".
format(metric_group))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
if resource_path == 'cpc':
resources[metric_group] = []
cpcs = client.cpcs.list()
for cpc in cpcs:
logprint(logging.INFO, PRINT_V,
"Enabling auto-update for CPC {}".format(cpc.name))
try:
cpc.enable_auto_update()
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Ignoring resource-based metrics for CPC {}, "
"because enabling auto-update for it failed "
"with {}: {}".
format(cpc.name, exc.__class__.__name__, exc))
continue # skip this CPC
resources[metric_group].append(cpc)
elif resource_path == 'cpc.partition':
resources[metric_group] = []
cpcs = client.cpcs.list()
for cpc in cpcs:
partitions = cpc.partitions.list()
for partition in partitions:
logprint(logging.INFO, PRINT_V,
"Enabling auto-update for partition {}.{}".
format(cpc.name, partition.name))
try:
partition.enable_auto_update()
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Ignoring resource-based metrics for "
"partition {}.{}, because enabling "
"auto-update for it failed with {}: {}".
format(cpc.name, partition.name,
exc.__class__.__name__, exc))
continue # skip this partition
resources[metric_group].append(partition)
elif resource_path == 'cpc.logical-partition':
resources[metric_group] = []
cpcs = client.cpcs.list()
for cpc in cpcs:
lpars = cpc.lpars.list()
for lpar in lpars:
logprint(logging.INFO, PRINT_V,
"Enabling auto-update for LPAR {}.{}".
format(cpc.name, lpar.name))
try:
lpar.enable_auto_update()
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Ignoring resource-based metrics for "
"LPAR {}.{}, because enabling "
"auto-update for it failed with {}: {}".
format(cpc.name, lpar.name,
exc.__class__.__name__, exc))
continue # skip this LPAR
resources[metric_group].append(lpar)
else:
new_exc = ImproperExit(
"Invalid 'resource' item in resource metric group {} in "
"metrics file: {}".
format(metric_group, resource_path))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
return context, resources
def cleanup(session, context, resources):
"""
Clean up:
- delete the metric context
- disable auto-update on resources
- logoff from the HMC session
"""
try:
if context:
logprint(logging.INFO, PRINT_ALWAYS,
"Cleaning up metrics context on HMC")
try:
context.delete()
except zhmcclient.HTTPError as exc:
if exc.http_status == 404 and exc.reason == 1:
# The metrics context does not exist anymore
pass
elif exc.http_status == 403:
# The session does not exist anymore
pass
if resources:
logprint(logging.INFO, PRINT_ALWAYS,
"Cleaning up notification subscription on HMC")
for res_list in resources.values():
for res in res_list:
try:
res.disable_auto_update()
except zhmcclient.HTTPError as exc:
if exc.http_status == 403:
# The session does not exist anymore
pass
if session:
logprint(logging.INFO, PRINT_ALWAYS,
"Closing session with HMC")
try:
session.logoff()
except zhmcclient.HTTPError as exc:
if exc.http_status == 403:
# The session does not exist anymore
pass
except zhmcclient.Error as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error when cleaning up: {}".format(exc))
def retrieve_metrics(context):
"""
Retrieve metrics from the Z HMC.
Takes the metrics context.
Returns a zhmcclient.MetricsResponse object.
    Raises: zhmcclient exceptions
"""
retrieved_metrics = context.get_metrics()
metrics_object = zhmcclient.MetricsResponse(context, retrieved_metrics)
return metrics_object
class ResourceCache(object):
# pylint: disable=too-few-public-methods
"""
Cache for zhmcclient resource objects to avoid having to look them up
repeatedly.
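
    Usage sketch (illustrative):

        cache = ResourceCache()
        # Looks up the resource on the HMC only on the first call per URI:
        resource = cache.resource(uri, object_value)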
"""
def __init__(self):
self._resources = {} # dict URI -> Resource object
def resource(self, uri, object_value):
"""
Return the zhmcclient resource object for the URI, updating the cache
if not present.
"""
try:
_resource = self._resources[uri]
except KeyError:
logprint(logging.INFO, PRINT_VV,
"Finding resource for {}".format(uri))
try:
_resource = object_value.resource # Takes time to find on HMC
except zhmcclient.MetricsResourceNotFound as exc:
mgd = object_value.metric_group_definition
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning: Did not find resource {} specified in "
"metric object value for metric group '{}'".
format(uri, mgd.name))
for mgr in exc.managers:
res_class = mgr.class_name
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning details: List of {} resources found:".
format(res_class))
res_dict = {}
resources = mgr.list()
for res in resources:
res_dict[res.uri] = res
logprint(logging.WARNING, PRINT_ALWAYS,
repr(res_dict))
logprint(logging.WARNING, PRINT_ALWAYS,
"Warning details: Current resource cache:")
logprint(logging.WARNING, PRINT_ALWAYS,
repr(self._resources))
raise
self._resources[uri] = _resource
return _resource
def remove(self, uri):
"""
Remove the resource with a specified URI from the cache, if present.
If not present, nothing happens.
"""
try:
del self._resources[uri]
except KeyError:
pass
def expand_global_label_value(
env, label_name, item_value, hmc_info):
"""
Expand a Jinja2 expression on a label value, for a global (extra) label.
"""
try:
func = env.compile_expression(item_value)
except jinja2.TemplateSyntaxError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring global label '{}' due to "
"syntax error in the Jinja2 expression in its value: {}".
format(label_name, exc))
return None
try:
value = func(hmc_info=hmc_info)
except jinja2.TemplateError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring global label '{}' due to "
"error in rendering the Jinja2 expression in its value: {}".
format(label_name, exc))
return None
return str(value)
def expand_group_label_value(
env, label_name, group_name, item_value, resource_obj,
metric_values=None):
"""
Expand a Jinja2 expression on a label value, for a metric group label.
"""
try:
func = env.compile_expression(item_value)
except jinja2.TemplateSyntaxError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring label '{}' on metric group '{}' due to "
"syntax error in label value Jinja2 expression: {}".
format(label_name, group_name, exc))
return None
try:
value = func(
resource_obj=resource_obj,
metric_values=metric_values)
except jinja2.TemplateError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring label '{}' on metric group '{}' due to "
"error in rendering label value Jinja2 expression: {}".
format(label_name, group_name, exc))
return None
return str(value)
def expand_metric_label_value(
env, label_name, metric_exporter_name, item_value, resource_obj,
metric_values=None):
"""
Expand a Jinja2 expression on a label value, for a metric label.
"""
try:
func = env.compile_expression(item_value)
except jinja2.TemplateSyntaxError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring label '{}' on metric with exporter name '{}' due to "
"syntax error in the Jinja2 expression in its value: {}".
format(label_name, metric_exporter_name, exc))
return None
try:
value = func(
resource_obj=resource_obj,
metric_values=metric_values)
except jinja2.TemplateError as exc:
logprint(logging.WARNING, PRINT_V,
"Ignoring label '{}' on metric with exporter name '{}' due to "
"error in rendering the Jinja2 expression in its value: {}".
format(label_name, metric_exporter_name, exc))
return None
return str(value)
def cpc_from_resource(resource):
"""
From a given zhmcclient resource object, try to navigate to its CPC
and return the zhmcclient.Cpc object.
If the resource is not a CPC or part of a CPC, return None.
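
    Example: For a zhmcclient.Partition object, this returns the
    zhmcclient.Cpc object of the CPC containing the partition. For the
    zhmcclient.Console object, this returns None.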
"""
cpc = resource
while True:
if cpc is None or cpc.manager.class_name == 'cpc':
break
cpc = cpc.manager.parent
return cpc
def build_family_objects(
metrics_object, yaml_metric_groups, yaml_metrics, metrics_filename,
extra_labels, hmc_version, se_versions, resource_cache=None):
"""
Go through all retrieved metrics and build the Prometheus Family objects.
Note: resource_cache will be omitted in tests, and is therefore optional.
Returns a dictionary of Prometheus Family objects with the following
structure:
      family_name:
        GaugeMetricFamily or CounterMetricFamily object
"""
env = jinja2.Environment()
family_objects = {}
for metric_group_value in metrics_object.metric_group_values:
metric_group = metric_group_value.name
try:
yaml_metric_group = yaml_metric_groups[metric_group]
except KeyError:
warnings.warn("Skipping metric group '{}' returned by the HMC "
"that is not defined in the 'metric_groups' section "
"of metric definition file {}".
format(metric_group, metrics_filename))
continue # Skip this metric group
for object_value in metric_group_value.object_values:
if resource_cache:
try:
resource = resource_cache.resource(
object_value.resource_uri, object_value)
except zhmcclient.MetricsResourceNotFound:
# Some details have already been logged & printed
warnings.warn("Skipping resource with URI '{}' of metric "
"group '{}' returned by the HMC that is not "
"found on the HMC".
format(object_value.resource_uri,
metric_group))
continue # Skip this metric
else:
resource = object_value.resource
metric_values = object_value.metrics
cpc = cpc_from_resource(resource)
if cpc:
# This resource is a CPC or part of a CPC
se_version = se_versions[cpc.name]
else:
# This resource is an HMC or part of an HMC
se_version = None
# Calculate the resource labels at the metric group level:
mg_labels = dict(extra_labels)
# labels is optional in the metrics schema:
default_labels = [dict(name='resource', value='resource_obj.name')]
yaml_labels = yaml_metric_group.get('labels', default_labels)
for item in yaml_labels:
# name, value are required in the metrics schema:
label_name = item['name']
item_value = item['value']
label_value = expand_group_label_value(
env, label_name, metric_group, item_value, resource,
metric_values)
if label_value is not None:
mg_labels[label_name] = label_value
for metric in metric_values:
try:
yaml_metric = yaml_metrics[metric_group][metric]
except KeyError:
warnings.warn("Skipping metric '{}' of metric group '{}' "
"returned by the HMC that is not defined in "
"the 'metrics' section of metric definition "
"file {}".
format(metric, metric_group,
metrics_filename))
continue # Skip this metric
metric_value = metric_values[metric]
# Skip metrics with the special value -1 (which indicates that
# the resource does not exist)
if metric_value == -1:
continue
# Skip metrics that are defined to be ignored
# exporter_name is required in the metrics schema:
if not yaml_metric["exporter_name"]:
continue
                # Skip conditional metrics whose condition is not met
if_expr = yaml_metric.get("if", None)
if if_expr and \
not eval_condition(if_expr, hmc_version, se_version):
continue
                # Transform HMC percentages (where a value of 100 means
                # 100%) to Prometheus values (where a value of 1 means 100%)
                # percent is optional in the metrics schema:
if yaml_metric.get("percent", False):
metric_value /= 100
# Calculate the resource labels at the metric level:
labels = dict(mg_labels)
# labels is optional in the metrics schema:
yaml_labels = yaml_metric.get('labels', [])
for item in yaml_labels:
# name, value are required in the metrics schema:
label_name = item['name']
item_value = item['value']
label_value = expand_metric_label_value(
env, label_name, yaml_metric["exporter_name"],
item_value, resource, metric_values)
if label_value is not None:
labels[label_name] = label_value
# Create a Family object, if needed
# prefix,exporter_name are required in the metrics schema:
family_name = "zhmc_{}_{}".format(
yaml_metric_group["prefix"],
yaml_metric["exporter_name"])
try:
family_object = family_objects[family_name]
except KeyError:
# exporter_desc is required in the metrics schema:
metric_type = yaml_metric.get("metric_type", "gauge")
if metric_type == "gauge":
family_object = GaugeMetricFamily(
family_name,
yaml_metric["exporter_desc"],
labels=list(labels.keys()))
else:
assert metric_type == "counter" # ensured by schema
family_object = CounterMetricFamily(
family_name,
yaml_metric["exporter_desc"],
labels=list(labels.keys()))
family_objects[family_name] = family_object
# Add the metric value to the Family object
family_object.add_metric(list(labels.values()), metric_value)
return family_objects
def build_family_objects_res(
resources, yaml_metric_groups, yaml_metrics, metrics_filename,
extra_labels, hmc_version, se_versions, resource_cache=None):
"""
Go through all auto-updated resources and build the Prometheus Family
objects for them.
Note: resource_cache will be omitted in tests, and is therefore optional.
Returns a dictionary of Prometheus Family objects with the following
structure:
      family_name:
        GaugeMetricFamily or CounterMetricFamily object
"""
env = jinja2.Environment()
family_objects = {}
for metric_group, res_list in resources.items():
yaml_metric_group = yaml_metric_groups[metric_group]
        for resource in list(res_list):
            # Note: We iterate over a copy of the list, because resources
            # that no longer exist are removed from the original list, and
            # removing items while iterating over the original list would
            # disturb the iteration.
if resource.ceased_existence:
try:
res_str = resource.name
except zhmcclient.CeasedExistence:
# For attribute 'name', the exception is only raised when
# the name is not yet known locally.
res_str = "with URI {}".format(resource.uri)
logprint(logging.INFO, PRINT_VV,
"Resource no longer exists on HMC: {} {}".
format(resource.manager.class_name, res_str))
                # Remove the resource from the list so that it no longer
                # shows up in Prometheus data.
                res_list.remove(resource)
# Remove the resource from the resource cache. This does not
# influence what is shown in Prometheus data, but it is simply
# a cleanup.
if resource_cache:
resource_cache.remove(resource.uri)
continue
cpc = cpc_from_resource(resource)
if cpc:
# This resource is a CPC or part of a CPC
se_version = se_versions[cpc.name]
else:
# This resource is an HMC or part of an HMC
se_version = None
# Calculate the resource labels at the metric group level:
mg_labels = dict(extra_labels)
# labels is optional in the metrics schema:
default_labels = [dict(name='resource', value='resource_obj.name')]
yaml_labels = yaml_metric_group.get('labels', default_labels)
for item in yaml_labels:
# name, value are required in the metrics schema:
label_name = item['name']
item_value = item['value']
label_value = expand_group_label_value(
env, label_name, metric_group, item_value, resource)
if label_value is not None:
mg_labels[label_name] = label_value
yaml_mg = yaml_metrics[metric_group]
if isinstance(yaml_mg, dict):
yaml_mg_iter = yaml_mg.items()
else:
yaml_mg_iter = yaml_mg
for item in yaml_mg_iter:
if isinstance(yaml_mg, dict):
prop_name, yaml_metric = item
else:
yaml_metric = item
prop_name = yaml_metric.get('property_name', None)
# exporter_name is required in the metrics schema
exporter_name = yaml_metric["exporter_name"]
# Skip metrics that are defined to be ignored
if not exporter_name:
continue
                # Skip conditional metrics whose condition is not met
if_expr = yaml_metric.get("if", None)
if if_expr and \
not eval_condition(if_expr, hmc_version, se_version):
continue
if prop_name:
try:
metric_value = resource.properties[prop_name]
except KeyError:
# Skip resource properties that do not exist on older
# CPC/HMC versions.
continue
else:
prop_expr = yaml_metric.get('properties_expression', None)
if not prop_expr:
new_exc = ImproperExit(
"Metric definition for exporter name '{}' in "
"metric definition file {} has neither "
"'property_name' nor 'properties_expression'".
format(exporter_name, metrics_filename))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
try:
func = env.compile_expression(
prop_expr, undefined_to_none=False)
except jinja2.exceptions.TemplateError as exc:
new_exc = ImproperExit(
"Error compiling properties expression {!r} "
"defined for exporter name '{}' "
"in metric definition file {}: {}: {}".
format(prop_expr, exporter_name, metrics_filename,
exc.__class__.__name__, exc))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
try:
metric_value = func(properties=resource.properties)
# pylint: disable=broad-exception-caught,broad-except
except Exception as exc:
# Typical exceptions:
# - jinja2.exceptions.UndefinedError, e.g. for missing
# HMC resource properties
# - TypeError
logprint(
logging.WARNING, PRINT_ALWAYS,
"Ignoring metric with exporter name '{}' "
"in metric definition file {} due to error "
"evaluating properties expression {}: {}: {}".
format(exporter_name, metrics_filename, prop_expr,
exc.__class__.__name__, exc))
continue
# Skip resource properties that have a null value. An example
# are some LPAR/partition properties that are null when the
# partition is not active. Prometheus cannot represent null
# values (It can represent the NaN float value but that would
# not really be the right choice).
if metric_value is None:
continue
# Transform the HMC value using the valuemap, if defined:
valuemap = yaml_metric.get('valuemap', None)
if valuemap:
try:
metric_value = valuemap[metric_value]
except KeyError:
warnings.warn(
"Skipping property '{}' of resource metric group "
"'{}' in metric definition file {}, because its "
"valuemap does not define a mapping for "
"value {!r} returned for {}".
format(prop_name, metric_group, metrics_filename,
metric_value, resource_str(resource)))
continue
                # Transform HMC percentages (where a value of 100 means
                # 100%) to Prometheus values (where a value of 1 means 100%)
                # percent is optional in the metrics schema:
if yaml_metric.get("percent", False):
metric_value /= 100
# Calculate the resource labels at the metric level:
labels = dict(mg_labels)
# labels is optional in the metrics schema:
yaml_labels = yaml_metric.get('labels', [])
for item in yaml_labels: # pylint: disable=redefined-outer-name
# name, value are required in the metrics schema:
label_name = item['name']
item_value = item['value']
label_value = expand_metric_label_value(
env, label_name, exporter_name, item_value, resource)
if label_value is not None:
labels[label_name] = label_value
# Create a Family object, if needed
# prefix,exporter_name are required in the metrics schema:
family_name = "zhmc_{}_{}".format(
yaml_metric_group["prefix"],
yaml_metric["exporter_name"])
try:
family_object = family_objects[family_name]
except KeyError:
# exporter_desc is required in the metrics schema:
metric_type = yaml_metric.get("metric_type", "gauge")
if metric_type == "gauge":
family_object = GaugeMetricFamily(
family_name,
yaml_metric["exporter_desc"],
labels=list(labels.keys()))
else:
assert metric_type == "counter" # ensured by schema
family_object = CounterMetricFamily(
family_name,
yaml_metric["exporter_desc"],
labels=list(labels.keys()))
family_objects[family_name] = family_object
# Add the metric value to the Family object
family_object.add_metric(list(labels.values()), metric_value)
return family_objects
class ZHMCUsageCollector():
# pylint: disable=too-few-public-methods
"""Collects the usage for exporting."""
def __init__(self, yaml_creds, session, context, resources,
yaml_metric_groups,
yaml_metrics, extra_labels, filename_metrics, filename_creds,
resource_cache, hmc_version, se_versions):
self.yaml_creds = yaml_creds
self.session = session
self.context = context
self.resources = resources
self.yaml_metric_groups = yaml_metric_groups
self.yaml_metrics = yaml_metrics
self.extra_labels = extra_labels
self.filename_metrics = filename_metrics
self.filename_creds = filename_creds
self.resource_cache = resource_cache
self.hmc_version = hmc_version
self.se_versions = se_versions
def collect(self):
"""
Yield the metrics for exporting.
Uses the context, the metric groups and the metrics from the YAML file,
and the name of the YAML file for error output.
Retries indefinitely in case of connection problems with the HMC or
in case of HTTP errors. HTTP 404.1 is automatically handled by
refreshing the metrics context.
Raises exception in case of authentication errors or other errors.
"""
logprint(logging.INFO, None,
"Collecting metrics")
with zhmc_exceptions(self.session, self.filename_creds):
while True:
logprint(logging.DEBUG, None,
"Fetching metrics from HMC")
try:
metrics_object = retrieve_metrics(self.context)
except zhmcclient.HTTPError as exc:
if exc.http_status == 404 and exc.reason == 1:
logprint(logging.WARNING, PRINT_ALWAYS,
"Recreating the metrics context after HTTP "
"status {}.{}".
format(exc.http_status, exc.reason))
self.context, _ = create_metrics_context(
self.session, self.yaml_metric_groups,
self.hmc_version)
continue
logprint(logging.WARNING, PRINT_ALWAYS,
"Retrying after HTTP status {}.{}: {}".
format(exc.http_status, exc.reason, exc))
time.sleep(RETRY_SLEEP_TIME)
continue
except zhmcclient.ConnectionError as exc:
logprint(logging.WARNING, PRINT_ALWAYS,
"Retrying after connection error: {}".format(exc))
time.sleep(RETRY_SLEEP_TIME)
continue
except zhmcclient.ServerAuthError as exc:
http_exc = exc.details # zhmcclient.HTTPError
logprint(logging.WARNING, PRINT_ALWAYS,
"Retrying after server authentication error with "
"HTTP status {}.{}".
format(http_exc.http_status, http_exc.reason))
time.sleep(RETRY_SLEEP_TIME)
continue
except zhmcclient.ClientAuthError as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Abandoning after client authentication error: {}".
format(exc))
raise
# pylint: disable=broad-exception-caught,broad-except
except Exception as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Abandoning after exception {}: {}".
format(exc.__class__.__name__, exc))
raise
break
logprint(logging.DEBUG, None,
"Building family objects for HMC metrics")
family_objects = build_family_objects(
metrics_object, self.yaml_metric_groups,
self.yaml_metrics, self.filename_metrics,
self.extra_labels, self.hmc_version, self.se_versions,
self.resource_cache)
logprint(logging.DEBUG, None,
"Building family objects for resource metrics")
family_objects.update(build_family_objects_res(
self.resources, self.yaml_metric_groups,
self.yaml_metrics, self.filename_metrics,
self.extra_labels, self.hmc_version, self.se_versions,
self.resource_cache))
logprint(logging.DEBUG, None,
"Returning family objects")
# Yield all family objects
for family_obj in family_objects.values():
yield family_obj
logprint(logging.INFO, None,
"Done collecting metrics")
# Global variable with the verbosity level from the command line
VERBOSE_LEVEL = 0
# Global variable indicating that logging is enabled
LOGGING_ENABLED = False
def logprint(log_level, print_level, message):
"""
Log a message at the specified log level, and print the message at
the specified verbosity level
Parameters:
log_level (int): Python logging level at which the message should be
logged (logging.DEBUG, etc.), or None for no logging.
print_level (int): Verbosity level at which the message should be
printed (1, 2), or None for no printing.
message (string): The message.
"""
if print_level is not None and VERBOSE_LEVEL >= print_level:
print(message)
if log_level is not None and LOGGING_ENABLED:
logger = logging.getLogger(EXPORTER_LOGGER_NAME)
# Note: This method never raises an exception. Errors during logging
# are handled by calling handler.handleError().
logger.log(log_level, message)
def setup_logging(log_dest, log_complevels, syslog_facility):
"""
Set up Python logging as specified in the command line.
Raises:
EarlyExit
"""
global LOGGING_ENABLED # pylint: disable=global-statement
if log_dest is None:
logprint(None, PRINT_V, "Logging is disabled")
handler = None
dest_str = None
elif log_dest == 'stderr':
dest_str = "the Standard Error stream"
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
handler = logging.StreamHandler(stream=sys.stderr)
elif log_dest == 'syslog':
system = platform.system()
if system.startswith('CYGWIN_NT'):
# Value is 'CYGWIN_NT-6.1'; strip off trailing version:
system = 'CYGWIN_NT'
try:
address = SYSLOG_ADDRESS[system]
except KeyError:
address = SYSLOG_ADDRESS['other']
dest_str = ("the System Log at address {a!r} with syslog facility "
"{slf!r}".format(a=address, slf=syslog_facility))
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
try:
facility = logging.handlers.SysLogHandler.facility_names[
syslog_facility]
except KeyError:
valid_slfs = ', '.join(
logging.handlers.SysLogHandler.facility_names.keys())
raise EarlyExit(
"This system ({sys}) does not support syslog facility {slf}. "
"Supported are: {slfs}.".
format(sys=system, slf=syslog_facility, slfs=valid_slfs))
# The following does not raise any exception if the syslog address
# cannot be opened. In that case, the first attempt to log something
# will fail.
handler = logging.handlers.SysLogHandler(
address=address, facility=facility)
else:
dest_str = "file {fn}".format(fn=log_dest)
logprint(None, PRINT_V, "Logging to {}".format(dest_str))
try:
handler = logging.FileHandler(log_dest)
except OSError as exc:
raise EarlyExit(
"Cannot log to file {fn}: {exc}: {msg}".
format(fn=log_dest, exc=exc.__class__.__name__, msg=exc))
if not handler and log_complevels:
raise EarlyExit(
"--log-comp option cannot be used when logging is disabled; "
"use --log option to enable logging.")
if handler:
def handleError(self, record):
"""
Replacement for built-in method on logging.Handler class.
This is needed because the SysLogHandler class does not raise
an exception when creating the handler object, but only when
logging something to it.
"""
_, exc, _ = sys.exc_info()
f_record = self.format(record)
print("Error: Logging to {d} failed with: {exc}: {msg}. Formatted "
"log record: {r!r}".
format(d=dest_str, exc=exc.__class__.__name__, msg=exc,
r=f_record),
file=sys.stderr)
sys.exit(1)
handler.handleError = types.MethodType(handleError, handler)
logger_level_dict = {} # key: logger_name, value: level
if not log_complevels:
log_complevels = [DEFAULT_LOG_COMP]
for complevel in log_complevels:
if '=' in complevel:
            comp, level = complevel.split('=', 1)
else:
comp = complevel
level = DEFAULT_LOG_LEVEL
if level not in LOG_LEVELS:
raise EarlyExit(
"Invalid log level {level!r} in --log-comp option. "
"Allowed are: {allowed}".
format(level=level, allowed=', '.join(VALID_LOG_LEVELS)))
if comp == 'all':
for logger_name in LOGGER_NAMES.values():
logger_level_dict[logger_name] = level
else:
try:
logger_name = LOGGER_NAMES[comp]
except KeyError:
raise EarlyExit(
"Invalid component {comp!r} in --log-comp option. "
"Allowed are: {allowed}".
format(comp=comp,
allowed=', '.join(VALID_LOG_COMPONENTS)))
logger_level_dict[logger_name] = level
complevels = ', '.join(
["{name}={level}".format(name=name, level=level)
for name, level in logger_level_dict.items()])
logprint(None, PRINT_V,
"Logging components: {complevels}".
format(complevels=complevels))
if isinstance(handler, logging.handlers.SysLogHandler):
# Most syslog implementations fail when the message is longer
# than a limit. We use a hard coded limit for now:
# * 2048 is the typical maximum length of a syslog message,
# including its headers
# * 41 is the max length of the syslog message parts before MESSAGE
# * 47 is the max length of the Python format string before message
# Example syslog message:
# <34>1 2003-10-11T22:14:15.003Z localhost MESSAGE
# where MESSAGE is the formatted Python log message.
max_msg = '.{}'.format(2048 - 41 - 47)
else:
max_msg = ''
fs = ('%(asctime)s %(levelname)s %(name)s: %(message){m}s'.
format(m=max_msg))
# Set the formatter to always log times in UTC. Since the %z
# formatting string does not get adjusted for that, set the timezone
# offset always to '+0000'.
dfs = '%Y-%m-%d %H:%M:%S+0000'
logging.Formatter.converter = time.gmtime # log times in UTC
handler.setFormatter(logging.Formatter(fmt=fs, datefmt=dfs))
for logger_name in LOGGER_NAMES.values():
logger = logging.getLogger(logger_name)
if logger_name in logger_level_dict:
level = logger_level_dict[logger_name]
level_int = LOG_LEVELS[level]
if level_int != logging.NOTSET:
logger.addHandler(handler)
logger.setLevel(level_int)
else:
logger.setLevel(logging.NOTSET)
LOGGING_ENABLED = True
def main():
"""Puts the exporter together."""
    # If the session, context and resources objects have not been created,
    # their cleanup should not be attempted.
global VERBOSE_LEVEL # pylint: disable=global-statement
session = None
context = None
resources = None
try:
args = parse_args(sys.argv[1:])
if args.help_creds:
help_creds()
sys.exit(0)
if args.help_metrics:
help_metrics()
sys.exit(0)
VERBOSE_LEVEL = args.verbose
setup_logging(args.log_dest, args.log_complevels, args.syslog_facility)
logprint(logging.WARNING, None,
"---------------- "
"zhmc_prometheus_exporter started "
"----------------")
logprint(logging.INFO, PRINT_ALWAYS,
"zhmc_prometheus_exporter version: {}".format(__version__))
# pylint: disable=no-member
logprint(logging.INFO, PRINT_ALWAYS,
"zhmcclient version: {}".format(zhmcclient.__version__))
logprint(logging.INFO, PRINT_ALWAYS,
"Verbosity level: {}".format(VERBOSE_LEVEL))
hmccreds_filename = args.c
logprint(logging.INFO, PRINT_V,
"Parsing HMC credentials file: {}".format(hmccreds_filename))
yaml_creds_content = parse_yaml_file(
hmccreds_filename, 'HMC credentials file', 'hmccreds_schema.yaml')
        # metrics is required in the HMC creds schema:
yaml_creds = yaml_creds_content["metrics"]
logprint(logging.INFO, PRINT_V,
"Parsing metric definition file: {}".format(args.m))
yaml_metric_content = parse_yaml_file(
args.m, 'metric definition file', 'metrics_schema.yaml')
# metric_groups and metrics are required in the metrics schema:
yaml_metric_groups = yaml_metric_content['metric_groups']
yaml_metrics = yaml_metric_content['metrics']
# Check that the metric_groups and metrics items are consistent
for mg in yaml_metrics:
if mg not in yaml_metric_groups:
new_exc = ImproperExit(
"Metric group '{}' in metric definition file {} is "
"defined in 'metrics' but not in 'metric_groups'".
format(mg, args.m))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
for mg in yaml_metric_groups:
if mg not in yaml_metrics:
new_exc = ImproperExit(
"Metric group '{}' in metric definition file {} is "
"defined in 'metric_groups' but not in 'metrics'".
format(mg, args.m))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
# Check that the correct format is used in the metrics section
for mg, yaml_m in yaml_metrics.items():
yaml_mg = yaml_metric_groups[mg]
mg_type = yaml_mg.get('type', 'metric')
if mg_type == 'metric' and not isinstance(yaml_m, dict):
new_exc = ImproperExit(
"Metrics for metric group '{}' of type 'metric' must use "
"the dictionary format in metric definition file {}".
format(mg, args.m))
new_exc.__cause__ = None # pylint: disable=invalid-name
raise new_exc
# Unregister the default collectors (Python, Platform)
if hasattr(REGISTRY, '_collector_to_names'):
# pylint: disable=protected-access
for coll in list(REGISTRY._collector_to_names.keys()):
REGISTRY.unregister(coll)
logprint(logging.INFO, PRINT_V,
"Timeout/retry configuration: "
"connect: {r.connect_timeout} sec / {r.connect_retries} "
"retries, read: {r.read_timeout} sec / {r.read_retries} "
"retries.".format(r=RETRY_TIMEOUT_CONFIG))
env = jinja2.Environment()
# hmc is required in the HMC creds schema:
session = create_session(yaml_creds, hmccreds_filename)
try:
with zhmc_exceptions(session, hmccreds_filename):
hmc_info = get_hmc_info(session)
hmc_version = hmc_info['hmc-version']
client = zhmcclient.Client(session)
cpc_list = client.cpcs.list()
se_versions = {}
for cpc in cpc_list:
cpc_name = cpc.name
se_versions[cpc_name] = cpc.prop('se-version')
se_versions_str = ', '.join(
["{}: {}".format(cpc, v)
for cpc, v in se_versions.items()])
logprint(logging.INFO, PRINT_V,
"HMC version: {}".format(hmc_version))
logprint(logging.INFO, PRINT_V,
"Managed CPCs and their SE versions: {}".
format(se_versions_str))
context, resources = create_metrics_context(
session, yaml_metric_groups, hmc_version)
except (ConnectionError, AuthError, OtherError) as exc:
raise ImproperExit(exc)
# Calculate the resource labels at the global level
# extra_labels is optional in the metrics schema:
yaml_extra_labels = yaml_creds_content.get("extra_labels", [])
extra_labels = {}
for item in yaml_extra_labels:
            # name and value are required in the HMC creds schema:
label_name = item['name']
item_value = item['value']
label_value = expand_global_label_value(
env, label_name, item_value, hmc_info)
if label_value is not None:
extra_labels[label_name] = label_value
extra_labels_str = ','.join(
['{}="{}"'.format(k, v) for k, v in extra_labels.items()])
logprint(logging.INFO, PRINT_V,
"Using extra labels: {}".format(extra_labels_str))
resource_cache = ResourceCache()
coll = ZHMCUsageCollector(
yaml_creds, session, context, resources, yaml_metric_groups,
yaml_metrics, extra_labels, args.m, hmccreds_filename,
resource_cache, hmc_version, se_versions)
logprint(logging.INFO, PRINT_V,
"Registering the collector and performing first collection")
REGISTRY.register(coll) # Performs a first collection
logprint(logging.INFO, PRINT_V,
"Starting the HTTP server on port {}".format(args.p))
start_http_server(int(args.p))
logprint(logging.INFO, PRINT_ALWAYS,
"Exporter is up and running on port {}".format(args.p))
while True:
try:
time.sleep(1)
except KeyboardInterrupt:
raise ProperExit
except KeyboardInterrupt:
logprint(logging.WARNING, PRINT_ALWAYS,
"Exporter interrupted before server start.")
cleanup(session, context, resources)
exit_rc(1)
except EarlyExit as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error: {}".format(exc))
exit_rc(1)
except ImproperExit as exc:
logprint(logging.ERROR, PRINT_ALWAYS,
"Error: {}".format(exc))
cleanup(session, context, resources)
exit_rc(1)
except ProperExit:
logprint(logging.WARNING, PRINT_ALWAYS,
"Exporter interrupted after server start.")
cleanup(session, context, resources)
exit_rc(0)
def exit_rc(rc):
"""Exit the script"""
logprint(logging.WARNING, None,
"---------------- "
"zhmc_prometheus_exporter terminated "
"----------------")
sys.exit(rc)
if __name__ == "__main__":
main() | zhmc-prometheus-exporter | /zhmc_prometheus_exporter-1.4.2.tar.gz/zhmc_prometheus_exporter-1.4.2/zhmc_prometheus_exporter/zhmc_prometheus_exporter.py | zhmc_prometheus_exporter.py |
.. Copyright 2016-2019 IBM Corp. All Rights Reserved.
..
.. Licensed under the Apache License, Version 2.0 (the "License");
.. you may not use this file except in compliance with the License.
.. You may obtain a copy of the License at
..
.. http://www.apache.org/licenses/LICENSE-2.0
..
.. Unless required by applicable law or agreed to in writing, software
.. distributed under the License is distributed on an "AS IS" BASIS,
.. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
.. See the License for the specific language governing permissions and
.. limitations under the License.
..
zhmccli - A CLI for the IBM Z HMC, written in pure Python
=========================================================
.. image:: https://img.shields.io/pypi/v/zhmccli.svg
:target: https://pypi.python.org/pypi/zhmccli/
:alt: Version on Pypi
.. image:: https://github.com/zhmcclient/zhmccli/workflows/test/badge.svg?branch=master
:target: https://github.com/zhmcclient/zhmccli/actions/
:alt: Actions status
.. image:: https://readthedocs.org/projects/zhmccli/badge/?version=latest
:target: http://zhmccli.readthedocs.io/en/latest/
:alt: Docs build status (latest)
.. image:: https://img.shields.io/coveralls/zhmcclient/zhmccli.svg
:target: https://coveralls.io/r/zhmcclient/zhmccli
:alt: Test coverage (master)
.. image:: https://codeclimate.com/github/zhmcclient/zhmccli/badges/gpa.svg
:target: https://codeclimate.com/github/zhmcclient/zhmccli
:alt: Code Climate
.. contents:: Contents:
:local:
Overview
========
The zhmccli package is a CLI written in pure Python that interacts with the
Hardware Management Console (HMC) of `IBM Z`_ or `LinuxONE`_ machines. The goal
of this package is to provide an easy-to-use command line interface
for operators.
.. _IBM Z: http://www.ibm.com/systems/z/
.. _LinuxONE: http://www.ibm.com/systems/linuxone/
The zhmccli package uses the API provided by the zhmcclient package, which
interacts with the Web Services API of the HMC. It supports management of the
lifecycle and configuration of various platform resources, such as partitions,
CPU, memory, virtual switches, I/O adapters, and more.
Installation
============
The quick way:
.. code-block:: bash
$ pip install zhmccli
For more details, see the `Installation section`_ in the documentation.
.. _Installation section: http://zhmccli.readthedocs.io/en/latest/intro.html#installation
Quickstart
===========
The following example lists the names of the machines (CPCs) managed by an HMC:
.. code-block:: bash
$ hmc_host="<IP address or hostname of the HMC>"
$ hmc_userid="<userid on that HMC>"
$ zhmc -h $hmc_host -u $hmc_userid cpc list --names-only
Enter password (for user ... at HMC ...): .......
    +----------+
    | name     |
    +----------+
    | P000S67B |
    +----------+
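
Further example commands (a sketch; the CPC and partition names are
illustrative):

.. code-block:: bash

    $ zhmc -h $hmc_host -u $hmc_userid partition list P000S67B
    $ zhmc -h $hmc_host -u $hmc_userid partition start P000S67B PART1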
Documentation
=============
* `Documentation <http://zhmccli.readthedocs.io/en/latest/>`_
* `Change log <http://zhmccli.readthedocs.io/en/latest/changes.html>`_
Contributing
============
For information on how to contribute to this project, see the
`Development section`_ in the documentation.
.. _Development section: http://zhmccli.readthedocs.io/en/latest/development.html
License
=======
The zhmccli package is licensed under the `Apache 2.0 License`_.
.. _Apache 2.0 License: https://github.com/zhmcclient/zhmccli/tree/master/LICENSE
| zhmccli | /zhmccli-1.8.0.tar.gz/zhmccli-1.8.0/README.rst | README.rst |
.. Copyright 2016-2021 IBM Corp. All Rights Reserved.
..
.. Licensed under the Apache License, Version 2.0 (the "License");
.. you may not use this file except in compliance with the License.
.. You may obtain a copy of the License at
..
.. http://www.apache.org/licenses/LICENSE-2.0
..
.. Unless required by applicable law or agreed to in writing, software
.. distributed under the License is distributed on an "AS IS" BASIS,
.. WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
.. See the License for the specific language governing permissions and
.. limitations under the License.
..
zhmcclient - A pure Python client library for the IBM Z HMC Web Services API
============================================================================
.. PyPI download statistics are broken, but the new PyPI warehouse makes PyPI
.. download statistics available through Google BigQuery
.. (https://bigquery.cloud.google.com).
.. Query to list package downloads by version:
..
SELECT
file.project,
file.version,
COUNT(*) as total_downloads,
SUM(CASE WHEN REGEXP_EXTRACT(details.python, r"^([^\.]+\.[^\.]+)") = "2.6" THEN 1 ELSE 0 END) as py26_downloads,
SUM(CASE WHEN REGEXP_EXTRACT(details.python, r"^([^\.]+\.[^\.]+)") = "2.7" THEN 1 ELSE 0 END) as py27_downloads,
SUM(CASE WHEN REGEXP_EXTRACT(details.python, r"^([^\.]+)\.[^\.]+") = "3" THEN 1 ELSE 0 END) as py3_downloads,
FROM
TABLE_DATE_RANGE(
[the-psf:pypi.downloads],
TIMESTAMP("19700101"),
CURRENT_TIMESTAMP()
)
WHERE
file.project = 'zhmcclient'
GROUP BY
file.project, file.version
ORDER BY
file.version DESC
.. image:: https://img.shields.io/pypi/v/zhmcclient.svg
:target: https://pypi.python.org/pypi/zhmcclient/
:alt: Version on Pypi
.. # .. image:: https://img.shields.io/pypi/dm/zhmcclient.svg
.. # :target: https://pypi.python.org/pypi/zhmcclient/
.. # :alt: Pypi downloads
.. image:: https://github.com/zhmcclient/python-zhmcclient/workflows/test/badge.svg?branch=master
:target: https://github.com/zhmcclient/python-zhmcclient/actions/
:alt: Actions status
.. image:: https://readthedocs.org/projects/python-zhmcclient/badge/?version=latest
:target: https://readthedocs.org/projects/python-zhmcclient/builds/
:alt: ReadTheDocs status
.. image:: https://coveralls.io/repos/github/zhmcclient/python-zhmcclient/badge.svg?branch=master
:target: https://coveralls.io/github/zhmcclient/python-zhmcclient?branch=master
:alt: Coveralls status
.. image:: https://codeclimate.com/github/zhmcclient/python-zhmcclient/badges/gpa.svg
:target: https://codeclimate.com/github/zhmcclient/python-zhmcclient
:alt: CodeClimate status
.. contents:: Contents:
:local:
Overview
========
The zhmcclient package is a client library
written in pure Python that interacts with the Web Services API of the Hardware
Management Console (HMC) of `IBM Z`_ or `LinuxONE`_ machines. The goal of
this package is to make the HMC Web Services API easily consumable for Python
programmers.
.. _IBM Z: http://www.ibm.com/systems/z/
.. _LinuxONE: http://www.ibm.com/systems/linuxone/
The HMC Web Services API is the access point for any external tools to
manage the IBM Z or LinuxONE platform. It supports management of the
lifecycle and configuration of various platform resources, such as partitions,
CPU, memory, virtual switches, I/O adapters, and more.
The zhmcclient package encapsulates both protocols supported by the HMC Web
Services API:
* REST over HTTPS for request/response-style operations driven by the client.
Most of these operations complete synchronously, but some long-running tasks
complete asynchronously.
* JMS (Java Messaging Services) for notifications from the HMC to the client.
  This can be used to get notified about changes in the system, or about
  completion of asynchronous tasks started using REST (see the sketch below).
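
The following minimal sketch shows how JMS notifications can be received
with the zhmcclient API (the topic name and credentials are placeholders):

.. code-block:: python

    import zhmcclient

    receiver = zhmcclient.NotificationReceiver(
        ['<notification topic>'],  # e.g. session.object_topic
        '<HMC host>', '<userid>', '<password>')
    for headers, message in receiver.notifications():
        print("Received notification: {}".format(
            headers['notification-type']))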
Installation
============
The quick way:
.. code-block:: bash
$ pip install zhmcclient
For more details, see the `Installation section`_ in the documentation.
.. _Installation section: http://python-zhmcclient.readthedocs.io/en/latest/intro.html#installation
Quickstart
===========
The following example code lists the partitions on CPCs in DPM mode that are
accessible for the user:
.. code-block:: python
#!/usr/bin/env python
import zhmcclient
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
# Set these variables for your environment:
host = "<IP address or hostname of the HMC>"
userid = "<userid on that HMC>"
password = "<password of that HMC userid>"
verify_cert = False
session = zhmcclient.Session(host, userid, password, verify_cert=verify_cert)
client = zhmcclient.Client(session)
console = client.consoles.console
partitions = console.list_permitted_partitions()
for part in partitions:
cpc = part.manager.parent
print("{} {}".format(cpc.name, part.name))
Possible output when running the script:
.. code-block:: text
P000S67B PART1
P000S67B PART2
P0000M96 PART1
Documentation and Change Log
============================
For the latest released version on PyPI:
* `Documentation`_
* `Change log`_
.. _Documentation: http://python-zhmcclient.readthedocs.io/en/latest/
.. _Change log: http://python-zhmcclient.readthedocs.io/en/latest/changes.html
zhmc CLI
========
Before version 0.18.0 of the zhmcclient package, it contained the zhmc CLI.
Starting with zhmcclient version 0.18.0, the zhmc CLI has been moved from this
project into the new `zhmccli project`_.
If your project uses the zhmc CLI, and you are upgrading the zhmcclient
package from before 0.18.0 to 0.18.0 or later, your project will need to add
the `zhmccli package`_ to its dependencies.
.. _zhmccli project: https://github.com/zhmcclient/zhmccli
.. _zhmccli package: https://pypi.python.org/pypi/zhmccli
Contributing
============
For information on how to contribute to this project, see the
`Development section`_ in the documentation.
.. _Development section: http://python-zhmcclient.readthedocs.io/en/latest/development.html
License
=======
The zhmcclient package is licensed under the `Apache 2.0 License`_.
.. _Apache 2.0 License: https://github.com/zhmcclient/python-zhmcclient/tree/master/LICENSE
| zhmcclient | /zhmcclient-1.3.2.tar.gz/zhmcclient-1.3.2/README.rst | README.rst |
from __future__ import absolute_import
__all__ = ['IdPool']
class IdPool(object):
"""
A pool of integer ID values from a defined value range.
The IDs can be allocated from and returned to the pool.
The pool is optimized for memory consumption, by only materializing ID
values as needed.
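
    Example (usage sketch):

        pool = IdPool(10, 99)
        id1 = pool.alloc()   # e.g. 10
        id2 = pool.alloc()   # e.g. 11
        pool.free(id1)       # 10 can now be allocated again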
"""
def __init__(self, lowest, highest):
"""
Parameters:
lowest (integer): Lowest value of the ID value range.
highest (integer): Highest value of the ID value range.
"""
if lowest > highest:
raise ValueError("Lowest value %d is higher than highest %d" %
(lowest, highest))
# ID value range, using slice semantics (end points past the highest)
self._range_start = lowest
self._range_end = highest + 1
# The ID values in use.
self._used = set()
# Free pool: The ID values that are free and materialized.
self._free = set()
# Start of new free ID values to be materialized when the free pool is
# expanded.
self._expand_start = lowest
# Expansion chunk size: Number of new free ID values to be materialized
# when the free pool is expanded.
self._expand_len = 10
def _expand(self):
"""
Expand the free pool, if possible.
If out of capacity w.r.t. the defined ID value range, ValueError is
raised.
"""
assert not self._free # free pool is empty
expand_end = self._expand_start + self._expand_len
if expand_end > self._range_end:
# This happens if the size of the value range is not a multiple
# of the expansion chunk size.
expand_end = self._range_end
if self._expand_start == expand_end:
raise ValueError("Out of capacity in ID pool")
self._free = set(range(self._expand_start, expand_end))
self._expand_start = expand_end
def alloc(self):
"""
Allocate an ID value and return it.
Raises:
ValueError: Out of capacity in ID pool.
"""
if not self._free:
self._expand()
_id = self._free.pop()
self._used.add(_id)
return _id
def free(self, id):
# pylint: disable=redefined-builtin
"""
Free an ID value.
The ID value must be allocated.
Raises:
ValueError: ID value to be freed is not currently allocated.
"""
self._free_impl(id, fail_if_not_allocated=True)
def free_if_allocated(self, id):
# pylint: disable=redefined-builtin
"""
Free an ID value, if it is currently allocated.
If the specified ID value is not currently allocated, nothing happens.
"""
self._free_impl(id, fail_if_not_allocated=False)
def _free_impl(self, id, fail_if_not_allocated):
# pylint: disable=redefined-builtin
"""
Implementation of free.
"""
if id in self._used:
self._used.remove(id)
self._free.add(id)
elif fail_if_not_allocated:
raise ValueError("ID value to be freed is not currently "
"allocated: %d" % id) | zhmcclient | /zhmcclient-1.3.2.tar.gz/zhmcclient-1.3.2/zhmcclient_mock/_idpool.py | _idpool.py |
from __future__ import absolute_import
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
import yaml
import yamlloader
import jsonschema
import zhmcclient
from zhmcclient._utils import datetime_from_isoformat
from ._hmc import FakedHmc, FakedMetricObjectValues
from ._urihandler import UriHandler, HTTPError, URIS
from ._urihandler import ConnectionError # pylint: disable=redefined-builtin
__all__ = ['FakedSession', 'HmcDefinitionYamlError', 'HmcDefinitionSchemaError']
# JSON schema for a faked HMC definition
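# Example of a minimal faked HMC definition that satisfies this schema
# (a sketch with made-up values):
#
#   host: 10.0.0.1
#   api_version: "2.20"
#   consoles:
#     - properties:
#         name: fake-console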
FAKED_HMC_DEFINITION_SCHEMA = {
"$schema": "http://json-schema.org/draft-07/schema#",
"description": "JSON schema for a faked HMC definition",
"definitions": {
"Properties": {
"description": "Dictionary of resource properties. Keys are the "
"property names in HMC format (with dashes)",
"type": "object",
"patternProperties": {
"^[a-z0-9\\-]+$": {
"description": "A resource property value",
"type": ["object", "array", "string", "integer", "number",
"boolean", "null"],
},
},
},
"Hmc": {
"description": "The definition of a faked HMC",
"type": "object",
"additionalProperties": False,
"required": [
"host",
"api_version",
"consoles",
],
"properties": {
"host": {
"description": "The hostname or IP address of the HMC host",
"type": "string",
},
"api_version": {
"description": "The version of the HMC WS API, as "
"major.minor",
"type": "string",
},
"metric_values": {
"description": "The metric values prepared for later "
"retrieval",
"type": "array",
"items": {
"$ref": "#/definitions/MetricValues"
},
},
"metrics_contexts": {
"description": "The metrics contexts defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/MetricsContext"
},
},
"consoles": {
"description": "The consoles (HMCs). There is only "
"a single console.",
"type": "array",
"maxItems": 1,
"items": {
"$ref": "#/definitions/Console"
},
},
"cpcs": {
"description": "The CPCs managed by this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/Cpc"
},
},
},
},
"MetricValues": {
"description": "The metric values of a single metric group for a "
"single resource object at a point in time, "
"prepared for later retrieval",
"type": "object",
"additionalProperties": False,
"required": [
"group_name",
"resource_uri",
"timestamp",
"metrics",
],
"properties": {
"group_name": {
"description": "Name of the metric group definition for "
"these metric values",
"type": "string",
},
"resource_uri": {
"description": "URI of the resource object for these "
"metric values",
"type": "string",
},
"timestamp": {
"description": "Point in time for these metric values, "
"as a string in ISO8601 format",
"type": "string",
},
"metrics": {
"description": "The metrics (values by name)",
"type": "object",
"patternProperties": {
"^[a-z0-9\\-]+$": {
"description": "The value of the metric",
"type": ["string", "integer", "number",
"boolean", "null"],
},
},
},
},
},
"MetricsContext": {
"description": "A metrics context defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Console": {
"description": "A console (HMC)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
"users": {
"description": "The users defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/User"
},
},
"user_roles": {
"description": "The user roles defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/UserRole"
},
},
"user_patterns": {
"description": "The user patterns defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/UserPattern"
},
},
"password_rules": {
"description": "The password rules defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/PasswordRule"
},
},
"tasks": {
"description": "The tasks defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/Task"
},
},
"ldap_server_definitions": {
"description": "The LDAP server definitions on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/LdapServerDefinition"
},
},
"unmanaged_cpcs": {
"description": "The unmanaged CPCs discovered by this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/UnmanagedCpc"
},
},
"storage_groups": {
"description": "The storage groups defined on this HMC",
"type": "array",
"items": {
"$ref": "#/definitions/StorageGroup"
},
},
},
},
"User": {
"description": "A user defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"UserRole": {
"description": "A user role defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"UserPattern": {
"description": "A user pattern defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"PasswordRule": {
"description": "A password rule defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Task": {
"description": "A task defined on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"LdapServerDefinition": {
"description": "An LPAP server definition on an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"UnmanagedCpc": {
"description": "An unmanaged CPC discovered by an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"StorageGroup": {
"description": "A storage group defined on an HMC (and associated "
"with a CPC)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
"storage_volumes": {
"description": "The storage volumes of this storage group",
"type": "array",
"items": {
"$ref": "#/definitions/StorageVolume"
},
},
},
},
"StorageVolume": {
"description": "A storage volume of a storage group",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Cpc": {
"description": "A CPC managed by an HMC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
"capacity_groups": {
"description": "The capacity groups of this CPC (any mode)",
"type": "array",
"items": {
"$ref": "#/definitions/CapacityGroup"
},
},
"partitions": {
"description": "The partitions of this CPC (DPM mode)",
"type": "array",
"items": {
"$ref": "#/definitions/Partition"
},
},
"adapters": {
"description": "The adapters of this CPC (DPM mode)",
"type": "array",
"items": {
"$ref": "#/definitions/Adapter"
},
},
"virtual_switches": {
"description": "The virtual switches of this CPC "
"(DPM mode)",
"type": "array",
"items": {
"$ref": "#/definitions/VirtualSwitch"
},
},
"lpars": {
"description": "The LPARs of this CPC (classic mode)",
"type": "array",
"items": {
"$ref": "#/definitions/Lpar"
},
},
"reset_activation_profiles": {
"description": "The reset activation profiles of this CPC "
"(classic mode)",
"type": "array",
"items": {
"$ref": "#/definitions/ResetActivationProfile"
},
},
"image_activation_profiles": {
"description": "The image activation profiles of this CPC "
"(classic mode)",
"type": "array",
"items": {
"$ref": "#/definitions/ImageActivationProfile"
},
},
"load_activation_profiles": {
"description": "The load activation profiles of this CPC "
"(classic mode)",
"type": "array",
"items": {
"$ref": "#/definitions/LoadActivationProfile"
},
},
},
},
"CapacityGroup": {
"description": "A capacity group in a CPC",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Partition": {
"description": "A partition of a CPC (DPM mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
"devno_pool": {
"description": "Internal state: The pool of "
"auto-allocated device numbers for this "
"partition",
"type": "object",
"additionalProperties": True,
},
"wwpn_pool": {
"description": "Internal state: The pool of "
"auto-allocated WWPNs for this partition",
"type": "object",
"additionalProperties": True,
},
"nics": {
"description": "The NICs of this partition",
"type": "array",
"items": {
"$ref": "#/definitions/Nic"
},
},
"hbas": {
"description": "The HBAs of this partition (up to z13)",
"type": "array",
"items": {
"$ref": "#/definitions/Hba"
},
},
"virtual_functions": {
"description": "The virtual functions of this partition",
"type": "array",
"items": {
"$ref": "#/definitions/VirtualFunction"
},
},
},
},
"Nic": {
"description": "A NIC of a partition",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Hba": {
"description": "An HBA of a partition (up to z13)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"VirtualFunction": {
"description": "A virtual function of a partition",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Adapter": {
"description": "An adapter of a CPC (DPM mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
"ports": {
"description": "The ports of this adapter",
"type": "array",
"items": {
"$ref": "#/definitions/Port"
},
},
},
},
"Port": {
"description": "A port of an adapter",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"Lpar": {
"description": "An LPAR of a CPC (classic mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"VirtualSwitch": {
"description": "A virtual switch in a CPC (DPM mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"ResetActivationProfile": {
"description": "A reset activation profile of a CPC (classic mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"ImageActivationProfile": {
"description": "An image activation profile of a CPC "
"(classic mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
"LoadActivationProfile": {
"description": "A load activation profile of a CPC (classic mode)",
"type": "object",
"additionalProperties": False,
"required": [
"properties",
],
"properties": {
"properties": {
"$ref": "#/definitions/Properties"
},
},
},
},
"type": "object",
"additionalProperties": False,
"required": [
"hmc_definition",
],
"properties": {
"hmc_definition": {
"$ref": "#/definitions/Hmc"
},
},
}
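# For orientation, a minimal faked HMC definition that is intended to
# satisfy FAKED_HMC_DEFINITION_SCHEMA and the expectations of
# FakedSession.from_hmc_dict() (a sketch; host, names and versions are
# illustrative):
#
# hmc_definition:
#   host: 10.11.12.13
#   api_version: '1.8'
#   consoles:
#     - properties:
#         name: HMC1
#         version: '2.13.1'
#   cpcs:
#     - properties:
#         name: CPC1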
class HmcDefinitionYamlError(Exception):
"""
An error that is raised when loading an HMC definition and that indicates
invalid YAML syntax in the faked HMC definition, at the YAML scanner or
parser level.
``args[0]`` will be set to a message detailing the issue.
"""
def __init__(self, message):
# pylint: disable=useless-super-delegation
super(HmcDefinitionYamlError, self).__init__(message)
class HmcDefinitionSchemaError(Exception):
"""
An error that is raised when loading an HMC definition and that indicates
that the data in the faked HMC definition fails schema validation.
``args[0]`` will be set to a message detailing the issue.
"""
def __init__(self, message):
# pylint: disable=useless-super-delegation
super(HmcDefinitionSchemaError, self).__init__(message)
class FakedSession(zhmcclient.Session):
"""
A faked Session class for the zhmcclient package, that can be used as a
replacement for the :class:`zhmcclient.Session` class.
This class is derived from :class:`zhmcclient.Session`.
This class can be used by projects using the zhmcclient package for their
unit testing. It can also be used by unit tests of the zhmcclient package
itself.
This class provides a faked HMC with all of its resources that are relevant
for the zhmcclient package.
The faked HMC provided by this class maintains its resource state in memory
as Python objects, and no communication happens to any real HMC. The
faked HMC implements all HMC operations that are relevant for the
zhmcclient package in a successful manner.
It is possible to populate the faked HMC with an initial resource state
(see :meth:`~zhmcclient_mock.FakedHmc.add_resources`).
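Example:
    A minimal usage sketch (host, names and versions are
    illustrative)::

        session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')
        session.hmc.add_resources({
            'cpcs': [
                {'properties': {'name': 'cpc_1'}},
            ],
        })
        client = zhmcclient.Client(session)
        cpcs = client.cpcs.list()  # lists the faked 'cpc_1'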
"""
def __init__(self, host, hmc_name, hmc_version, api_version,
userid=None, password=None):
"""
Parameters:
host (:term:`string`):
HMC host the mocked HMC will be set up with.
hmc_name (:term:`string`):
HMC name. Used for result of Query Version Info operation.
hmc_version (:term:`string`):
HMC version string (e.g. '2.13.1'). Used for result of
Query Version Info operation.
api_version (:term:`string`):
HMC API version string (e.g. '1.8'). Used for result of
Query Version Info operation.
userid (:term:`string`):
HMC userid for logging in to the mocked HMC.
password (:term:`string`):
HMC password for logging in to the mocked HMC.
"""
super(FakedSession, self).__init__(
host, userid=userid, password=password)
self._hmc = FakedHmc(hmc_name, hmc_version, api_version)
self._urihandler = UriHandler(URIS)
self._object_topic = 'faked-notification-topic'
self._job_topic = 'faked-job-notification-topic'
def __repr__(self):
"""
Return a string with the state of this faked session, for debug
purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _host = {s._host!r}\n"
" _userid = {s._userid!r}\n"
" _password = '...'\n"
" _get_password = {s._get_password!r}\n"
" _retry_timeout_config = {s._retry_timeout_config!r}\n"
" _base_url = {s._base_url!r}\n"
" _headers = {s._headers!r}\n"
" _session_id = {s._session_id!r}\n"
" _session = {s._session!r}\n"
" _hmc = {hmc_classname} at 0x{hmc_id:08x}\n"
" _urihandler = {s._urihandler!r}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
hmc_classname=self._hmc.__class__.__name__,
hmc_id=id(self._hmc),
s=self))
return ret
@property
def hmc(self):
"""
:class:`~zhmcclient_mock.FakedHmc`: The faked HMC provided by this
faked session.
The faked HMC supports being populated with initial resource state,
for example using its :meth:`zhmcclient_mock.FakedHmc.add_resources`
method.
As an alternative to providing an entire resource tree, the resources
can also be added one by one, from top to bottom, using the
:meth:`zhmcclient_mock.FakedBaseManager.add` methods of the
respective managers (the top-level manager for CPCs can be accessed
via ``hmc.cpcs``).
"""
return self._hmc
@staticmethod
def from_hmc_yaml_file(filepath, userid=None, password=None):
"""
Return a new FakedSession object from an HMC definition in a YAML file.
The data format of the YAML file is validated using a schema.
Parameters:
filepath(:term:`string`): Path name of the YAML file that contains
the HMC definition.
userid (:term:`string`):
Userid of the HMC user to be used for logging in, or `None`.
password (:term:`string`):
Password of the HMC user if `userid` was specified, or `None`.
Returns:
FakedSession: New faked session with faked HMC set up from HMC
definition.
Raises:
IOError: Error opening the YAML file for reading.
HmcDefinitionYamlError: Invalid YAML syntax in HMC definition.
HmcDefinitionSchemaError: Invalid data format in HMC definition.
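Example:
    A usage sketch (file name and credentials are illustrative)::

        session = FakedSession.from_hmc_yaml_file(
            'myhmc.yaml', userid='myuser', password='mypassword')
        client = zhmcclient.Client(session)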
"""
# pylint: disable=unspecified-encoding
with open(filepath) as fp:
hmc = FakedSession.from_hmc_yaml(fp, filepath, userid, password)
return hmc
@staticmethod
def from_hmc_yaml(hmc_yaml, filepath=None, userid=None, password=None):
"""
Return a new FakedSession object from an HMC definition YAML string
or stream.
An HMC definition YAML string can be created using
:meth:`zhmcclient.Client.to_hmc_yaml`.
The timestamp in metric values can have any valid ISO8601 format.
Timezone-naive values are amended with the local timezone.
The data format of the YAML string is validated using a schema.
Parameters:
hmc_yaml(string or stream): HMC definition YAML string or stream.
filepath(string): Path name of the YAML file that contains the HMC
definition; used only in exception messages. If `None`, no
filename is used in exception messages.
userid (:term:`string`):
Userid of the HMC user to be used for logging in, or `None`.
password (:term:`string`):
Password of the HMC user if `userid` was specified, or `None`.
Returns:
FakedSession: New faked session with faked HMC set up from HMC
definition.
Raises:
HmcDefinitionYamlError: Invalid YAML syntax in HMC definition YAML
string or stream.
HmcDefinitionSchemaError: Invalid data format in HMC definition.
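Example:
    A usage sketch with an inline YAML string (host, names and
    versions are illustrative; the uniform indentation of the block
    is accepted by the YAML parser)::

        hmc_yaml = '''
        hmc_definition:
          host: 10.11.12.13
          api_version: '1.8'
          consoles:
            - properties:
                name: HMC1
                version: '2.13.1'
        '''
        session = FakedSession.from_hmc_yaml(hmc_yaml)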
"""
try:
hmc_dict = yaml.load(hmc_yaml, Loader=yamlloader.ordereddict.Loader)
except (yaml.parser.ParserError, yaml.scanner.ScannerError) as exc:
if filepath:
file_str = " in file {f}".format(f=filepath)
else:
file_str = ""
new_exc = HmcDefinitionYamlError(
"Invalid YAML syntax in faked HMC definition{fs}: {msg}".
format(fs=file_str, msg=exc))
new_exc.__cause__ = None
raise new_exc # HmcDefinitionYamlError
hmc = FakedSession.from_hmc_dict(hmc_dict, filepath, userid, password)
return hmc
@staticmethod
def from_hmc_dict(hmc_dict, filepath=None, userid=None, password=None):
"""
Return a new FakedSession object from an HMC definition dictionary.
An HMC definition dictionary can be created using
:meth:`zhmcclient.Client.to_hmc_dict`.
The timestamp in metric values can have any valid ISO8601 format.
Timezone-naive values are amended with the local timezone.
The data format of the dictionary is validated using a schema.
Parameters:
hmc_dict(dict): HMC definition dictionary.
filepath(string): Path name of the YAML file that contains the HMC
definition; used only in exception messages. If `None`, no
filename is used in exception messages.
userid (:term:`string`):
Userid of the HMC user to be used for logging in, or `None`.
password (:term:`string`):
Password of the HMC user if `userid` was specified, or `None`.
Returns:
FakedSession: New faked session with faked HMC set up from the HMC
definition.
Raises:
HmcDefinitionSchemaError: Invalid data format in HMC definition.
"""
try:
jsonschema.validate(hmc_dict, FAKED_HMC_DEFINITION_SCHEMA)
except jsonschema.exceptions.ValidationError as exc:
if filepath:
file_str = " in file {f}".format(f=filepath)
else:
file_str = ""
new_exc = HmcDefinitionSchemaError(
"Invalid data format in faked HMC definition{fs}: {msg}; "
"Offending element: {elem}; "
"Schema item: {schemaitem}; "
"Validator: {valname}={valvalue}".
format(fs=file_str, msg=exc.message,
elem='.'.join(str(e) for e in exc.absolute_path),
schemaitem='.'.join(str(e) for e in
exc.absolute_schema_path),
valname=exc.validator,
valvalue=exc.validator_value))
new_exc.__cause__ = None
raise new_exc # HmcDefinitionSchemaError
hmc_res_dict = hmc_dict['hmc_definition']
consoles = hmc_res_dict.get('consoles')
console = consoles[0]
host = hmc_res_dict['host']
api_version = hmc_res_dict['api_version']
hmc_name = console['properties']['name']
hmc_version = console['properties']['version']
session = FakedSession(host, hmc_name, hmc_version, api_version,
userid=userid, password=password)
res_dict = OrderedDict()
res_dict['consoles'] = consoles
cpcs = hmc_res_dict.get('cpcs')
if cpcs:
res_dict['cpcs'] = cpcs
metrics_contexts = hmc_res_dict.get('metrics_contexts')
if metrics_contexts:
res_dict['metrics_contexts'] = metrics_contexts
session.hmc.add_resources(res_dict)
mv_dicts = hmc_res_dict.get('metric_values')
if mv_dicts:
for mv_dict in mv_dicts:
group_name = mv_dict['group_name']
resource_uri = mv_dict['resource_uri']
timestamp = datetime_from_isoformat(mv_dict['timestamp'])
values = []
for name, value in mv_dict['metrics'].items():
item_tup = (name, value)
values.append(item_tup)
mv = FakedMetricObjectValues(
group_name=group_name,
resource_uri=resource_uri,
timestamp=timestamp,
values=values)
session.hmc.add_metric_values(mv)
return session
def get(self, uri, logon_required=True):
"""
Perform the HTTP GET method against the resource identified by a URI,
on the faked HMC.
Parameters:
uri (:term:`string`):
Relative URI path of the resource, e.g. "/api/session".
This URI is relative to the base URL of the session (see
the :attr:`~zhmcclient.Session.base_url` property).
Must not be `None`.
logon_required (bool):
Boolean indicating whether the operation requires that the session
is logged on to the HMC.
Because this is a faked HMC, this does not perform a real logon,
but it is still used to update the state in the faked HMC.
Returns:
:term:`json object` with the operation result.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError` (not implemented)
:exc:`~zhmcclient.AuthError` (not implemented)
:exc:`~zhmcclient.ConnectionError`
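Example:
    A sketch (the URI is illustrative)::

        result = session.get('/api/cpcs')
        cpc_list = result['cpcs']  # 'List CPCs' returns a 'cpcs' field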
"""
try:
return self._urihandler.get(self._hmc, uri, logon_required)
except HTTPError as exc:
new_exc = zhmcclient.HTTPError(exc.response())
new_exc.__cause__ = None
raise new_exc # zhmcclient.HTTPError
except ConnectionError as exc:
new_exc = zhmcclient.ConnectionError(exc.message, None)
new_exc.__cause__ = None
raise new_exc # zhmcclient.ConnectionError
def post(self, uri, body=None, logon_required=True,
wait_for_completion=True, operation_timeout=None):
"""
Perform the HTTP POST method against the resource identified by a URI,
using a provided request body, on the faked HMC.
HMC operations using HTTP POST are either synchronous or asynchronous.
Asynchronous operations return the URI of an asynchronously executing
job that can be queried for status and result.
Examples for synchronous operations:
* With no response body: "Logon", "Update CPC Properties"
* With a response body: "Create Partition"
Examples for asynchronous operations:
* With no ``job-results`` field in the completed job status response:
"Start Partition"
* With a ``job-results`` field in the completed job status response
(under certain conditions): "Activate a Blade", or "Set CPC Power
Save"
The `wait_for_completion` parameter of this method can be used to deal
with asynchronous HMC operations in a synchronous way.
Parameters:
uri (:term:`string`):
Relative URI path of the resource, e.g. "/api/session".
This URI is relative to the base URL of the session (see the
:attr:`~zhmcclient.Session.base_url` property).
Must not be `None`.
body (:term:`json object`):
JSON object to be used as the HTTP request body (payload).
`None` means the same as an empty dictionary, namely that no HTTP
body is included in the request.
logon_required (bool):
Boolean indicating whether the operation requires that the session
is logged on to the HMC. For example, the "Logon" operation does
not require that.
Because this is a faked HMC, this does not perform a real logon,
but it is still used to update the state in the faked HMC.
wait_for_completion (bool):
Boolean controlling whether this method should wait for completion
of the requested HMC operation, as follows:
* If `True`, this method will wait for completion of the requested
operation, regardless of whether the operation is synchronous or
asynchronous.
This will cause an additional entry in the time statistics to be
created for the asynchronous operation and waiting for its
completion. This entry will have a URI that is the targeted URI,
appended with "+completion".
* If `False`, this method will immediately return the result of the
HTTP POST method, regardless of whether the operation is
synchronous or asynchronous.
operation_timeout (:term:`number`):
Timeout in seconds, when waiting for completion of an asynchronous
operation. The special value 0 means that no timeout is set. `None`
means that the default async operation timeout of the session is
used.
For `wait_for_completion=True`, a
:exc:`~zhmcclient.OperationTimeout` is raised when the timeout
expires.
For `wait_for_completion=False`, this parameter has no effect.
Returns:
:term:`json object`:
If `wait_for_completion` is `True`, returns a JSON object
representing the response body of the synchronous operation, or the
response body of the completed job that performed the asynchronous
operation. If a synchronous operation has no response body, `None`
is returned.
If `wait_for_completion` is `False`, returns a JSON object
representing the response body of the synchronous or asynchronous
operation. In case of an asynchronous operation, the JSON object
will have a member named ``job-uri``, whose value can be used with
the :meth:`~zhmcclient.Session.query_job_status` method to
determine the status of the job and the result of the original
operation, once the job has completed.
See the section in the :term:`HMC API` book about the specific HMC
operation and about the 'Query Job Status' operation, for a
description of the members of the returned JSON objects.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError` (not implemented)
:exc:`~zhmcclient.AuthError` (not implemented)
:exc:`~zhmcclient.ConnectionError`
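Example:
    A sketch (the URI is illustrative; 'Start Partition' is an
    asynchronous operation, and completion is waited for)::

        result = session.post(
            '/api/partitions/fake-part-1/operations/start',
            wait_for_completion=True)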
"""
try:
return self._urihandler.post(self._hmc, uri, body, logon_required,
wait_for_completion)
except HTTPError as exc:
new_exc = zhmcclient.HTTPError(exc.response())
new_exc.__cause__ = None
raise new_exc # zhmcclient.HTTPError
except ConnectionError as exc:
new_exc = zhmcclient.ConnectionError(exc.message, None)
new_exc.__cause__ = None
raise new_exc # zhmcclient.ConnectionError
def delete(self, uri, logon_required=True):
"""
Perform the HTTP DELETE method against the resource identified by a
URI, on the faked HMC.
Parameters:
uri (:term:`string`):
Relative URI path of the resource, e.g.
"/api/session/{session-id}".
This URI is relative to the base URL of the session (see
the :attr:`~zhmcclient.Session.base_url` property).
Must not be `None`.
logon_required (bool):
Boolean indicating whether the operation requires that the session
is logged on to the HMC. For example, for the logoff operation, it
does not make sense to first log on.
Because this is a faked HMC, this does not perform a real logon,
but it is still used to update the state in the faked HMC.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError` (not implemented)
:exc:`~zhmcclient.AuthError` (not implemented)
:exc:`~zhmcclient.ConnectionError`
"""
try:
self._urihandler.delete(self._hmc, uri, logon_required)
except HTTPError as exc:
new_exc = zhmcclient.HTTPError(exc.response())
new_exc.__cause__ = None
raise new_exc # zhmcclient.HTTPError
except ConnectionError as exc:
new_exc = zhmcclient.ConnectionError(exc.message, None)
new_exc.__cause__ = None
raise new_exc # zhmcclient.ConnectionError

# End of zhmcclient_mock/_session.py. The module implementing the faked
# HMC and its resources follows.
from __future__ import absolute_import
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
import re
import copy
from dateutil import tz
import six
from immutable_views import DictView
from zhmcclient._utils import repr_dict, repr_manager, repr_list, \
timestamp_from_datetime
from ._idpool import IdPool
__all__ = ['InputError',
'FakedBaseResource', 'FakedBaseManager', 'FakedHmc',
'FakedConsoleManager', 'FakedConsole',
'FakedUserManager', 'FakedUser',
'FakedUserRoleManager', 'FakedUserRole',
'FakedUserPatternManager', 'FakedUserPattern',
'FakedPasswordRuleManager', 'FakedPasswordRule',
'FakedTaskManager', 'FakedTask',
'FakedLdapServerDefinitionManager', 'FakedLdapServerDefinition',
'FakedActivationProfileManager', 'FakedActivationProfile',
'FakedAdapterManager', 'FakedAdapter',
'FakedCpcManager', 'FakedCpc',
'FakedUnmanagedCpcManager', 'FakedUnmanagedCpc',
'FakedHbaManager', 'FakedHba',
'FakedLparManager', 'FakedLpar',
'FakedNicManager', 'FakedNic',
'FakedPartitionManager', 'FakedPartition',
'FakedPortManager', 'FakedPort',
'FakedVirtualFunctionManager', 'FakedVirtualFunction',
'FakedVirtualSwitchManager', 'FakedVirtualSwitch',
'FakedStorageGroupManager', 'FakedStorageGroup',
'FakedMetricsContextManager', 'FakedMetricsContext',
'FakedMetricGroupDefinition', 'FakedMetricObjectValues',
'FakedCapacityGroupManager', 'FakedCapacityGroup',
]
# All currently defined metric groups with their metrics.
# This reflects system generations up to z15.
# Each item is a dict with:
# - name: Name of the metric group.
# - resource_class: Class string of applicable resource type
# - cpc_dpm_enabled: For CPC metrics, the operational mode (True=DPM,
# False=classic, None=any or N/A) in which the metric group is available.
# - metrics: List of metrics, as tuple(metric-name, metric-type).
METRIC_GROUPS = [
dict(
name='channel-usage',
resource_class='cpc',
cpc_dpm_enabled=False,
metrics=[
('channel-name', 'string-metric'),
('shared-channel', 'boolean-metric'),
('logical-partition-name', 'string-metric'),
('channel-usage', 'integer-metric'),
]),
dict(
name='cpc-usage-overview',
resource_class='cpc',
cpc_dpm_enabled=False,
metrics=[
('cpc-processor-usage', 'integer-metric'),
('channel-usage', 'integer-metric'),
('power-consumption-watts', 'integer-metric'),
('temperature-celsius', 'double-metric'),
('cp-shared-processor-usage', 'integer-metric'),
('cp-dedicated-processor-usage', 'integer-metric'),
('ifl-shared-processor-usage', 'integer-metric'),
('ifl-dedicated-processor-usage', 'integer-metric'),
('icf-shared-processor-usage', 'integer-metric'),
('icf-dedicated-processor-usage', 'integer-metric'),
('iip-shared-processor-usage', 'integer-metric'),
('iip-dedicated-processor-usage', 'integer-metric'),
('aap-shared-processor-usage', 'integer-metric'),
('aap-dedicated-processor-usage', 'integer-metric'),
('all-shared-processor-usage', 'integer-metric'),
('all-dedicated-processor-usage', 'integer-metric'),
('cp-all-processor-usage', 'integer-metric'),
('ifl-all-processor-usage', 'integer-metric'),
('icf-all-processor-usage', 'integer-metric'),
('iip-all-processor-usage', 'integer-metric'),
('cbp-shared-processor-usage', 'integer-metric'),
('cbp-dedicated-processor-usage', 'integer-metric'),
('cbp-all-processor-usage', 'integer-metric'),
]),
dict(
name='dpm-system-usage-overview',
resource_class='cpc',
cpc_dpm_enabled=True,
metrics=[
('processor-usage', 'integer-metric'),
('network-usage', 'integer-metric'),
('storage-usage', 'integer-metric'),
('accelerator-usage', 'integer-metric'),
('crypto-usage', 'integer-metric'),
('power-consumption-watts', 'integer-metric'),
('temperature-celsius', 'double-metric'),
('cp-shared-processor-usage', 'integer-metric'),
('cp-dedicated-processor-usage', 'integer-metric'),
('ifl-shared-processor-usage', 'integer-metric'),
('ifl-dedicated-processor-usage', 'integer-metric'),
]),
dict(
name='logical-partition-usage',
resource_class='logical-partition',
cpc_dpm_enabled=False,
metrics=[
('processor-usage', 'integer-metric'),
('zvm-paging-rate', 'integer-metric'),
('cp-processor-usage', 'integer-metric'),
('ifl-processor-usage', 'integer-metric'),
('icf-processor-usage', 'integer-metric'),
('iip-processor-usage', 'integer-metric'),
('cbp-processor-usage', 'integer-metric'),
]),
dict(
name='partition-usage',
resource_class='partition',
cpc_dpm_enabled=True,
metrics=[
('processor-usage', 'integer-metric'),
('network-usage', 'integer-metric'),
('storage-usage', 'integer-metric'),
('accelerator-usage', 'integer-metric'),
('crypto-usage', 'integer-metric'),
]),
dict(
name='zcpc-environmentals-and-power',
resource_class='cpc',
cpc_dpm_enabled=None,
metrics=[
('temperature-celsius', 'double-metric'),
('humidity', 'integer-metric'),
('dew-point-celsius', 'double-metric'),
('power-consumption-watts', 'integer-metric'),
('heat-load', 'integer-metric'),
('heat-load-forced-air', 'integer-metric'),
('heat-load-water', 'integer-metric'),
('exhaust-temperature-celsius', 'double-metric'),
]),
dict(
name='environmental-power-status',
resource_class='cpc',
cpc_dpm_enabled=None,
metrics=[
('linecord-one-name', 'string-metric'),
('linecord-one-power-phase-A', 'integer-metric'),
('linecord-one-power-phase-B', 'integer-metric'),
('linecord-one-power-phase-C', 'integer-metric'),
('linecord-two-name', 'string-metric'),
('linecord-two-power-phase-A', 'integer-metric'),
('linecord-two-power-phase-B', 'integer-metric'),
('linecord-two-power-phase-C', 'integer-metric'),
('linecord-three-name', 'string-metric'),
('linecord-three-power-phase-A', 'integer-metric'),
('linecord-three-power-phase-B', 'integer-metric'),
('linecord-three-power-phase-C', 'integer-metric'),
('linecord-four-name', 'string-metric'),
('linecord-four-power-phase-A', 'integer-metric'),
('linecord-four-power-phase-B', 'integer-metric'),
('linecord-four-power-phase-C', 'integer-metric'),
('linecord-five-name', 'string-metric'),
('linecord-five-power-phase-A', 'integer-metric'),
('linecord-five-power-phase-B', 'integer-metric'),
('linecord-five-power-phase-C', 'integer-metric'),
('linecord-six-name', 'string-metric'),
('linecord-six-power-phase-A', 'integer-metric'),
('linecord-six-power-phase-B', 'integer-metric'),
('linecord-six-power-phase-C', 'integer-metric'),
('linecord-seven-name', 'string-metric'),
('linecord-seven-power-phase-A', 'integer-metric'),
('linecord-seven-power-phase-B', 'integer-metric'),
('linecord-seven-power-phase-C', 'integer-metric'),
('linecord-eight-name', 'string-metric'),
('linecord-eight-power-phase-A', 'integer-metric'),
('linecord-eight-power-phase-B', 'integer-metric'),
('linecord-eight-power-phase-C', 'integer-metric'),
]),
dict(
name='zcpc-processor-usage',
resource_class='cpc',
cpc_dpm_enabled=None,
metrics=[
('processor-name', 'string-metric'),
('processor-type', 'string-metric'),
('processor-usage', 'integer-metric'),
('smt-usage', 'integer-metric'),
('thread0-usage', 'integer-metric'),
('thread1-usage', 'integer-metric'),
]),
dict(
name='crypto-usage',
resource_class='cpc',
cpc_dpm_enabled=False,
metrics=[
('channel-id', 'string-metric'),
('crypto-id', 'string-metric'),
('adapter-usage', 'integer-metric'),
]),
dict(
name='adapter-usage',
resource_class='adapter',
cpc_dpm_enabled=False,
metrics=[
('adapter-usage', 'integer-metric'),
]),
dict(
name='flash-memory-usage',
resource_class='cpc',
cpc_dpm_enabled=False,
metrics=[
('channel-id', 'string-metric'),
('adapter-usage', 'integer-metric'),
]),
dict(
name='roce-usage',
resource_class='cpc',
cpc_dpm_enabled=False,
metrics=[
('channel-id', 'string-metric'),
('adapter-usage', 'integer-metric'),
]),
dict(
name='network-physical-adapter-port',
resource_class='adapter',
cpc_dpm_enabled=True,
metrics=[
('network-port-id', 'integer-metric'),
('bytes-sent', 'long-metric'),
('bytes-received', 'long-metric'),
('packets-sent', 'long-metric'),
('packets-received', 'long-metric'),
('packets-sent-dropped', 'long-metric'),
('packets-received-dropped', 'long-metric'),
('packets-sent-discarded', 'long-metric'),
('packets-received-discarded', 'long-metric'),
('multicast-packets-sent', 'long-metric'),
('multicast-packets-received', 'long-metric'),
('broadcast-packets-sent', 'long-metric'),
('broadcast-packets-received', 'long-metric'),
('interval-bytes-sent', 'long-metric'),
('interval-bytes-received', 'long-metric'),
('bytes-per-second-sent', 'long-metric'),
('bytes-per-second-received', 'long-metric'),
('utilization', 'long-metric'),
('mac-address', 'string-metric'),
('flags', 'long-metric'),
]),
dict(
name='partition-attached-network-interface',
resource_class='nic',
cpc_dpm_enabled=True,
metrics=[
('partition-id', 'string-metric'),
('bytes-sent', 'long-metric'),
('bytes-received', 'long-metric'),
('packets-sent', 'long-metric'),
('packets-received', 'long-metric'),
('packets-sent-dropped', 'long-metric'),
('packets-received-dropped', 'long-metric'),
('packets-sent-discarded', 'long-metric'),
('packets-received-discarded', 'long-metric'),
('multicast-packets-sent', 'long-metric'),
('multicast-packets-received', 'long-metric'),
('broadcast-packets-sent', 'long-metric'),
('broadcast-packets-received', 'long-metric'),
('interval-bytes-sent', 'long-metric'),
('interval-bytes-received', 'long-metric'),
('bytes-per-second-sent', 'long-metric'),
('bytes-per-second-received', 'long-metric'),
('flags', 'long-metric'),
]),
]
class InputError(Exception):
"""
An error that is raised by the faked resource classes and indicates
that the input is invalid in some way.
``args[0]`` will be set to a message detailing the issue.
"""
def __init__(self, message):
# pylint: disable=useless-super-delegation
super(InputError, self).__init__(message)
class FakedBaseResource(object):
"""
A base class for faked resource classes in the faked HMC.
"""
def __init__(self, manager, properties):
self._manager = manager # May be None
if properties is not None:
self._properties = copy.deepcopy(properties)
else:
self._properties = {}
if self.manager:
if self.manager.oid_prop is None:
self._oid = None
else:
if self.manager.oid_prop not in self._properties:
new_oid = self.manager._new_oid()
self._properties[self.manager.oid_prop] = new_oid
self._oid = self._properties[self.manager.oid_prop]
if self.manager.uri_prop not in self._properties:
new_uri = self.manager.base_uri
if self.oid is not None:
new_uri += '/' + self.oid
self._properties[self.manager.uri_prop] = new_uri
self._uri = self._properties[self.manager.uri_prop]
if self.manager.class_value:
if 'class' not in self._properties:
self._properties['class'] = self.manager.class_value
if self.manager.parent:
if 'parent' not in self._properties:
self._properties['parent'] = self.manager.parent.uri
else:
self._oid = None
self._uri = None
def __repr__(self):
"""
Return a string with the state of this faked resource, for debug
purposes.
Note that the derived faked resource classes that have child resources
have their own __repr__() methods, because only they know which child
resources they have.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {_manager_classname} at 0x{_manager_id:08x}\n"
" _oid = {_oid!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
_manager_classname=self._manager.__class__.__name__,
_manager_id=id(self._manager),
_oid=self._oid,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
))
return ret
@property
def manager(self):
"""
The manager for this resource (a derived class of
:class:`~zhmcclient_mock.FakedBaseManager`).
"""
return self._manager
@property
def properties(self):
"""
The properties of this resource (a dictionary).
"""
return self._properties
@property
def oid(self):
"""
The object ID (property 'object-id' or 'element-id') of this resource.
"""
return self._oid
@property
def uri(self):
"""
The object URI (property 'object-uri' or 'element-uri') of this
resource.
"""
return self._uri
@property
def name(self):
"""
The name (property 'name') of this resource.
Raises:
:exc:`KeyError`: Resource does not have a 'name' property.
"""
return self._properties['name']
def update(self, properties):
"""
Update the properties of this resource.
Parameters:
properties (dict):
Resource properties to be updated. Any other properties remain
unchanged.
"""
self._properties.update(properties)
def add_resources(self, resource_dict):
"""
Add faked child resources to this resource, from the provided resource
definition.
Duplicate resource names in the same scope are not permitted.
Although this method is typically used to initially load the faked
HMC with resource state just once, it can be invoked multiple times
and can also be invoked on faked resources (e.g. on a faked CPC).
Parameters:
resource_dict (dict):
Resource definition of faked child resources to be added.
For an explanation of how the resource dictionary is set up,
see the examples below.
For requirements on and auto-generation of certain resource
properties, see the ``add()`` methods of the various faked resource
managers (e.g. :meth:`zhmcclient_mock.FakedCpcManager.add`). For
example, the object-id or element-id properties and the
corresponding uri properties are always auto-generated.
The resource dictionary specifies a tree of resource managers and
resources, in an alternating manner. It starts with the resource
managers of child resources of the target resource, which contains
a list of those child resources. For an HMC, the CPCs managed by
the HMC would be its child resources.
Each resource specifies its own properties (``properties`` key)
and the resource managers for its child resources. For example, the
CPC resource specifies its adapter child resources using the
``adapters`` key. The keys for the child resource managers are the
attribute names of these resource managers in the parent resource.
For example, the ``adapters`` key is named after the
:attr:`zhmcclient.Cpc.adapters` attribute (which has the same name
as in its corresponding faked CPC resource:
:attr:`zhmcclient_mock.FakedCpc.adapters`).
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
resources.
Examples:
Example for targeting a faked HMC for adding a CPC with one adapter::
resource_dict = {
'cpcs': [ # name of manager attribute for this resource
{
'properties': {
'name': 'cpc_1',
},
'adapters': [ # name of manager attribute for this
# resource
{
'properties': {
'object-id': '12',
'name': 'ad_1',
},
'ports': [
{
'properties': {
'name': 'port_1',
}
},
],
},
],
},
],
}
Example for targeting a faked CPC for adding an LPAR and a load
activation profile::
resource_dict = {
'lpars': [ # name of manager attribute for this resource
{
'properties': {
# object-id is not provided -> auto-generated
# object-uri is not provided -> auto-generated
'name': 'lpar_1',
},
},
],
'load_activation_profiles': [ # name of manager attribute
{
'properties': {
# object-id is not provided -> auto-generated
# object-uri is not provided -> auto-generated
'name': 'lpar_1',
},
},
],
}
"""
for child_attr in resource_dict:
child_list = resource_dict[child_attr]
self._process_child_list(self, child_attr, child_list)
def _process_child_list(self, parent_resource, child_attr, child_list):
"""
Add properties of child resources.
"""
child_manager = getattr(parent_resource, child_attr, None)
if child_manager is None:
# The attribute is internal state
return
for child_dict in child_list:
# child_dict is a dict of 'properties' and grand child resources
properties = child_dict.get('properties', None)
if properties is None:
raise InputError("A resource for resource type {} has no "
"properties specified.".format(child_attr))
child_resource = child_manager.add(properties)
for grandchild_attr in child_dict:
if grandchild_attr == 'properties':
continue
grandchild_list = child_dict[grandchild_attr]
self._process_child_list(child_resource, grandchild_attr,
grandchild_list)
class FakedBaseManager(object):
"""
A base class for manager classes for faked resources in the faked HMC.
"""
api_root = '/api' # root of all resource URIs
next_oid = 1 # next object ID, for auto-generating them
def __init__(self, hmc, parent, resource_class, base_uri, oid_prop,
uri_prop, class_value, name_prop,
case_insensitive_names=False):
self._hmc = hmc
self._parent = parent
self._resource_class = resource_class
self._base_uri = base_uri # Base URI for resources of this type
self._oid_prop = oid_prop
self._uri_prop = uri_prop
self._class_value = class_value
self._name_prop = name_prop
self._case_insensitive_names = case_insensitive_names
# List of Faked{Resource} objects in this faked manager, by object ID
self._resources = OrderedDict()
def __repr__(self):
"""
Return a string with the state of this faked manager, for debug
purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _hmc = {_hmc_classname} at 0x{_hmc_id:08x}\n"
" _parent = {_parent_classname} at 0x{_parent_id:08x}\n"
" _resource_class = {_resource_class!r}\n"
" _base_uri = {_base_uri!r}\n"
" _oid_prop = {_oid_prop!r}\n"
" _uri_prop = {_uri_prop!r}\n"
" _class_value = {_class_value!r}\n"
" _name_prop = {_name_prop!r}\n"
" _case_insensitive_names = {_case_insensitive_names}\n"
" _resources = {_resources}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
_hmc_classname=self._hmc.__class__.__name__,
_hmc_id=id(self._hmc),
_parent_classname=self._parent.__class__.__name__,
_parent_id=id(self._parent),
_resource_class=self._resource_class,
_base_uri=self._base_uri,
_oid_prop=self._oid_prop,
_uri_prop=self._uri_prop,
_class_value=self._class_value,
_name_prop=self._name_prop,
_case_insensitive_names=self._case_insensitive_names,
_resources=repr_dict(self._resources, indent=2),
))
return ret
def _matches_filters(self, obj, filter_args):
"""
Return a boolean indicating whether a faked resource object matches a
set of filter arguments.
This is used for implementing filtering in the faked resource managers.
Parameters:
obj (FakedBaseResource):
Resource object.
filter_args (dict):
Filter arguments. For details, see :ref:`Filtering`.
`None` causes the resource to always match.
Returns:
bool: Boolean indicating whether the resource object matches the
filter arguments.
"""
if filter_args is not None:
for prop_name in filter_args:
prop_match = filter_args[prop_name]
if prop_name == self._name_prop:
case_insensitive = self._case_insensitive_names
else:
case_insensitive = False
if not self._matches_prop(
obj, prop_name, prop_match, case_insensitive):
return False
return True
def _matches_prop(self, obj, prop_name, prop_match, case_insensitive):
"""
Return a boolean indicating whether a faked resource object matches
with a single property against a property match value.
This is used for implementing filtering in the faked resource managers.
Parameters:
obj (FakedBaseResource):
Resource object.
prop_name (string):
Name of the property to be matched.
prop_match:
Property match value that is used to match the actual value of
the specified property against, as follows:
- If the match value is a list or tuple, this method is invoked
recursively to find whether one or more match values in the list
match.
- Else if the property is of string type, its value is matched by
interpreting the match value as a regular expression.
- Else the property value is matched by exact value comparison
with the match value.
case_insensitive (bool):
Controls whether the values of string typed properties are matched
case insensitively.
Returns:
bool: Boolean indicating whether the resource object matches w.r.t.
the specified property and the match value.
"""
if isinstance(prop_match, (list, tuple)):
# List items are logically ORed, so one matching item suffices.
for pm in prop_match:
if self._matches_prop(obj, prop_name, pm, case_insensitive):
return True
else:
# Some lists of resources do not have all properties, for example
# Hipersocket adapters do not have a "card-location" property.
# If a filter property does not exist on a resource, the resource
# does not match.
if prop_name not in obj.properties:
return False
prop_value = obj.properties[prop_name]
if isinstance(prop_value, six.string_types):
# HMC resource property is Enum String or (non-enum) String,
# and is both matched by regexp matching. Ideally, regexp
# matching should only be done for non-enum strings, but
# distinguishing them is not possible given that the client
# has no knowledge about the properties.
# The regexp matching implemented in the HMC requires begin and
# end of the string value to match, even if the '^' for begin
# and '$' for end are not specified in the pattern. The code
# here is consistent with that: We add end matching to the
# pattern, and begin matching is done by re.match()
# automatically.
re_match = prop_match + '$'
re_flags = re.IGNORECASE if case_insensitive else 0
m = re.match(re_match, prop_value, flags=re_flags)
if m:
return True
else:
if prop_value == prop_match:
return True
return False
@property
def hmc(self):
"""
The faked HMC this manager is part of (an object of
:class:`~zhmcclient_mock.FakedHmc`).
"""
return self._hmc
@property
def parent(self):
"""
The parent (scoping resource) for this manager (an object of a derived
class of :class:`~zhmcclient_mock.FakedBaseResource`).
"""
return self._parent
@property
def resource_class(self):
"""
The resource class managed by this manager (a derived class of
:class:`~zhmcclient_mock.FakedBaseResource`).
"""
return self._resource_class
@property
def base_uri(self):
"""
The base URI for URIs of resources managed by this manager.
"""
return self._base_uri
@property
def oid_prop(self):
"""
The name of the resource property for the object ID ('object-id' or
'element-id' or 'name').
"""
return self._oid_prop
@property
def uri_prop(self):
"""
The name of the resource property for the object URI ('object-uri' or
'element-uri').
"""
return self._uri_prop
@property
def class_value(self):
"""
The value for the "class" property of resources managed by this
manager, as defined in the data model for the resource.
For example, for LPAR resources this is set to 'logical-partition'.
"""
return self._class_value
def _new_oid(self):
"""
Return a new OID.
"""
new_oid = self.next_oid
self.next_oid += 1
return str(new_oid)
def add(self, properties):
"""
Add a faked resource to this manager.
For URI-based lookup, the resource is also added to the faked HMC.
Parameters:
properties (dict):
Resource properties. If the URI property (e.g. 'object-uri') or the
object ID property (e.g. 'object-id') are not specified, they
will be auto-generated.
Returns:
FakedBaseResource: The faked resource object.
"""
resource = self.resource_class(self, properties)
self._resources[resource.oid] = resource
self._hmc.all_resources[resource.uri] = resource
return resource
def remove(self, oid):
"""
Remove a faked resource from this manager.
Parameters:
oid (string):
The object ID of the resource (e.g. value of the 'object-id'
property).
"""
uri = self._resources[oid].uri
del self._resources[oid]
del self._hmc.all_resources[uri]
def list(self, filter_args=None):
"""
List the faked resources of this manager.
Parameters:
filter_args (dict):
Filter arguments. `None` causes no filtering to happen. See
:meth:`~zhmcclient.BaseManager.list()` for details.
Returns:
list of FakedBaseResource: The faked resource objects of this
manager.
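Example:
    String-typed properties are matched as full-string regular
    expressions (a sketch; ``manager`` stands for any faked resource
    manager, and the names are illustrative)::

        # Matches resources named 'CPC1' or 'CPC2', but not 'CPC10',
        # because begin and end of the string value must match:
        resources = manager.list(filter_args={'name': 'CPC[12]'})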
"""
res = []
for oid in self._resources:
resource = self._resources[oid]
if self._matches_filters(resource, filter_args):
res.append(resource)
return res
def lookup_by_oid(self, oid):
"""
Look up a faked resource by its object ID, in the scope of this
manager.
Parameters:
oid (string):
The object ID of the faked resource (e.g. value of the 'object-id'
property).
Returns:
FakedBaseResource: The faked resource object.
Raises:
KeyError: No resource found for this object ID.
"""
return self._resources[oid]
class FakedHmc(FakedBaseResource):
"""
A faked HMC.
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
An object of this class represents a faked HMC that can have all faked
resources that are relevant for the zhmcclient package.
The Python API to this class and its child resource classes is not
compatible with the zhmcclient API. Instead, these classes serve as an
in-memory backend for a faked session class (see
:class:`zhmcclient_mock.FakedSession`) that replaces the
normal :class:`zhmcclient.Session` class.
Objects of this class should not be created by the user. Instead,
access the :attr:`zhmcclient_mock.FakedSession.hmc` attribute.
"""
def __init__(self, hmc_name, hmc_version, api_version):
super(FakedHmc, self).__init__(manager=None, properties=None)
self.hmc_name = hmc_name
self.hmc_version = hmc_version
self.api_version = api_version
self._metric_groups = {} # by metric group name
for mg_dict in METRIC_GROUPS:
mg_name = mg_dict['name']
mg_types = mg_dict['metrics']
mgd = FakedMetricGroupDefinition(mg_name, mg_types)
self._metric_groups[mg_name] = mgd
self._metric_values = {} # by metric group name
self.cpcs = FakedCpcManager(hmc=self, client=self)
self.metrics_contexts = FakedMetricsContextManager(
hmc=self, client=self)
self.consoles = FakedConsoleManager(hmc=self, client=self)
# Flat list of all Faked{Resource} objs in this faked HMC, by URI:
self.all_resources = {}
self._enabled = True
def __repr__(self):
"""
Return a string with the state of this faked HMC, for debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" hmc_name = {hmc_name!r}\n"
" hmc_version = {hmc_version!r}\n"
" api_version = {api_version!r}\n"
" metric_groups(group names) = {mg_names}\n"
" metric_values(group names) = {mv_names}\n"
" enabled = {enabled!r}\n"
" cpcs = {cpcs}\n"
" metrics_contexts = {metrics_contexts}\n"
" consoles = {consoles}\n"
" all_resources (keys only) = {all_resource_keys}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
hmc_name=self.hmc_name,
hmc_version=self.hmc_version,
api_version=self.api_version,
mg_names=list(self.metric_groups.keys()),
mv_names=list(self.metric_values.keys()),
enabled=self.enabled,
cpcs=repr_manager(self.cpcs, indent=2),
metrics_contexts=repr_manager(self.metrics_contexts, indent=2),
consoles=repr_manager(self.consoles, indent=2),
all_resource_keys=repr_list(self.all_resources.keys(),
indent=2),
))
return ret
@property
def metric_values(self):
"""
:class:`iv:immutable_views.DictView`: The metric values in this HMC that
have been prepared for later retrieval, with:
- key(string):
Metric group name, e.g. 'partition-usage'.
- value(list of :class:`~zhmcclient_mock.FakedMetricObjectValues`):
The metric values of this metric group.
"""
return DictView(self._metric_values)
@property
def metric_groups(self):
"""
:class:`iv:immutable_views.DictView`: The metric groups supported by
this HMC, with:
- key(string):
Metric group name, e.g. 'partition-usage'.
- value(:class:`~zhmcclient_mock.FakedMetricGroupDefinition`):
The metric group definition, including its metrics and their types.
"""
return DictView(self._metric_groups)
@property
def enabled(self):
"""
Return whether the faked HMC is enabled.
"""
return self._enabled
def enable(self):
"""
Enable the faked HMC.
"""
self._enabled = True
def disable(self):
"""
Disable the faked HMC. This will cause an error to be raised when
a faked session attempts to communicate with the disabled HMC.
"""
self._enabled = False
def lookup_by_uri(self, uri):
"""
Look up a faked resource by its object URI, within this faked HMC.
Parameters:
uri (string):
The object URI of the faked resource (e.g. value of the
'object-uri' property).
Returns:
:class:`~zhmcclient_mock.FakedBaseResource`: The faked resource.
Raises:
KeyError: No resource found for this URI.
"""
return self.all_resources[uri]
def add_metric_values(self, values):
"""
Add one set of faked metric values for a particular resource to the
metrics response for a particular metric group, for later retrieval.
For defined metric groups, see chapter "Metric groups" in the
:term:`HMC API` book.
Parameters:
values (:class:`~zhmcclient_mock.FakedMetricObjectValues`):
The set of metric values to be added. It specifies the resource URI
and the targeted metric group name.
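Example:
    A sketch (group name, resource URI and values are illustrative;
    assumes ``from datetime import datetime``)::

        mv = FakedMetricObjectValues(
            group_name='dpm-system-usage-overview',
            resource_uri='/api/cpcs/fake-cpc-1',
            timestamp=datetime.now(),
            values=[('processor-usage', 10)])
        hmc.add_metric_values(mv)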
"""
assert isinstance(values, FakedMetricObjectValues)
group_name = values.group_name
if group_name not in self._metric_values:
self._metric_values[group_name] = []
self._metric_values[group_name].append(values)
class FakedConsoleManager(FakedBaseManager):
"""
A manager for faked Console resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, client):
super(FakedConsoleManager, self).__init__(
hmc=hmc,
parent=client,
resource_class=FakedConsole,
base_uri=self.api_root + '/console',
oid_prop=None, # Console does not have an object ID property
uri_prop='object-uri',
class_value='console',
name_prop='name')
self._console = None
@property
def console(self):
"""
The faked Console representing the faked HMC (an object of
:class:`~zhmcclient_mock.FakedConsole`). The object is cached.
"""
if self._console is None:
self._console = self.list()[0]
return self._console
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Console resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-uri' will be auto-generated to '/api/console',
if not specified.
* 'class' will be auto-generated to 'console',
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedConsole`: The faked Console resource.
"""
return super(FakedConsoleManager, self).add(properties)
class FakedConsole(FakedBaseResource):
"""
A faked Console resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedConsole, self).__init__(
manager=manager,
properties=properties)
self._storage_groups = FakedStorageGroupManager(
hmc=manager.hmc, console=self)
self._users = FakedUserManager(hmc=manager.hmc, console=self)
self._user_roles = FakedUserRoleManager(hmc=manager.hmc, console=self)
self._user_patterns = FakedUserPatternManager(
hmc=manager.hmc, console=self)
self._password_rules = FakedPasswordRuleManager(
hmc=manager.hmc, console=self)
self._tasks = FakedTaskManager(hmc=manager.hmc, console=self)
self._ldap_server_definitions = FakedLdapServerDefinitionManager(
hmc=manager.hmc, console=self)
self._unmanaged_cpcs = FakedUnmanagedCpcManager(
hmc=manager.hmc, console=self)
def __repr__(self):
"""
Return a string with the state of this faked Console resource, for
debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
" _storage_groups = {_storage_groups}\n"
" _users = {_users}\n"
" _user_roles = {_user_roles}\n"
" _user_patterns = {_user_patterns}\n"
" _password_rules = {_password_rules}\n"
" _tasks = {_tasks}\n"
" _ldap_server_definitions = {_ldap_server_definitions}\n"
" _unmanaged_cpcs = {_unmanaged_cpcs}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
_storage_groups=repr_manager(self.storage_groups, indent=2),
_users=repr_manager(self.users, indent=2),
_user_roles=repr_manager(self.user_roles, indent=2),
_user_patterns=repr_manager(self.user_patterns, indent=2),
_password_rules=repr_manager(self.password_rules, indent=2),
_tasks=repr_manager(self.tasks, indent=2),
_ldap_server_definitions=repr_manager(
self.ldap_server_definitions, indent=2),
_unmanaged_cpcs=repr_manager(self.unmanaged_cpcs, indent=2),
))
return ret
@property
def storage_groups(self):
"""
:class:`~zhmcclient_mock.FakedStorageGroupManager`: Access to the faked
Storage Group resources of this Console.
"""
return self._storage_groups
@property
def users(self):
"""
:class:`~zhmcclient_mock.FakedUserManager`: Access to the faked User
resources of this Console.
"""
return self._users
@property
def user_roles(self):
"""
:class:`~zhmcclient_mock.FakedUserRoleManager`: Access to the faked
User Role resources of this Console.
"""
return self._user_roles
@property
def user_patterns(self):
"""
:class:`~zhmcclient_mock.FakedUserPatternManager`: Access to the faked
User Pattern resources of this Console.
"""
return self._user_patterns
@property
def password_rules(self):
"""
:class:`~zhmcclient_mock.FakedPasswordRuleManager`: Access to the
faked Password Rule resources of this Console.
"""
return self._password_rules
@property
def tasks(self):
"""
:class:`~zhmcclient_mock.FakedTaskManager`: Access to the faked Task
resources of this Console.
"""
return self._tasks
@property
def ldap_server_definitions(self):
"""
:class:`~zhmcclient_mock.FakedLdapServerDefinitionManager`: Access to
the faked LDAP Server Definition resources of this Console.
"""
return self._ldap_server_definitions
@property
def unmanaged_cpcs(self):
"""
:class:`~zhmcclient_mock.FakedUnmanagedCpcManager`: Access to the faked
unmanaged CPC resources of this Console.
"""
return self._unmanaged_cpcs
class FakedUserManager(FakedBaseManager):
"""
A manager for faked User resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedUserManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedUser,
base_uri=self.api_root + '/users',
oid_prop='object-id',
uri_prop='object-uri',
class_value='user',
name_prop='name',
case_insensitive_names=True)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked User resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'user',
if not specified.
* 'disabled' will be auto-generated to `False`,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedUser`: The faked User resource.
"""
new_user = super(FakedUserManager, self).add(properties)
# Resource type specific default values
if 'disabled' not in new_user.properties:
new_user.properties['disabled'] = False
return new_user
class FakedUser(FakedBaseResource):
"""
A faked User resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedUser, self).__init__(
manager=manager,
properties=properties)
class FakedUserRoleManager(FakedBaseManager):
"""
A manager for faked User Role resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedUserRoleManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedUserRole,
base_uri=self.api_root + '/user-roles',
oid_prop='object-id',
uri_prop='object-uri',
class_value='user-role',
name_prop='name',
case_insensitive_names=True)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked User Role resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'user-role',
if not specified.
* 'type' will be auto-generated to 'user-defined',
if not specified.
* 'is-inheritance-enabled' will be auto-generated to `False`,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedUserRole`: The faked User Role
resource.
"""
new_user_role = super(FakedUserRoleManager, self).add(properties)
# Resource type specific default values
if 'type' not in new_user_role.properties:
new_user_role.properties['type'] = 'user-defined'
if 'is-inheritance-enabled' not in new_user_role.properties:
new_user_role.properties['is-inheritance-enabled'] = False
return new_user_role
class FakedUserRole(FakedBaseResource):
"""
A faked User Role resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedUserRole, self).__init__(
manager=manager,
properties=properties)
class FakedUserPatternManager(FakedBaseManager):
"""
A manager for faked User Pattern resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedUserPatternManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedUserPattern,
base_uri=console.uri + '/user-patterns',
oid_prop='element-id',
uri_prop='element-uri',
class_value='user-pattern',
name_prop='name',
case_insensitive_names=True)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked User Pattern resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'user-pattern',
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedUserPattern`: The faked User Pattern
resource.
"""
return super(FakedUserPatternManager, self).add(properties)
class FakedUserPattern(FakedBaseResource):
"""
A faked User Pattern resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedUserPattern, self).__init__(
manager=manager,
properties=properties)
class FakedPasswordRuleManager(FakedBaseManager):
"""
A manager for faked Password Rule resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedPasswordRuleManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedPasswordRule,
base_uri=console.uri + '/password-rules',
oid_prop='element-id',
uri_prop='element-uri',
class_value='password-rule',
name_prop='name',
case_insensitive_names=True)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Password Rule resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'password-rule',
if not specified.
* 'min-length' will be auto-generated to 8,
if not specified.
* 'max-length' will be auto-generated to 256,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedPasswordRule`: The faked Password Rule
resource.
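Example:
* A minimal sketch of the auto-set length defaults (assuming `console`
is an existing :class:`~zhmcclient_mock.FakedConsole`):
.. code-block:: python
    rule = console.password_rules.add({'name': 'basic'})
    assert rule.properties['min-length'] == 8
    assert rule.properties['max-length'] == 256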
"""
new_pwrule = super(FakedPasswordRuleManager, self).add(properties)
# Resource type specific default values
if 'min-length' not in new_pwrule.properties:
new_pwrule.properties['min-length'] = 8
if 'max-length' not in new_pwrule.properties:
new_pwrule.properties['max-length'] = 256
return new_pwrule
class FakedPasswordRule(FakedBaseResource):
"""
A faked Password Rule resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedPasswordRule, self).__init__(
manager=manager,
properties=properties)
class FakedTaskManager(FakedBaseManager):
"""
A manager for faked Task resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedTaskManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedTask,
base_uri=console.uri + '/tasks',
oid_prop='element-id',
uri_prop='element-uri',
class_value='task',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Task resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'task',
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedTask`: The faked Task resource.
"""
return super(FakedTaskManager, self).add(properties)
class FakedTask(FakedBaseResource):
"""
A faked Task resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedTask, self).__init__(
manager=manager,
properties=properties)
class FakedLdapServerDefinitionManager(FakedBaseManager):
"""
A manager for faked LDAP Server Definition resources within a faked HMC
(see :class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedLdapServerDefinitionManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedLdapServerDefinition,
base_uri=console.uri + '/ldap-server-definitions',
oid_prop='element-id',
uri_prop='element-uri',
class_value='ldap-server-definition',
name_prop='name',
case_insensitive_names=True)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked LDAP Server Definition resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'ldap-server-definition',
if not specified.
* 'connection-port' will be auto-generated to `None`,
if not specified.
* 'use-ssl' will be auto-generated to `False`,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedLdapServerDefinition`: The faked
LdapServerDefinition resource.
"""
new_lsd = super(FakedLdapServerDefinitionManager, self).add(properties)
# Resource type specific default values
if 'connection-port' not in new_lsd.properties:
new_lsd.properties['connection-port'] = None
if 'use-ssl' not in new_lsd.properties:
new_lsd.properties['use-ssl'] = False
return new_lsd
class FakedLdapServerDefinition(FakedBaseResource):
"""
A faked LDAP Server Definition resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedLdapServerDefinition, self).__init__(
manager=manager,
properties=properties)
class FakedActivationProfileManager(FakedBaseManager):
"""
A manager for faked Activation Profile resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc, profile_type):
ap_uri_segment = profile_type + '-activation-profiles'
ap_class_value = profile_type + '-activation-profile'
super(FakedActivationProfileManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedActivationProfile,
base_uri=cpc.uri + '/' + ap_uri_segment,
oid_prop='name', # This is an exception!
uri_prop='element-uri',
class_value=ap_class_value,
name_prop='name')
self._profile_type = profile_type
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Activation Profile resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'name' (the OID property for this resource type!) will be
auto-generated with a unique value across all instances of this
resource type, if not specified.
* 'element-uri' will be auto-generated based upon the OID ('name')
property, if not specified.
* 'class' will be auto-generated to
'{profile_type}-activation-profile', if not specified.
Returns:
:class:`~zhmcclient_mock.FakedActivationProfile`: The faked
Activation Profile resource.
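Example:
* A minimal sketch showing that 'name' acts as the OID for this
resource type (assuming `cpc` is an existing
:class:`~zhmcclient_mock.FakedCpc` in classic mode):
.. code-block:: python
    profile = cpc.image_activation_profiles.add({'name': 'PROF1'})
    # 'name' is the OID; 'element-uri' was auto-generated from it:
    assert profile.oid == 'PROF1'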
"""
return super(FakedActivationProfileManager, self).add(properties)
@property
def profile_type(self):
"""
Type of the activation profile ('reset', 'image', 'load').
"""
return self._profile_type
class FakedActivationProfile(FakedBaseResource):
"""
A faked Activation Profile resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedActivationProfile, self).__init__(
manager=manager,
properties=properties)
class FakedAdapterManager(FakedBaseManager):
"""
A manager for faked Adapter resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc):
super(FakedAdapterManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedAdapter,
base_uri=self.api_root + '/adapters',
oid_prop='object-id',
uri_prop='object-uri',
class_value='adapter',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Adapter resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'adapter',
if not specified.
* 'status' is auto-set to 'active', if not specified.
* 'adapter-family' or 'type' is required to be specified, in order
to determine whether the adapter is a network or storage adapter.
* 'adapter-family' is auto-set based upon 'type', if not specified.
* For network adapters, 'network-port-uris' is auto-set to an empty
list, if not specified.
* For storage adapters, 'storage-port-uris' is auto-set to an empty
list, if not specified.
Returns:
:class:`~zhmcclient_mock.FakedAdapter`: The faked Adapter resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
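Example:
* A minimal sketch of how 'type' determines 'adapter-family' and the
adapter kind (assuming `cpc` is an existing
:class:`~zhmcclient_mock.FakedCpc`):
.. code-block:: python
    adapter = cpc.adapters.add({'name': 'osa1', 'type': 'osd'})
    assert adapter.properties['adapter-family'] == 'osa'
    assert adapter.adapter_kind == 'network'
    assert adapter.properties['network-port-uris'] == []
    assert adapter.properties['status'] == 'active'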
"""
return super(FakedAdapterManager, self).add(properties)
class FakedAdapter(FakedBaseResource):
"""
A faked Adapter resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedAdapter, self).__init__(
manager=manager,
properties=properties)
# Initial values to be prepared for raising InputError
self._ports = None
# TODO: Maybe move this stuff into AdapterManager.add()?
if 'adapter-family' in self._properties:
family = self._properties['adapter-family']
if family in ('osa', 'roce', 'hipersockets'):
self._adapter_kind = 'network'
elif family in ('ficon',):
self._adapter_kind = 'storage'
else:
self._adapter_kind = 'other'
elif 'type' in self._properties:
# because 'type' is more specific than 'adapter-family', we can
# auto-set 'adapter-family' from 'type'.
type_ = self._properties['type']
if type_ in ('osd', 'osm'):
self._properties['adapter-family'] = 'osa'
self._adapter_kind = 'network'
elif type_ == 'roce':
self._properties['adapter-family'] = 'roce'
self._adapter_kind = 'network'
elif type_ == 'hipersockets':
self._properties['adapter-family'] = 'hipersockets'
self._adapter_kind = 'network'
elif type_ in ('fcp', 'fc'):
self._properties['adapter-family'] = 'ficon'
self._adapter_kind = 'storage'
elif type_ == 'crypto':
self._properties['adapter-family'] = 'crypto'
self._adapter_kind = 'other'
elif type_ == 'zedc':
self._properties['adapter-family'] = 'accelerator'
self._adapter_kind = 'other'
else:
raise InputError("FakedAdapter with object-id=%s has an "
"unknown value in its 'type' property: %s." %
(self.oid, type_))
else:
raise InputError("FakedAdapter with object-id=%s must have "
"'adapter-family' or 'type' property specified." %
self.oid)
if self.adapter_kind == 'network':
if 'network-port-uris' not in self._properties:
self._properties['network-port-uris'] = []
self._ports = FakedPortManager(hmc=manager.hmc, adapter=self)
elif self.adapter_kind == 'storage':
if 'storage-port-uris' not in self._properties:
self._properties['storage-port-uris'] = []
self._ports = FakedPortManager(hmc=manager.hmc, adapter=self)
else:
self._ports = None
if 'status' not in self._properties:
self._properties['status'] = 'active'
def __repr__(self):
"""
Return a string with the state of this faked Adapter resource, for
debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
" _ports = {_ports}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
_ports=repr_manager(self.ports, indent=2),
))
return ret
@property
def ports(self):
"""
:class:`~zhmcclient_mock.FakedPort`: The Port resources of this
Adapter.
If the kind of adapter does not have ports, this is `None`.
"""
return self._ports
@property
def adapter_kind(self):
"""
string: The kind of adapter, determined from the 'adapter-family' or
'type' properties. This is currently used to distinguish storage and
network adapters.
Possible values are:
* 'network' - A network adapter (OSA, ROCE, Hipersockets)
* 'storage' - A storage adapter (FICON, FCP)
* 'other' - Another adapter (zEDC, Crypto)
"""
return self._adapter_kind
class FakedCpcManager(FakedBaseManager):
"""
A manager for faked managed CPC resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, client):
super(FakedCpcManager, self).__init__(
hmc=hmc,
parent=client,
resource_class=FakedCpc,
base_uri=self.api_root + '/cpcs',
oid_prop='object-id',
uri_prop='object-uri',
class_value='cpc',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked CPC resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'cpc',
if not specified.
* 'dpm-enabled' is auto-set to `False`, if not specified.
* 'is-ensemble-member' is auto-set to `False`, if not specified.
* 'status' is auto-set, if not specified, as follows: If the
'dpm-enabled' property is `True`, it is set to 'active';
otherwise it is set to 'operating'.
Returns:
:class:`~zhmcclient_mock.FakedCpc`: The faked CPC resource.
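Example:
* A minimal sketch of the status defaulting (assuming `session` is a
:class:`zhmcclient_mock.FakedSession`, whose faked HMC exposes this
manager as `session.hmc.cpcs`):
.. code-block:: python
    cpc1 = session.hmc.cpcs.add({'name': 'CPC1', 'dpm-enabled': True})
    assert cpc1.properties['status'] == 'active'
    cpc2 = session.hmc.cpcs.add({'name': 'CPC2'})
    # 'dpm-enabled' defaulted to False, so:
    assert cpc2.properties['status'] == 'operating'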
"""
return super(FakedCpcManager, self).add(properties)
class FakedCpc(FakedBaseResource):
"""
A faked managed CPC resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedCpc, self).__init__(
manager=manager,
properties=properties)
self._lpars = FakedLparManager(hmc=manager.hmc, cpc=self)
self._partitions = FakedPartitionManager(hmc=manager.hmc, cpc=self)
self._adapters = FakedAdapterManager(hmc=manager.hmc, cpc=self)
self._virtual_switches = FakedVirtualSwitchManager(
hmc=manager.hmc, cpc=self)
self._capacity_groups = FakedCapacityGroupManager(
hmc=manager.hmc, cpc=self)
self._reset_activation_profiles = FakedActivationProfileManager(
hmc=manager.hmc, cpc=self, profile_type='reset')
self._image_activation_profiles = FakedActivationProfileManager(
hmc=manager.hmc, cpc=self, profile_type='image')
self._load_activation_profiles = FakedActivationProfileManager(
hmc=manager.hmc, cpc=self, profile_type='load')
if 'dpm-enabled' not in self._properties:
self._properties['dpm-enabled'] = False
if 'is-ensemble-member' not in self._properties:
self._properties['is-ensemble-member'] = False
if 'status' not in self._properties:
if self.dpm_enabled:
self._properties['status'] = 'active'
else:
self._properties['status'] = 'operating'
def __repr__(self):
"""
Return a string with the state of this faked Cpc resource, for debug
purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
" _lpars = {_lpars}\n"
" _partitions = {_partitions}\n"
" _adapters = {_adapters}\n"
" _virtual_switches = {_virtual_switches}\n"
" _capacity_groups = {_capacity_groups}\n"
" _reset_activation_profiles = {_reset_activation_profiles}\n"
" _image_activation_profiles = {_image_activation_profiles}\n"
" _load_activation_profiles = {_load_activation_profiles}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
_lpars=repr_manager(self.lpars, indent=2),
_partitions=repr_manager(self.partitions, indent=2),
_adapters=repr_manager(self.adapters, indent=2),
_virtual_switches=repr_manager(
self.virtual_switches, indent=2),
_capacity_groups=repr_manager(
self.capacity_groups, indent=2),
_reset_activation_profiles=repr_manager(
self.reset_activation_profiles, indent=2),
_image_activation_profiles=repr_manager(
self.image_activation_profiles, indent=2),
_load_activation_profiles=repr_manager(
self.load_activation_profiles, indent=2),
))
return ret
@property
def dpm_enabled(self):
"""
bool: Indicates whether this CPC is in DPM mode.
This returns the value of the 'dpm-enabled' property.
"""
return self._properties['dpm-enabled']
@property
def lpars(self):
"""
:class:`~zhmcclient_mock.FakedLparManager`: Access to the faked LPAR
resources of this CPC.
"""
return self._lpars
@property
def partitions(self):
"""
:class:`~zhmcclient_mock.FakedPartitionManager`: Access to the faked
Partition resources of this CPC.
"""
return self._partitions
@property
def adapters(self):
"""
:class:`~zhmcclient_mock.FakedAdapterManager`: Access to the faked
Adapter resources of this CPC.
"""
return self._adapters
@property
def virtual_switches(self):
"""
:class:`~zhmcclient_mock.FakedVirtualSwitchManager`: Access to the
faked Virtual Switch resources of this CPC.
"""
return self._virtual_switches
@property
def capacity_groups(self):
"""
:class:`~zhmcclient_mock.FakedCapacityGroupManager`: Access to the
faked Capacity Group resources of this CPC.
"""
return self._capacity_groups
@property
def reset_activation_profiles(self):
"""
:class:`~zhmcclient_mock.FakedActivationProfileManager`: Access to the
faked Reset Activation Profile resources of this CPC.
"""
return self._reset_activation_profiles
@property
def image_activation_profiles(self):
"""
:class:`~zhmcclient_mock.FakedActivationProfileManager`: Access to the
faked Image Activation Profile resources of this CPC.
"""
return self._image_activation_profiles
@property
def load_activation_profiles(self):
"""
:class:`~zhmcclient_mock.FakedActivationProfileManager`: Access to the
faked Load Activation Profile resources of this CPC.
"""
return self._load_activation_profiles
class FakedUnmanagedCpcManager(FakedBaseManager):
"""
A manager for faked unmanaged CPC resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedUnmanagedCpcManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedUnmanagedCpc,
base_uri=self.api_root + '/cpcs',
oid_prop='object-id',
uri_prop='object-uri',
class_value=None,
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked unmanaged CPC resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedUnmanagedCpc`: The faked unmanaged CPC
resource.
"""
return super(FakedUnmanagedCpcManager, self).add(properties)
class FakedUnmanagedCpc(FakedBaseResource):
"""
A faked unmanaged CPC resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedUnmanagedCpc, self).__init__(
manager=manager,
properties=properties)
def __repr__(self):
"""
Return a string with the state of this faked unmanaged Cpc resource,
for debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
))
return ret
class FakedHbaManager(FakedBaseManager):
"""
A manager for faked HBA resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, partition):
super(FakedHbaManager, self).__init__(
hmc=hmc,
parent=partition,
resource_class=FakedHba,
base_uri=partition.uri + '/hbas',
oid_prop='element-id',
uri_prop='element-uri',
class_value='hba',
name_prop='name')
def add(self, properties):
"""
Add a faked HBA resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'hba',
if not specified.
* 'adapter-port-uri' identifies the backing FCP port for this HBA
and is required to be specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
This method also updates the 'hba-uris' property in the parent
faked Partition resource, by adding the URI for the faked HBA
resource.
Returns:
:class:`~zhmcclient_mock.FakedHba`: The faked HBA resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
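Example:
* A minimal sketch (assuming `partition` is an existing
:class:`~zhmcclient_mock.FakedPartition` and `port` is a faked FCP
Port backing the HBA):
.. code-block:: python
    hba = partition.hbas.add({
        'name': 'hba1',
        'adapter-port-uri': port.uri,
    })
    # Device number was auto-allocated from 0x8000..0xFFFF:
    assert 0x8000 <= int(hba.properties['device-number'], 16) <= 0xFFFF
    # WWPN was auto-allocated and the parent partition updated:
    assert hba.properties['wwpn'].startswith('AFFEAFFE0000')
    assert hba.uri in partition.properties['hba-uris']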
"""
new_hba = super(FakedHbaManager, self).add(properties)
partition = self.parent
# Reflect the new HBA in the partition
assert 'hba-uris' in partition.properties
partition.properties['hba-uris'].append(new_hba.uri)
# Create a default device-number if not specified
if 'device-number' not in new_hba.properties:
devno = partition.devno_alloc()
new_hba.properties['device-number'] = devno
# Create a default wwpn if not specified
if 'wwpn' not in new_hba.properties:
wwpn = partition.wwpn_alloc()
new_hba.properties['wwpn'] = wwpn
return new_hba
def remove(self, oid):
"""
Remove a faked HBA resource.
This method also updates the 'hba-uris' property in the parent
Partition resource, by removing the URI for the faked HBA resource.
Parameters:
oid (string):
The object ID of the faked HBA resource.
"""
hba = self.lookup_by_oid(oid)
partition = self.parent
devno = hba.properties.get('device-number', None)
if devno:
partition.devno_free_if_allocated(devno)
wwpn = hba.properties.get('wwpn', None)
if wwpn:
partition.wwpn_free_if_allocated(wwpn)
assert 'hba-uris' in partition.properties
hba_uris = partition.properties['hba-uris']
hba_uris.remove(hba.uri)
super(FakedHbaManager, self).remove(oid) # deletes the resource
class FakedHba(FakedBaseResource):
"""
A faked HBA resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedHba, self).__init__(
manager=manager,
properties=properties)
class FakedLparManager(FakedBaseManager):
"""
A manager for faked LPAR resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc):
super(FakedLparManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedLpar,
base_uri=self.api_root + '/logical-partitions',
oid_prop='object-id',
uri_prop='object-uri',
class_value='logical-partition',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked LPAR resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'logical-partition',
if not specified.
* 'status' is auto-set to 'not-activated', if not specified.
Returns:
:class:`~zhmcclient_mock.FakedLpar`: The faked LPAR resource.
"""
return super(FakedLparManager, self).add(properties)
class FakedLpar(FakedBaseResource):
"""
A faked LPAR resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedLpar, self).__init__(
manager=manager,
properties=properties)
if 'status' not in self._properties:
self._properties['status'] = 'not-activated'
class FakedNicManager(FakedBaseManager):
"""
A manager for faked NIC resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, partition):
super(FakedNicManager, self).__init__(
hmc=hmc,
parent=partition,
resource_class=FakedNic,
base_uri=partition.uri + '/nics',
oid_prop='element-id',
uri_prop='element-uri',
class_value='nic',
name_prop='name')
def add(self, properties):
"""
Add a faked NIC resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'nic',
if not specified.
* Either 'network-adapter-port-uri' (for backing ROCE adapters) or
'virtual-switch-uri' (for backing OSA or Hipersockets adapters) is
required to be specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
* 'type' will be auto-generated to 'iqd', if not specified.
* 'ssc-management-nic' will be auto-generated to `False`,
if not specified.
This method also updates the 'nic-uris' property in the parent
faked Partition resource, by adding the URI for the faked NIC
resource.
This method also updates the 'connected-vnic-uris' property in the
virtual switch referenced by 'virtual-switch-uri' property,
and sets it to the URI of the faked NIC resource.
Returns:
:class:`zhmcclient_mock.FakedNic`: The faked NIC resource.
Raises:
:exc:`zhmcclient_mock.InputError`: Some issue with the input
properties.
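Example:
* A minimal sketch of an OSA-backed NIC (assuming `partition` is an
existing :class:`~zhmcclient_mock.FakedPartition` and `vswitch` is a
faked Virtual Switch on the same CPC):
.. code-block:: python
    nic = partition.nics.add({
        'name': 'nic1',
        'virtual-switch-uri': vswitch.uri,
    })
    # Both sides of the relation were updated:
    assert nic.uri in partition.properties['nic-uris']
    assert nic.uri in vswitch.properties['connected-vnic-uris']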
"""
new_nic = super(FakedNicManager, self).add(properties)
partition = self.parent
# For OSA-backed NICs, reflect the new NIC in the virtual switch
if 'virtual-switch-uri' in new_nic.properties:
vswitch_uri = new_nic.properties['virtual-switch-uri']
# Even though the URI handler when calling this method ensures that
# the vswitch exists, this method can be called by the user as
# well, so we have to handle the possibility that it does not
# exist:
try:
vswitch = self.hmc.lookup_by_uri(vswitch_uri)
except KeyError:
new_exc = InputError("The virtual switch specified in the "
"'virtual-switch-uri' property does not "
"exist: {!r}".format(vswitch_uri))
new_exc.__cause__ = None
raise new_exc # InputError
connected_uris = vswitch.properties['connected-vnic-uris']
if new_nic.uri not in connected_uris:
connected_uris.append(new_nic.uri)
# Create a default device-number if not specified
if 'device-number' not in new_nic.properties:
devno = partition.devno_alloc()
new_nic.properties['device-number'] = devno
# Resource type specific default values
if 'type' not in new_nic.properties:
new_nic.properties['type'] = 'iqd'
if 'ssc-management-nic' not in new_nic.properties:
new_nic.properties['ssc-management-nic'] = False
# Reflect the new NIC in the partition
assert 'nic-uris' in partition.properties
partition.properties['nic-uris'].append(new_nic.uri)
return new_nic
def remove(self, oid):
"""
Remove a faked NIC resource.
This method also updates the 'nic-uris' property in the parent
Partition resource, by removing the URI for the faked NIC resource.
Parameters:
oid (string):
The object ID of the faked NIC resource.
"""
nic = self.lookup_by_oid(oid)
partition = self.parent
devno = nic.properties.get('device-number', None)
if devno:
partition.devno_free_if_allocated(devno)
assert 'nic-uris' in partition.properties
nic_uris = partition.properties['nic-uris']
nic_uris.remove(nic.uri)
super(FakedNicManager, self).remove(oid) # deletes the resource
class FakedNic(FakedBaseResource):
"""
A faked NIC resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedNic, self).__init__(
manager=manager,
properties=properties)
class FakedPartitionManager(FakedBaseManager):
"""
A manager for faked Partition resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc):
super(FakedPartitionManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedPartition,
base_uri=self.api_root + '/partitions',
oid_prop='object-id',
uri_prop='object-uri',
class_value='partition',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Partition resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'partition',
if not specified.
* 'hba-uris' will be auto-generated as an empty array, if not
specified.
* 'nic-uris' will be auto-generated as an empty array, if not
specified.
* 'virtual-function-uris' will be auto-generated as an empty array,
if not specified.
* 'status' is auto-set to 'stopped', if not specified.
Returns:
:class:`~zhmcclient_mock.FakedPartition`: The faked Partition
resource.
"""
return super(FakedPartitionManager, self).add(properties)
class FakedPartition(FakedBaseResource):
"""
A faked Partition resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
Each partition uses the device number range of 0x8000 to 0xFFFF for
automatically assigned device numbers of HBAs, NICs and virtual functions.
Users of the mock support should not use device numbers in that range
(unless all of them are user-assigned for a particular partition).
"""
def __init__(self, manager, properties):
super(FakedPartition, self).__init__(
manager=manager,
properties=properties)
if 'hba-uris' not in self._properties:
self._properties['hba-uris'] = []
if 'nic-uris' not in self._properties:
self._properties['nic-uris'] = []
if 'virtual-function-uris' not in self._properties:
self._properties['virtual-function-uris'] = []
if 'status' not in self._properties:
self._properties['status'] = 'stopped'
self._nics = FakedNicManager(hmc=manager.hmc, partition=self)
self._hbas = FakedHbaManager(hmc=manager.hmc, partition=self)
self._virtual_functions = FakedVirtualFunctionManager(
hmc=manager.hmc, partition=self)
self._devno_pool = IdPool(0x8000, 0xFFFF)
self._wwpn_pool = IdPool(0x8000, 0xFFFF)
def __repr__(self):
"""
Return a string with the state of this faked Partition resource, for
debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
" _nics = {_nics}\n"
" _hbas = {_hbas}\n"
" _virtual_functions = {_virtual_functions}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
_nics=repr_manager(self.nics, indent=2),
_hbas=repr_manager(self.hbas, indent=2),
_virtual_functions=repr_manager(
self.virtual_functions, indent=2),
))
return ret
@property
def nics(self):
"""
:class:`~zhmcclient_mock.FakedNicManager`: Access to the faked NIC
resources of this Partition.
"""
return self._nics
@property
def hbas(self):
"""
:class:`~zhmcclient_mock.FakedHbaManager`: Access to the faked HBA
resources of this Partition.
"""
return self._hbas
@property
def virtual_functions(self):
"""
:class:`~zhmcclient_mock.FakedVirtualFunctionManager`: Access to the
faked Virtual Function resources of this Partition.
"""
return self._virtual_functions
def devno_alloc(self):
"""
Allocates a device number unique to this partition, in the range of
0x8000 to 0xFFFF.
Returns:
string: The device number as four hexadecimal digits in upper case.
Raises:
ValueError: No more device numbers available in that range.
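Example:
* A minimal allocate/free round trip (assuming `partition` is an
existing :class:`~zhmcclient_mock.FakedPartition`):
.. code-block:: python
    devno = partition.devno_alloc()   # e.g. '8000'
    assert 0x8000 <= int(devno, 16) <= 0xFFFF
    partition.devno_free(devno)       # returns it to the pool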
"""
devno_int = self._devno_pool.alloc()
devno = "{:04X}".format(devno_int)
return devno
def devno_free(self, devno):
"""
Free a device number allocated with :meth:`devno_alloc`.
The device number must be allocated.
Parameters:
devno (string): The device number as four hexadecimal digits.
Raises:
ValueError: Device number not in pool range or not currently
allocated.
"""
devno_int = int(devno, 16)
self._devno_pool.free(devno_int)
def devno_free_if_allocated(self, devno):
"""
Free a device number allocated with :meth:`devno_alloc`.
If the device number is not currently allocated or not in the pool
range, nothing happens.
Parameters:
devno (string): The device number as four hexadecimal digits.
"""
devno_int = int(devno, 16)
self._devno_pool.free_if_allocated(devno_int)
def wwpn_alloc(self):
"""
Allocates a WWPN unique to this partition, in the range of
0xAFFEAFFE00008000 to 0xAFFEAFFE0000FFFF.
Returns:
string: The WWPN as 16 hexadecimal digits in upper case.
Raises:
ValueError: No more WWPNs available in that range.
"""
wwpn_int = self._wwpn_pool.alloc()
wwpn = "AFFEAFFE0000" + "{:04X}".format(wwpn_int)
return wwpn
def wwpn_free(self, wwpn):
"""
Free a WWPN allocated with :meth:`wwpn_alloc`.
The WWPN must be allocated.
Parameters:
wwpn (string): The WWPN as 16 hexadecimal digits.
Raises:
ValueError: WWPN not in pool range or not currently
allocated.
"""
wwpn_int = int(wwpn[-4:], 16)
self._wwpn_pool.free(wwpn_int)
def wwpn_free_if_allocated(self, wwpn):
"""
Free a WWPN allocated with :meth:`wwpn_alloc`.
If the WWPN is not currently allocated or not in the pool
range, nothing happens.
Parameters:
wwpn (string): The WWPN as 16 hexadecimal digits.
"""
wwpn_int = int(wwpn[-4:], 16)
self._wwpn_pool.free_if_allocated(wwpn_int)
class FakedPortManager(FakedBaseManager):
"""
A manager for faked Adapter Port resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, adapter):
if adapter.adapter_kind == 'network':
port_uri_segment = 'network-ports'
port_class_value = 'network-port'
elif adapter.adapter_kind == 'storage':
port_uri_segment = 'storage-ports'
port_class_value = 'storage-port'
else:
raise AssertionError("FakedAdapter with object-id=%s must be a "
"storage or network adapter to have ports." %
adapter.oid)
super(FakedPortManager, self).__init__(
hmc=hmc,
parent=adapter,
resource_class=FakedPort,
base_uri=adapter.uri + '/' + port_uri_segment,
oid_prop='element-id',
uri_prop='element-uri',
class_value=port_class_value,
name_prop='name')
def add(self, properties):
"""
Add a faked Port resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'network-port' or
'storage-port', if not specified.
This method also updates the 'network-port-uris' or
'storage-port-uris' property in the parent Adapter resource, by
adding the URI for the faked Port resource.
Returns:
:class:`zhmcclient_mock.FakedPort`: The faked Port resource.
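Example:
* A minimal sketch (assuming `adapter` is a faked network Adapter,
:class:`~zhmcclient_mock.FakedAdapter`):
.. code-block:: python
    port = adapter.ports.add({'name': 'port0'})
    # The parent adapter's port URI list was updated:
    assert port.uri in adapter.properties['network-port-uris']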
"""
new_port = super(FakedPortManager, self).add(properties)
adapter = self.parent
if 'network-port-uris' in adapter.properties:
adapter.properties['network-port-uris'].append(new_port.uri)
if 'storage-port-uris' in adapter.properties:
adapter.properties['storage-port-uris'].append(new_port.uri)
return new_port
def remove(self, oid):
"""
Remove a faked Port resource.
This method also updates the 'network-port-uris' or 'storage-port-uris'
property in the parent Adapter resource, by removing the URI for the
faked Port resource.
Parameters:
oid (string):
The object ID of the faked Port resource.
"""
port = self.lookup_by_oid(oid)
adapter = self.parent
if 'network-port-uris' in adapter.properties:
port_uris = adapter.properties['network-port-uris']
port_uris.remove(port.uri)
if 'storage-port-uris' in adapter.properties:
port_uris = adapter.properties['storage-port-uris']
port_uris.remove(port.uri)
super(FakedPortManager, self).remove(oid) # deletes the resource
class FakedPort(FakedBaseResource):
"""
A faked Adapter Port resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedPort, self).__init__(
manager=manager,
properties=properties)
class FakedVirtualFunctionManager(FakedBaseManager):
"""
A manager for faked Virtual Function resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, partition):
super(FakedVirtualFunctionManager, self).__init__(
hmc=hmc,
parent=partition,
resource_class=FakedVirtualFunction,
base_uri=partition.uri + '/virtual-functions',
oid_prop='element-id',
uri_prop='element-uri',
class_value='virtual-function',
name_prop='name')
def add(self, properties):
"""
Add a faked Virtual Function resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'virtual-function',
if not specified.
* 'device-number' will be auto-generated with a unique value
within the partition in the range 0x8000 to 0xFFFF, if not
specified.
This method also updates the 'virtual-function-uris' property in
the parent Partition resource, by adding the URI for the faked
Virtual Function resource.
Returns:
:class:`zhmcclient_mock.FakedVirtualFunction`: The faked Virtual
Function resource.
"""
new_vf = super(FakedVirtualFunctionManager, self).add(properties)
partition = self.parent
assert 'virtual-function-uris' in partition.properties
partition.properties['virtual-function-uris'].append(new_vf.uri)
if 'device-number' not in new_vf.properties:
devno = partition.devno_alloc()
new_vf.properties['device-number'] = devno
return new_vf
def remove(self, oid):
"""
Remove a faked Virtual Function resource.
This method also updates the 'virtual-function-uris' property in the
parent Partition resource, by removing the URI for the faked Virtual
Function resource.
Parameters:
oid (string):
The object ID of the faked Virtual Function resource.
"""
virtual_function = self.lookup_by_oid(oid)
partition = self.parent
devno = virtual_function.properties.get('device-number', None)
if devno:
partition.devno_free_if_allocated(devno)
assert 'virtual-function-uris' in partition.properties
vf_uris = partition.properties['virtual-function-uris']
vf_uris.remove(virtual_function.uri)
super(FakedVirtualFunctionManager, self).remove(oid) # deletes res.
class FakedVirtualFunction(FakedBaseResource):
"""
A faked Virtual Function resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedVirtualFunction, self).__init__(
manager=manager,
properties=properties)
class FakedVirtualSwitchManager(FakedBaseManager):
"""
A manager for faked Virtual Switch resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc):
super(FakedVirtualSwitchManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedVirtualSwitch,
base_uri=self.api_root + '/virtual-switches',
oid_prop='object-id',
uri_prop='object-uri',
class_value='virtual-switch',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Virtual Switch resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'virtual-switch',
if not specified.
* 'connected-vnic-uris' will be auto-generated as an empty array,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedVirtualSwitch`: The faked Virtual
Switch resource.
"""
return super(FakedVirtualSwitchManager, self).add(properties)
class FakedVirtualSwitch(FakedBaseResource):
"""
A faked Virtual Switch resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedVirtualSwitch, self).__init__(
manager=manager,
properties=properties)
if 'connected-vnic-uris' not in self._properties:
self._properties['connected-vnic-uris'] = []
class FakedStorageGroupManager(FakedBaseManager):
"""
A manager for faked StorageGroup resources within a faked Console (see
:class:`zhmcclient_mock.FakedConsole`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, console):
super(FakedStorageGroupManager, self).__init__(
hmc=hmc,
parent=console,
resource_class=FakedStorageGroup,
base_uri=self.api_root + '/storage-groups',
oid_prop='object-id',
uri_prop='object-uri',
class_value='storage-group',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked StorageGroup resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'object-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'object-uri' will be auto-generated based upon the object ID,
if not specified.
* 'class' will be auto-generated to 'storage-group',
if not specified.
* 'storage-volume-uris' will be auto-generated as an empty array,
if not specified.
* 'shared' is auto-set to False, if not specified.
Returns:
:class:`~zhmcclient_mock.FakedStorageGroup`: The faked StorageGroup
resource.
"""
return super(FakedStorageGroupManager, self).add(properties)
class FakedStorageGroup(FakedBaseResource):
"""
A faked StorageGroup resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedStorageGroup, self).__init__(
manager=manager,
properties=properties)
if 'storage-volume-uris' not in self._properties:
self._properties['storage-volume-uris'] = []
if 'shared' not in self._properties:
self._properties['shared'] = False
self._storage_volumes = FakedStorageVolumeManager(
hmc=manager.hmc, storage_group=self)
def __repr__(self):
"""
Return a string with the state of this faked StorageGroup resource, for
debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
" _storage_volumes = {_storage_volumes}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
_storage_volumes=repr_manager(self.storage_volumes, indent=2),
))
return ret
@property
def storage_volumes(self):
"""
:class:`~zhmcclient_mock.FakedStorageVolumeManager`: Access to the
faked StorageVolume resources of this StorageGroup.
"""
return self._storage_volumes
class FakedStorageVolumeManager(FakedBaseManager):
"""
A manager for faked StorageVolume resources within a faked StorageGroup
(see :class:`zhmcclient_mock.FakedStorageGroup`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, storage_group):
super(FakedStorageVolumeManager, self).__init__(
hmc=hmc,
parent=storage_group,
resource_class=FakedStorageVolume,
base_uri=storage_group.uri + '/storage-volumes',
oid_prop='element-id',
uri_prop='element-uri',
class_value='storage-volume',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked StorageVolume resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'storage-volume',
if not specified.
* 'storage-volume-uris' array in parent storage group will be
updated to add the URI of the new faked StorageVolume resource.
Returns:
:class:`~zhmcclient_mock.FakedStorageVolume`: The faked StorageVolume
resource.
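Example:
* A minimal sketch (assuming `console` is an existing
:class:`~zhmcclient_mock.FakedConsole`):
.. code-block:: python
    sg = console.storage_groups.add({'name': 'sg1'})
    vol = sg.storage_volumes.add({'name': 'vol1'})
    # The parent storage group was updated:
    assert vol.uri in sg.properties['storage-volume-uris']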
"""
stovol = super(FakedStorageVolumeManager, self).add(properties)
stogrp = stovol.manager.parent
if stovol.uri not in stogrp.properties['storage-volume-uris']:
stogrp.properties['storage-volume-uris'].append(stovol.uri)
return stovol
class FakedStorageVolume(FakedBaseResource):
"""
A faked StorageVolume resource within a faked StorageGroup (see
:class:`zhmcclient_mock.FakedStorageGroup`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedStorageVolume, self).__init__(
manager=manager,
properties=properties)
def __repr__(self):
"""
Return a string with the state of this faked StorageVolume resource,
for debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
))
return ret
class FakedCapacityGroupManager(FakedBaseManager):
"""
A manager for faked CapacityGroup resources within a faked Cpc (see
:class:`zhmcclient_mock.FakedCpc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
"""
def __init__(self, hmc, cpc):
super(FakedCapacityGroupManager, self).__init__(
hmc=hmc,
parent=cpc,
resource_class=FakedCapacityGroup,
base_uri=cpc.uri + '/capacity-groups',
oid_prop='element-id',
uri_prop='element-uri',
class_value='capacity-group',
name_prop='name')
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked CapacityGroup resource.
Parameters:
properties (dict):
Resource properties.
Special handling and requirements for certain properties:
* 'element-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'element-uri' will be auto-generated based upon the element ID,
if not specified.
* 'class' will be auto-generated to 'capacity-group',
if not specified.
* 'capping-enabled' will be auto-generated to `True`,
if not specified.
Returns:
:class:`~zhmcclient_mock.FakedCapacityGroup`: The faked CapacityGroup
resource.
"""
return super(FakedCapacityGroupManager, self).add(properties)
class FakedCapacityGroup(FakedBaseResource):
"""
A faked CapacityGroup resource within a faked Cpc (see
:class:`zhmcclient_mock.FakedCpc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
"""
def __init__(self, manager, properties):
super(FakedCapacityGroup, self).__init__(
manager=manager,
properties=properties)
if 'capping-enabled' not in self._properties:
self._properties['capping-enabled'] = True
if 'partition-uris' not in self._properties:
self._properties['partition-uris'] = []
def __repr__(self):
"""
Return a string with the state of this faked CapacityGroup resource, for
debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" _manager = {manager_classname} at 0x{manager_id:08x}\n"
" _manager._parent._uri = {parent_uri!r}\n"
" _uri = {_uri!r}\n"
" _properties = {_properties}\n"
")".format(
classname=self.__class__.__name__,
id=id(self),
manager_classname=self._manager.__class__.__name__,
manager_id=id(self._manager),
parent_uri=self._manager.parent.uri,
_uri=self._uri,
_properties=repr_dict(self._properties, indent=2),
))
return ret
class FakedMetricsContextManager(FakedBaseManager):
"""
A manager for faked Metrics Context resources within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseManager`, see there for
common methods and attributes.
Example:
* The following code sets up the faked data for metrics retrieval for
partition usage metrics, and then retrieves the metrics:
.. code-block:: python
session = FakedSession('fake-host', 'fake-hmc', '2.13.1', '1.8')
client = Client(session)
# URIs of (faked or real) Partitions the metrics apply to:
part1_uri = ...
part2_uri = ...
# Prepare the faked metric response for that metric group, with
# data for two partitions:
session.hmc.add_metric_values(
FakedMetricObjectValues(
group_name='partition-usage',
resource_uri=part1_uri,
timestamp=datetime.now(),
values=[
('processor-usage', 15),
('network-usage', 0),
('storage-usage', 1),
('accelerator-usage', 0),
('crypto-usage', 0),
]))
session.hmc.add_metric_values(
FakedMetricObjectValues(
group_name='partition-usage',
resource_uri=part2_uri,
timestamp=datetime.now(),
values=[
('processor-usage', 17),
('network-usage', 5),
('storage-usage', 2),
('accelerator-usage', 0),
('crypto-usage', 0),
]))
# Create a Metrics Context resource for one metric group:
mc = client.metrics_contexts.create({
'anticipated-frequency-seconds': 15,
'metric-groups': ['partition-usage'],
})
# Retrieve the metrics for that metric context:
metrics_response = mc.get_metrics()
"""
def __init__(self, hmc, client):
super(FakedMetricsContextManager, self).__init__(
hmc=hmc,
parent=client,
resource_class=FakedMetricsContext,
base_uri=self.api_root + '/services/metrics/context',
oid_prop='fake-id',
uri_prop='fake-uri',
class_value=None,
name_prop=None)
def add(self, properties):
# pylint: disable=useless-super-delegation
"""
Add a faked Metrics Context resource.
Parameters:
properties (dict):
Resource properties, as defined in the description of the
:class:`~zhmcclient_mock.FakedMetricsContext` class.
Special handling and requirements for certain properties:
* 'fake-id' will be auto-generated with a unique value across
all instances of this resource type, if not specified.
* 'fake-uri' will be auto-generated based upon the 'fake-id'
property, if not specified.
Returns:
:class:`~zhmcclient_mock.FakedMetricsContext`: The faked Metrics
Context resource.
"""
return super(FakedMetricsContextManager, self).add(properties)
class FakedMetricsContext(FakedBaseResource):
"""
A faked Metrics Context resource within a faked HMC (see
:class:`zhmcclient_mock.FakedHmc`).
Derived from :class:`zhmcclient_mock.FakedBaseResource`, see there for
common methods and attributes.
The Metrics Context "resource" is really a service and therefore does not
have a data model defined in the :term:`HMC API` book.
In order to fit into the zhmcclient mock framework, the faked Metrics
Context in the zhmcclient mock framework is treated like all other faked
resources and does have a data model.
Data Model:
'fake-id' (:term:`string`): Object ID of the resource.
Initialization: Optional. If omitted, it will be auto-generated.
'fake-uri' (:term:`string`): Resource URI of the resource (used for Get
Metrics operation).
Initialization: Optional. If omitted, it will be auto-generated from
the Object ID.
'anticipated-frequency-seconds' (:term:`integer`):
The number of seconds the client anticipates will elapse between
metrics retrievals using this context. The minimum accepted value is
15.
Initialization: Required.
'metric-groups' (list of :term:`string`):
The metric group names to be returned by a metric retrieval
using this context.
Initialization: Optional. If omitted or the empty list, all metric
groups that are valid for the operational mode of each CPC will be
returned.
"""
def __init__(self, manager, properties):
super(FakedMetricsContext, self).__init__(
manager=manager,
properties=properties)
assert 'anticipated-frequency-seconds' in properties
def get_metric_group_definitions(self):
"""
Get the faked metric group definitions for this context object
that are to be returned from its create operation.
If a 'metric-groups' property had been specified for this context,
only those faked metric group definitions of its manager object that
are in that list are included in the result. Otherwise, all metric
group definitions of its manager are included in the result.
Returns:
iterable of :class:`~zhmcclient_mock.FakedMetricGroupDefinition`: The
faked metric group definitions, in the order they had been added.
"""
hmc = self.manager.parent
group_names = self._properties.get('metric-groups', None)
if not group_names:
group_names = hmc.metric_groups.keys()
mg_defs = []
for group_name in group_names:
if group_name in hmc.metric_groups:
# Use only metric groups that have a definition
mg_def = hmc.metric_groups[group_name]
mg_defs.append(mg_def)
return mg_defs
def get_metric_group_infos(self):
"""
Get the faked metric group definitions for this context object
that are to be returned from its create operation, in the format
needed for the "Create Metrics Context" operation response.
Returns:
"metric-group-infos" JSON object as described for the "Create Metrics
Context "operation response.
"""
mg_defs = self.get_metric_group_definitions()
mg_infos = []
for mg_def in mg_defs:
metric_infos = []
for metric_name, metric_type in mg_def.types:
metric_infos.append({
'metric-name': metric_name,
'metric-type': metric_type,
})
mg_info = {
'group-name': mg_def.name,
'metric-infos': metric_infos,
}
mg_infos.append(mg_info)
return mg_infos
def get_metric_values(self):
"""
Get the faked metrics, for all metric groups and all resources that
have been prepared on the manager object of this context object.
Returns:
iterable of tuple (group_name, iterable of values): The faked
metrics, in the order they had been added, where:
group_name (string): Metric group name.
values (:class:`~zhmcclient_mock.FakedMetricObjectValues`):
The metric values for one resource at one point in time.
"""
hmc = self.manager.parent
group_names = self._properties.get('metric-groups', None)
if not group_names:
group_names = hmc.metric_values.keys()
ret = []
for group_name in group_names:
if group_name in hmc.metric_values:
# Use only metric groups that have metric values
mo_val = hmc.metric_values[group_name]
ret_item = (group_name, mo_val)
ret.append(ret_item)
return ret
def get_metric_values_response(self):
"""
Get the faked metrics, for all metric groups and all resources that
have been prepared on the manager object of this context object, as a
string in the format needed for the "Get Metrics" operation response.
Returns:
"MetricsResponse" string as described for the "Get Metrics"
operation response.
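Example of a returned string (illustrative; one metric group with one
resource, where the third line is the prepared timestamp converted by
timestamp_from_datetime(), and the trailing empty lines terminate the
resource, the group, and the response):
.. code-block:: text
    "partition-usage"
    "/api/partitions/fake-part-1"
    1504613590000
    15,0,1,0,0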
"""
mv_list = self.get_metric_values()
resp_lines = []
for mv in mv_list:
group_name = mv[0]
resp_lines.append('"{}"'.format(group_name))
mo_vals = mv[1]
for mo_val in mo_vals:
resp_lines.append('"{}"'.format(mo_val.resource_uri))
resp_lines.append(
str(timestamp_from_datetime(mo_val.timestamp)))
v_list = []
for _, v in mo_val.values:
if isinstance(v, six.string_types):
v_str = '"{}"'.format(v)
else:
v_str = str(v)
v_list.append(v_str)
v_line = ','.join(v_list)
resp_lines.append(v_line)
resp_lines.append('')
resp_lines.append('')
resp_lines.append('')
return '\n'.join(resp_lines) + '\n'
class FakedMetricGroupDefinition(object):
# pylint: disable=too-few-public-methods
"""
A faked metric group definition (of one metric group).
An object of this class contains the information (in a differently
structured way) of a "metric-group-info" object described for the
"Create Metrics Context" operation in the :term:`HMC API` book.
The following table lists for each type mentioned in the metric group
descriptions in chapter "Metric groups" in the :term:`HMC API` book,
the Python types that are used for representing metric values of that type,
and the metric type strings used in the metric group definitions for
that type:
============================= ====================== ==================
Metric group description type Python type Metric type string
============================= ====================== ==================
Boolean :class:`py:bool` ``boolean-metric``
Byte :term:`integer` ``byte-metric``
Short :term:`integer` ``short-metric``
Integer :term:`integer` ``integer-metric``
Long :term:`integer` ``long-metric``
Double :class:`py:float` ``double-metric``
String, String Enum :term:`unicode string` ``string-metric``
============================= ====================== ==================
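Example (a minimal sketch; the metric names and types are assumed for
the example):
.. code-block:: python
    mgd = FakedMetricGroupDefinition(
        name='partition-usage',
        types=[
            ('processor-usage', 'integer-metric'),
            ('network-usage', 'integer-metric'),
        ])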
"""
def __init__(self, name, types):
"""
Parameters:
name (:term:`string`): Name of the metric group.
types (list of tuple(name, type)): Definition of the metric names
and their types, as follows:
* name (string): The metric name.
* type (string): The metric type string (see table above).
"""
self.name = name
self.types = copy.deepcopy(types)
def __repr__(self):
"""
Return a string with the state of this object, for debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" name = {s.name!r}\n"
" types = {s.types!r}\n"
")".format(classname=self.__class__.__name__, id=id(self), s=self))
return ret
class FakedMetricObjectValues(object):
# pylint: disable=too-few-public-methods
"""
Faked metric values for one resource and one metric group.
An object of this class contains the information (in a structured way)
of an "ObjectValues" item described for the data format of the response
body of the "Get Metrics" operation in the :term:`HMC API` book.
"""
def __init__(self, group_name, resource_uri, timestamp, values):
"""
Parameters:
group_name (:term:`string`): Name of the metric group to which
these metric values apply.
resource_uri (:term:`string`): URI of the resource to which these
metric values apply.
timestamp (datetime): Point in time to which these metric values
apply. Timezone-naive values are converted to timezone-aware values
using the local timezone as determined by
:class:`dateutil:dateutil.tz.tzlocal`.
values (list of tuple(name, value)): The metric values, as follows:
* name (string): The metric name.
* value: The metric value as an object of the Python type listed
in the table in the description of
:class:`~zhmcclient_mock.FakedMetricGroupDefinition`.
"""
if timestamp.tzinfo is None:
timestamp = timestamp.replace(tzinfo=tz.tzlocal()) # new object
self.group_name = group_name
self.resource_uri = resource_uri
self.timestamp = timestamp
self.values = copy.deepcopy(values)
def __repr__(self):
"""
Return a string with the state of this object, for debug purposes.
"""
ret = (
"{classname} at 0x{id:08x} (\n"
" group_name = {s.group_name!r}\n"
" resource_uri = {s.resource_uri!r}\n"
" timestamp = {s.timestamp!r}\n"
" values = {s.values!r}\n"
")".format(classname=self.__class__.__name__, id=id(self), s=self))
return ret
# pylint: disable=too-few-public-methods
"""
A module with various handler classes for the HTTP methods against HMC URIs,
based on the faked HMC.
Most handler classes do not need to be documented, but some of them have
methods that can be mocked in order to provoke non-standard behavior in
the handling of the HTTP methods.
"""
from __future__ import absolute_import
import re
import time
import copy
from requests.utils import unquote
from ._hmc import InputError
__all__ = ['UriHandler', 'LparActivateHandler', 'LparDeactivateHandler',
'LparLoadHandler', 'HTTPError', 'URIS']
# CPC status values
CPC_ACTIVE_STATUSES = (
"active",
"operating",
"degraded",
"acceptable",
"exceptions",
"service-required",
"service",
)
CPC_INACTIVE_STATUSES = (
"not-operating",
"no-power",
)
CPC_BAD_STATUSES = (
"not-communicating",
"status-check",
)
class HTTPError(Exception):
"""
Exception that will be turned into an HTTP error response message.
"""
def __init__(self, method, uri, http_status, reason, message):
super(HTTPError, self).__init__()
self.method = method
self.uri = uri
self.http_status = http_status
self.reason = reason
self.message = message
def response(self):
"""
Return the JSON object for the HTTP error response message.
"""
return {
'request-method': self.method,
'request-uri': self.uri,
'http-status': self.http_status,
'reason': self.reason,
'message': self.message,
}
class ConnectionError(Exception):
# pylint: disable=redefined-builtin
"""
Indicates a connection error to the faked HMC.
This mimics the requests.exception.ConnectionError.
"""
def __init__(self, message):
super(ConnectionError, self).__init__()
self.message = message
class InvalidResourceError(HTTPError):
"""
HTTP error indicating an invalid resource.
"""
def __init__(self, method, uri, handler_class=None, reason=1,
resource_uri=None):
if handler_class is not None:
handler_txt = " (handler class %s)" % handler_class.__name__
else:
handler_txt = ""
if not resource_uri:
resource_uri = uri
super(InvalidResourceError, self).__init__(
method, uri,
http_status=404,
reason=reason,
message="Unknown resource with URI: %s%s" %
(resource_uri, handler_txt))
class InvalidMethodError(HTTPError):
"""
HTTP error indicating an invalid HTTP method.
"""
def __init__(self, method, uri, handler_class=None):
if handler_class is not None:
handler_txt = "handler class %s" % handler_class.__name__
else:
handler_txt = "no handler class"
super(InvalidMethodError, self).__init__(
method, uri,
http_status=404,
reason=1,
message="Invalid HTTP method %s on URI: %s %s" %
(method, uri, handler_txt))
class BadRequestError(HTTPError):
"""
HTTP error indicating an invalid client request (status 400).
"""
def __init__(self, method, uri, reason, message):
super(BadRequestError, self).__init__(
method, uri,
http_status=400,
reason=reason,
message=message)
class ConflictError(HTTPError):
"""
HTTP error indicating a conflict in the client request (status 409).
"""
def __init__(self, method, uri, reason, message):
super(ConflictError, self).__init__(
method, uri,
http_status=409,
reason=reason,
message=message)
class CpcNotInDpmError(ConflictError):
"""
Indicates that the operation requires DPM mode but the CPC is not in DPM
mode.
Out of the set of operations that only work in DPM mode, this error is used
only for the following subset:
- Create Partition
- Create Hipersocket
- Start CPC
- Stop CPC
- Set Auto-Start List
"""
def __init__(self, method, uri, cpc):
super(CpcNotInDpmError, self).__init__(
method, uri, reason=5,
message="CPC is not in DPM mode: %s" % cpc.uri)
class CpcInDpmError(ConflictError):
"""
Indicates that the operation requires to be not in DPM mode, but the CPC is
in DPM mode.
Out of the set of operations that do not work in DPM mode, this error is
used only for the following subset:
- Activate CPC (not yet implemented in zhmcclient)
- Deactivate CPC (not yet implemented in zhmcclient)
- Import Profiles (not yet implemented in this URI handler)
- Export Profiles (not yet implemented in this URI handler)
"""
def __init__(self, method, uri, cpc):
super(CpcInDpmError, self).__init__(
method, uri, reason=4,
message="CPC is in DPM mode: %s" % cpc.uri)
class ServerError(HTTPError):
"""
HTTP error indicating a server error (status 500).
"""
def __init__(self, method, uri, reason, message):
super(ServerError, self).__init__(
method, uri,
http_status=500,
reason=reason,
message=message)
def parse_query_parms(method, uri, query_str):
"""
Parse the specified query parms string and return a dictionary of query
parameters. The key of each dict item is the query parameter name, and the
value of each dict item is the query parameter value. If a query parameter
shows up more than once, the resulting dict item value is a list of all
those values.
query_str is the query string from the URL, everything after the '?'. If
it is empty or None, None is returned.
If a query parameter is not of the format "name=value", an HTTPError 400,1
is raised.
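Example (illustrative):
.. code-block:: python
    parse_query_parms('GET', '/api/cpcs', 'name=a%20b&status=active')
    # returns: {'name': 'a b', 'status': 'active'}
    parse_query_parms('GET', '/api/cpcs', 'name=a&name=b')
    # returns: {'name': ['a', 'b']}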
"""
if not query_str:
return None
query_parms = {}
for query_item in query_str.split('&'):
# Example for these items: 'name=a%20b'
if query_item == '':
continue
items = query_item.split('=')
if len(items) != 2:
raise BadRequestError(
method, uri, reason=1,
message="Invalid format for URI query parameter: {!r} "
"(valid format is: 'name=value').".
format(query_item))
name = unquote(items[0])
value = unquote(items[1])
if name in query_parms:
existing_value = query_parms[name]
if not isinstance(existing_value, list):
query_parms[name] = []
query_parms[name].append(existing_value)
query_parms[name].append(value)
else:
query_parms[name] = value
return query_parms
def check_required_fields(method, uri, body, field_names):
"""
Check required fields in the request body.
Raises:
BadRequestError with reason 3: Missing request body
BadRequestError with reason 5: Missing required field in request body
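Example (illustrative):
.. code-block:: python
    # Raises BadRequestError with reason 3 if body is None, and with
    # reason 5 if body lacks 'name' or 'type':
    check_required_fields('POST', '/api/users', body, ['name', 'type'])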
"""
# Check presence of request body
if body is None:
raise BadRequestError(method, uri, reason=3,
message="Missing request body")
# Check required input fields
for field_name in field_names:
if field_name not in body:
raise BadRequestError(method, uri, reason=5,
message="Missing required field in request "
"body: {}".format(field_name))
def check_valid_cpc_status(method, uri, cpc):
"""
Check that the CPC is in a valid status, as indicated by its 'status'
property.
If the Cpc object does not have a 'status' property set, this function does
nothing (in order to make the mock support easy to use).
Raises:
ConflictError with reason 1: The CPC itself has been targeted by the
operation.
ConflictError with reason 6: The CPC is hosting the resource targeted by
the operation.
"""
status = cpc.properties.get('status', None)
if status is None:
# Do nothing if no status is set on the faked CPC
return
valid_statuses = ['active', 'service-required', 'degraded', 'exceptions']
if status not in valid_statuses:
if uri.startswith(cpc.uri):
# The uri targets the CPC (either is the CPC uri or some
# multiplicity under the CPC uri)
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted CPC {} has a status "
"that is not valid for the operation: {}".
format(cpc.name, status))
# The uri targets a resource hosted by the CPC
raise ConflictError(method, uri, reason=6,
message="The operation cannot be performed "
"because CPC {} hosting the targeted resource "
"has a status that is not valid for the "
"operation: {}".
format(cpc.name, status))
def check_partition_status(method, uri, partition, valid_statuses=None,
invalid_statuses=None):
"""
Check that the partition is in one of the valid statuses (if specified)
and not in one of the invalid statuses (if specified), as indicated by its
'status' property.
If the Partition object does not have a 'status' property set, this
function does nothing (in order to make the mock support easy to use).
Raises:
ConflictError with reason 1 (reason 6 is not used for partitions).
"""
status = partition.properties.get('status', None)
if status is None:
# Do nothing if no status is set on the faked partition
return
if (valid_statuses and status not in valid_statuses) or \
        (invalid_statuses and status in invalid_statuses):
if uri.startswith(partition.uri):
# The uri targets the partition (either is the partition uri or
# some multiplicity under the partition uri)
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted partition {} has a "
"status that is not valid for the operation: "
"{}".
format(partition.name, status))
# The uri targets a resource hosted by the partition
raise ConflictError(method, uri,
reason=1, # Note: 6 not used for partitions
message="The operation cannot be performed "
"because partition {} hosting the targeted "
"resource has a status that is not valid for "
"the operation: {}".
format(partition.name, status))
def check_writable(method, uri, body, writeable):
"""
Check that the body specifies only writeable properties.
Raises:
BadRequestError with reason 6.
"""
for prop in body:
if prop not in writeable:
raise BadRequestError(
method, uri, reason=6,
message="Property is not writable: {!r}".format(prop))
class UriHandler(object):
"""
Handle HTTP methods against a set of known URIs and invoke respective
handlers.
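Example (a minimal sketch; assumes that the module-level URIS list
maps URI patterns such as '/api/version' to their handler classes):
.. code-block:: python
    urihandler = UriHandler(URIS)
    version_info = urihandler.get(hmc, '/api/version', True)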
"""
def __init__(self, uris):
self._uri_handlers = []  # list of (uri-pattern, handler-class) tuples
for uri, handler_class in uris:
uri_pattern = re.compile('^' + uri + '$')
tup = (uri_pattern, handler_class)
self._uri_handlers.append(tup)
def handler(self, uri, method):
"""
Return the handler class and URI parameter values for a URI and
HTTP method.
"""
for uri_pattern, handler_class in self._uri_handlers:
m = uri_pattern.match(uri)
if m:
uri_parms = m.groups()
return handler_class, uri_parms
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
def get(self, hmc, uri, logon_required):
"""
Process an HTTP GET method on a URI.
"""
if not hmc.enabled:
raise ConnectionError("HMC is not enabled.")
handler_class, uri_parms = self.handler(uri, 'GET')
if not getattr(handler_class, 'get', None):
raise InvalidMethodError('GET', uri, handler_class)
return handler_class.get('GET', hmc, uri, uri_parms, logon_required)
def post(self, hmc, uri, body, logon_required, wait_for_completion):
"""
Process an HTTP POST method on a URI.
"""
if not hmc.enabled:
raise ConnectionError("HMC is not enabled.")
handler_class, uri_parms = self.handler(uri, 'POST')
if not getattr(handler_class, 'post', None):
raise InvalidMethodError('POST', uri, handler_class)
return handler_class.post('POST', hmc, uri, uri_parms, body,
logon_required, wait_for_completion)
def delete(self, hmc, uri, logon_required):
"""
Process an HTTP DELETE method on a URI.
"""
if not hmc.enabled:
raise ConnectionError("HMC is not enabled.")
handler_class, uri_parms = self.handler(uri, 'DELETE')
if not getattr(handler_class, 'delete', None):
raise InvalidMethodError('DELETE', uri, handler_class)
handler_class.delete('DELETE', hmc, uri, uri_parms, logon_required)
class GenericGetPropertiesHandler(object):
"""
Handler class for generic get of resource properties.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get <resource> Properties."""
try:
resource = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
return resource.properties
class GenericUpdatePropertiesHandler(object):
"""
Handler class for generic update of resource properties.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update <resource> Properties."""
assert wait_for_completion is True # async not supported yet
try:
resource = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
resource.update(body)
class GenericDeleteHandler(object):
"""
Handler class for generic delete of a resource.
"""
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete <resource>."""
try:
resource = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
resource.manager.remove(resource.oid)
class VersionHandler(object):
"""
Handler class for operation: Get version.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get version."""
api_major, api_minor = hmc.api_version.split('.')
return {
'hmc-name': hmc.hmc_name,
'hmc-version': hmc.hmc_version,
'api-major-version': int(api_major),
'api-minor-version': int(api_minor),
}
class ConsoleHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on Console resource.
"""
pass
class ConsoleRestartHandler(object):
"""
Handler class for Console operation: Restart Console.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Restart Console."""
assert wait_for_completion is True # synchronous operation
console_uri = '/api/console'
try:
hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
hmc.disable()
time.sleep(5)
hmc.enable()
# Note: The HTTP status 202 that the real HMC operation returns is
# not visible for the caller of FakedSession (or Session).
class ConsoleShutdownHandler(object):
"""
Handler class for Console operation: Shutdown Console.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Shutdown Console."""
assert wait_for_completion is True # synchronous operation
console_uri = '/api/console'
try:
hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
hmc.disable()
# Note: The HTTP status 202 that the real HMC operation returns is
# not visible for the caller of FakedSession (or Session).
class ConsoleMakePrimaryHandler(object):
"""
Handler class for Console operation: Make Console Primary.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Make Console Primary."""
assert wait_for_completion is True # synchronous operation
console_uri = '/api/console'
try:
hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Nothing to do, as long as the faked HMC does not need to know whether
# it is primary or alternate.
class ConsoleReorderUserPatternsHandler(object):
"""
Handler class for Console operation: Reorder User Patterns.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Reorder User Patterns."""
assert wait_for_completion is True # synchronous operation
console_uri = '/api/console'
try:
console = hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['user-pattern-uris'])
new_order_uris = body['user-pattern-uris']
objs = console.user_patterns.list()
obj_by_uri = {}
for obj in objs:
obj_by_uri[obj.uri] = obj
# Perform the reordering in the faked HMC:
for _uri in new_order_uris:
obj = obj_by_uri[_uri]
console.user_patterns.remove(obj.oid) # remove from old position
console.user_patterns.add(obj.properties) # append to end
class ConsoleGetAuditLogHandler(object):
"""
Handler class for Console operation: Get Console Audit Log.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get Console Audit Log."""
console_uri = '/api/console'
try:
hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
resp = []
# TODO: Add the ability to return audit log entries in mock support.
return resp
class ConsoleGetSecurityLogHandler(object):
"""
Handler class for Console operation: Get Console Security Log.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get Console Security Log."""
console_uri = '/api/console'
try:
hmc.lookup_by_uri(console_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
resp = []
# TODO: Add the ability to return security log entries in mock support.
return resp
class ConsoleListUnmanagedCpcsHandler(object):
"""
Handler class for Console operation: List Unmanaged CPCs.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Unmanaged CPCs."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_ucpcs = []
filter_args = parse_query_parms(method, uri, query_str)
for ucpc in console.unmanaged_cpcs.list(filter_args):
result_ucpc = {}
for prop in ucpc.properties:
if prop in ('object-uri', 'name'):
result_ucpc[prop] = ucpc.properties[prop]
result_ucpcs.append(result_ucpc)
return {'cpcs': result_ucpcs}
class ConsoleListPermittedPartitionsHandler(object):
"""
Handler class for Console operation: List Permitted Partitions (DPM).
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Permitted Partitions."""
query_str = uri_parms[0]
filter_args = parse_query_parms(method, uri, query_str)
result_partitions = []
for cpc in hmc.cpcs.list():
# Reflect the result of listing the partitions
if cpc.dpm_enabled:
# Apply the CPC name filter, if specified. Use a per-CPC copy of
# the filter args so that removing 'cpc-name' does not affect the
# filtering of subsequent CPCs:
part_filter_args = dict(filter_args) if filter_args else None
if part_filter_args and 'cpc-name' in part_filter_args:
    if not re.match(part_filter_args['cpc-name'], cpc.name):
        continue
    del part_filter_args['cpc-name']
for partition in cpc.partitions.list(part_filter_args):
result_partition = {}
result_partition['object-uri'] = \
partition.properties.get('object-uri', None)
result_partition['name'] = \
partition.properties.get('name', None)
result_partition['type'] = \
partition.properties.get('type', None)
result_partition['status'] = \
partition.properties.get('status', None)
result_partition['has-unacceptable-status'] = \
partition.properties.get(
'has-unacceptable-status', None)
result_partition['cpc-name'] = cpc.name
result_partition['cpc-object-uri'] = cpc.uri
result_partition['se-version'] = \
cpc.properties.get('se-version', None)
result_partitions.append(result_partition)
return {'partitions': result_partitions}
class ConsoleListPermittedLparsHandler(object):
"""
Handler class for Console operation: List Permitted LPARs (classic).
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Permitted LPARs."""
query_str = uri_parms[0]
filter_args = parse_query_parms(method, uri, query_str)
result_lpars = []
for cpc in hmc.cpcs.list():
# Reflect the result of listing the LPARs
if not cpc.dpm_enabled:
# Apply the CPC name filter, if specified. Use a per-CPC copy of
# the filter args so that removing 'cpc-name' does not affect the
# filtering of subsequent CPCs:
lpar_filter_args = dict(filter_args) if filter_args else None
if lpar_filter_args and 'cpc-name' in lpar_filter_args:
    if not re.match(lpar_filter_args['cpc-name'], cpc.name):
        continue
    del lpar_filter_args['cpc-name']
for lpar in cpc.lpars.list(lpar_filter_args):
result_lpar = {}
result_lpar['object-uri'] = \
lpar.properties.get('object-uri', None)
result_lpar['name'] = \
lpar.properties.get('name', None)
result_lpar['activation-mode'] = \
lpar.properties.get('activation-mode', None)
result_lpar['status'] = \
lpar.properties.get('status', None)
result_lpar['has-unacceptable-status'] = \
lpar.properties.get(
'has-unacceptable-status', None)
result_lpar['cpc-name'] = cpc.name
result_lpar['cpc-object-uri'] = cpc.uri
result_lpar['se-version'] = \
cpc.properties.get('se-version', None)
result_lpars.append(result_lpar)
return {'logical-partitions': result_lpars}
class UsersHandler(object):
"""
Handler class for HTTP methods on set of User resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Users."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_users = []
filter_args = parse_query_parms(method, uri, query_str)
for user in console.users.list(filter_args):
result_user = {}
for prop in user.properties:
if prop in ('object-uri', 'name', 'type'):
result_user[prop] = user.properties[prop]
result_users.append(result_user)
return {'users': result_users}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create User."""
assert wait_for_completion is True # synchronous operation
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body,
['name', 'type', 'authentication-type'])
properties = copy.deepcopy(body)
user_name = properties['name']
properties.setdefault('allow-management-interfaces', True)
properties.setdefault('allow-remote-access', True)
properties.setdefault('default-group-uri', None)
properties.setdefault('description', '')
properties.setdefault('disable-delay', 1)
properties.setdefault('disabled', False)
properties.setdefault('disruptive-pw-required', True)
properties.setdefault('disruptive-text-required', False)
properties.setdefault('email-address', None)
properties.setdefault('force-password-change', False)
properties.setdefault('force-shared-secret-key-change', None)
properties.setdefault('idle-timeout', 0)
properties.setdefault('inactivity-timeout', 0)
properties.setdefault('is-locked', False)
properties.setdefault('max-failed-logins', 3)
properties.setdefault('max-web-services-api-sessions', 1000)
properties.setdefault('min-pw-change-time', 0)
properties.setdefault('multi-factor-authentication-required', False)
properties.setdefault('password-expires', -1)
properties.setdefault('replication-overwrite-possible', False)
properties.setdefault('session-timeout', 0)
properties.setdefault('user-roles', [])
properties.setdefault('userid-on-ldap-server', None)
properties.setdefault('verify-timeout', 15)
properties.setdefault('web-services-api-session-idle-timeout', 360)
auth_type = properties['authentication-type']
if auth_type == 'local':
check_required_fields(method, uri, body,
['password', 'password-rule-uri'])
elif auth_type == 'ldap':
check_required_fields(method, uri, body,
['ldap-server-definition-uri'])
else:
raise BadRequestError(
method, uri, reason=4,
message="Invalid authentication-type: {!r}".format(auth_type))
user_type = properties['type']
if user_type == 'standard':
pass
elif user_type == 'template':
pass
elif user_type == 'pattern-based':
pass
elif user_type == 'system-defined':
raise BadRequestError(
method, uri, reason=4,
message="System-defined users cannot be created: {!r}".
format(user_name))
else:
raise BadRequestError(
method, uri, reason=4,
message="Invalid user type: {!r}".format(user_type))
new_user = console.users.add(properties)
return {'object-uri': new_user.uri}
class UserHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single User resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update StoragePort Properties."""
try:
user = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'disabled',
'authentication-type',
'password-rule-uri',
'password',
'force-password-change',
'ldap-server-definition-uri',
'userid-on-ldap-server',
'session-timeout',
'verify-timeout',
'timeout',
'idle-timeout',
'min-pw-change-time',
'max-failed-logins',
'disable-delay',
'inactivity-timeout',
'disruptive-pw-required',
'disruptive-text-required',
'allow-remote-access',
'allow-management-interfaces',
'max-web-services-api-sessions',
'web-services-api-session-idle-timeout',
'default-group-uri',
'multi-factor-authentication-required',
'force-shared-secret-key-change',
'email-address',
'mfa-types',
'primary-mfa-server-definition-uri',
'backup-mfa-server-definition-uri',
'mfa-policy',
'mfa-userid',
'mfa-userid-override',
])
user.update(body)
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete User."""
try:
user = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check user type
type_ = user.properties['type']
if type_ == 'pattern-based':
raise BadRequestError(
method, uri, reason=312,
message="Cannot delete pattern-based user {!r}".
format(user.name))
# Delete the mocked resource
user.manager.remove(user.oid)
class UserAddUserRoleHandler(object):
"""
Handler class for operation: Add User Role to User.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Add User Role to User."""
assert wait_for_completion is True # synchronous operation
user_oid = uri_parms[0]
user_uri = '/api/users/' + user_oid
try:
user = hmc.lookup_by_uri(user_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['user-role-uri'])
user_type = user.properties['type']
if user_type in ('pattern-based', 'system-defined'):
raise BadRequestError(
method, uri, reason=314,
message="Cannot add user role to user of type {}: {}".
format(user_type, user_uri))
user_role_uri = body['user-role-uri']
try:
hmc.lookup_by_uri(user_role_uri)
except KeyError:
new_exc = InvalidResourceError(method, user_role_uri, reason=2)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if user.properties.get('user-roles', None) is None:
user.properties['user-roles'] = []
user.properties['user-roles'].append(user_role_uri)
class UserRemoveUserRoleHandler(object):
"""
Handler class for operation: Remove User Role from User.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Remove User Role from User."""
assert wait_for_completion is True # synchronous operation
user_oid = uri_parms[0]
user_uri = '/api/users/' + user_oid
try:
user = hmc.lookup_by_uri(user_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['user-role-uri'])
user_type = user.properties['type']
if user_type in ('pattern-based', 'system-defined'):
raise BadRequestError(
method, uri, reason=314,
message="Cannot remove user role from user of type {}: {}".
format(user_type, user_uri))
user_role_uri = body['user-role-uri']
try:
user_role = hmc.lookup_by_uri(user_role_uri)
except KeyError:
new_exc = InvalidResourceError(method, user_role_uri, reason=2)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if user.properties.get('user-roles', None) is None \
or user_role_uri not in user.properties['user-roles']:
raise ConflictError(
method, uri, reason=316,
message="User {!r} does not have User Role {!r}".
format(user.name, user_role.name))
user.properties['user-roles'].remove(user_role_uri)
class UserRolesHandler(object):
"""
Handler class for HTTP methods on set of UserRole resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List User Roles."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_user_roles = []
filter_args = parse_query_parms(method, uri, query_str)
for user_role in console.user_roles.list(filter_args):
result_user_role = {}
for prop in user_role.properties:
if prop in ('object-uri', 'name', 'type'):
result_user_role[prop] = user_role.properties[prop]
result_user_roles.append(result_user_role)
return {'user-roles': result_user_roles}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create User Role."""
assert wait_for_completion is True # synchronous operation
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['name'])
if 'type' in body:
raise BadRequestError(
method, uri, reason=6,
message="The 'type' property cannot be specified when "
"creating a user role (type: {!r}, uri: {!r})".
format(body['type'], uri))
properties = copy.deepcopy(body)
# createable/updateable
properties.setdefault('description', '')
if 'associated-system-defined-user-role-uri' not in properties:
# Use the default
uroles = console.user_roles.list(
filter_args=dict(name='hmc-operator-tasks'))
if not uroles:
new_exc = ServerError(
method, uri, reason=99,
message="Mock setup error: System-defined user role "
"'hmc-operator-tasks' does not exist")
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.ServerError
urole_uri = uroles[0].uri
properties['associated-system-defined-user-role-uri'] = urole_uri
properties.setdefault('is-inheritance-enabled', False)
# read-only
properties.setdefault('type', 'user-defined')
properties.setdefault('replication-overwrite-possible', True)
properties.setdefault('permissions', [])
new_user_role = console.user_roles.add(properties)
return {'object-uri': new_user_role.uri}
class UserRoleHandler(GenericGetPropertiesHandler,
GenericDeleteHandler):
"""
Handler class for HTTP methods on single UserRole resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update StoragePort Properties."""
try:
user_role = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'associated-system-defined-user-role-uri',
'is-inheritance-enabled',
])
user_role.update(body)
# TODO: Add delete() for Delete UserRole that rejects system-defined type
class UserRoleAddPermissionHandler(object):
"""
Handler class for operation: Add Permission to User Role.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Add Permission to User Role."""
assert wait_for_completion is True # synchronous operation
user_role_oid = uri_parms[0]
user_role_uri = '/api/user-roles/' + user_role_oid
try:
user_role = hmc.lookup_by_uri(user_role_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body,
['permitted-object', 'permitted-object-type'])
# Reject if User Role is system-defined:
if user_role.properties['type'] == 'system-defined':
raise BadRequestError(
method, uri, reason=314, message="Cannot add permission to "
"system-defined user role: {}".format(user_role_uri))
# Apply defaults, so our internally stored copy has all fields:
permission = copy.deepcopy(body)
if 'include-members' not in permission:
permission['include-members'] = False
if 'view-only-mode' not in permission:
permission['view-only-mode'] = True
# Add the permission to its store (the faked User Role object):
if user_role.properties.get('permissions', None) is None:
user_role.properties['permissions'] = []
user_role.properties['permissions'].append(permission)
class UserRoleRemovePermissionHandler(object):
"""
Handler class for operation: Remove Permission from User Role.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Remove Permission from User Role."""
assert wait_for_completion is True # synchronous operation
user_role_oid = uri_parms[0]
user_role_uri = '/api/user-roles/' + user_role_oid
try:
user_role = hmc.lookup_by_uri(user_role_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body,
['permitted-object', 'permitted-object-type'])
# Reject if User Role is system-defined:
if user_role.properties['type'] == 'system-defined':
raise BadRequestError(
method, uri, reason=314, message="Cannot remove permission "
"from system-defined user role: {}".format(user_role_uri))
# Apply defaults, so we can locate it based upon all fields:
permission = copy.deepcopy(body)
if 'include-members' not in permission:
permission['include-members'] = False
if 'view-only-mode' not in permission:
permission['view-only-mode'] = True
# Remove the permission from its store (the faked User Role object):
if user_role.properties.get('permissions', None) is not None:
user_role.properties['permissions'].remove(permission)
class TasksHandler(object):
"""
Handler class for HTTP methods on set of Task resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Tasks."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_tasks = []
filter_args = parse_query_parms(method, uri, query_str)
for task in console.tasks.list(filter_args):
result_task = {}
for prop in task.properties:
if prop in ('element-uri', 'name'):
result_task[prop] = task.properties[prop]
result_tasks.append(result_task)
return {'tasks': result_tasks}
class TaskHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single Task resource.
"""
pass
class UserPatternsHandler(object):
"""
Handler class for HTTP methods on set of UserPattern resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List User Patterns."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_user_patterns = []
filter_args = parse_query_parms(method, uri, query_str)
for user_pattern in console.user_patterns.list(filter_args):
result_user_pattern = {}
for prop in user_pattern.properties:
if prop in ('element-uri', 'name', 'type'):
result_user_pattern[prop] = user_pattern.properties[prop]
result_user_patterns.append(result_user_pattern)
return {'user-patterns': result_user_patterns}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create User Pattern."""
assert wait_for_completion is True # synchronous operation
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body,
['name', 'pattern', 'type', 'retention-time',
'user-template-uri'])
new_user_pattern = console.user_patterns.add(body)
return {'element-uri': new_user_pattern.uri}
class UserPatternHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler,
GenericDeleteHandler):
"""
Handler class for HTTP methods on single UserPattern resource.
"""
pass
class PasswordRulesHandler(object):
"""
Handler class for HTTP methods on set of PasswordRule resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Password Rules."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_password_rules = []
filter_args = parse_query_parms(method, uri, query_str)
for password_rule in console.password_rules.list(filter_args):
result_password_rule = {}
for prop in password_rule.properties:
if prop in ('element-uri', 'name', 'type'):
result_password_rule[prop] = password_rule.properties[prop]
result_password_rules.append(result_password_rule)
return {'password-rules': result_password_rules}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Password Rule."""
assert wait_for_completion is True # synchronous operation
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['name'])
properties = copy.deepcopy(body)
# createable/updateable
properties.setdefault('description', '')
properties.setdefault('expiration', 0)
properties.setdefault('min-length', 8)
properties.setdefault('max-length', 256)
properties.setdefault('consecutive-characters', 0)
properties.setdefault('similarity-count', 0)
properties.setdefault('history-count', 0)
properties.setdefault('case-sensitive', False)
properties.setdefault('character-rules', [])
# read-only
properties.setdefault('type', 'user-defined')
properties.setdefault('replication-overwrite-possible', True)
new_password_rule = console.password_rules.add(properties)
return {'element-uri': new_password_rule.uri}
class PasswordRuleHandler(GenericGetPropertiesHandler,
GenericDeleteHandler):
"""
Handler class for HTTP methods on single PasswordRule resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update PasswordRule Properties."""
try:
pw_rule = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'expiration',
'min-length',
'max-length',
'consecutive-characters',
'similarity-count',
'history-count',
'case-sensitive',
'character-rules',
])
pw_rule.update(body)
class LdapServerDefinitionsHandler(object):
"""
Handler class for HTTP methods on set of LdapServerDefinition resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List LDAP Server Definitions."""
query_str = uri_parms[0]
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_ldap_srv_defs = []
filter_args = parse_query_parms(method, uri, query_str)
for ldap_srv_def in console.ldap_server_definitions.list(filter_args):
result_ldap_srv_def = {}
for prop in ldap_srv_def.properties:
if prop in ('element-uri', 'name', 'type'):
result_ldap_srv_def[prop] = ldap_srv_def.properties[prop]
result_ldap_srv_defs.append(result_ldap_srv_def)
return {'ldap-server-definitions': result_ldap_srv_defs}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create LDAP Server Definition."""
assert wait_for_completion is True # synchronous operation
try:
console = hmc.consoles.lookup_by_oid(None)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['name'])
new_ldap_srv_def = console.ldap_server_definitions.add(body)
return {'element-uri': new_ldap_srv_def.uri}
class LdapServerDefinitionHandler(GenericGetPropertiesHandler,
GenericDeleteHandler):
"""
Handler class for HTTP methods on single LdapServerDefinition resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update LdapServerDefinition Properties."""
try:
lsd = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'primary-hostname-ipaddr',
'connection-port',
'backup-hostname-ipaddr',
'tolerate-untrusted-certificates',
'bind-distinguished-name',
'bind-password',
'location-method',
'search-distinguished-name',
'search-scope',
'search-filter',
])
lsd.update(body)
class CpcsHandler(object):
"""
Handler class for HTTP methods on set of Cpc resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List CPCs."""
query_str = uri_parms[0]
result_cpcs = []
filter_args = parse_query_parms(method, uri, query_str)
for cpc in hmc.cpcs.list(filter_args):
result_cpc = {}
for prop in cpc.properties:
if prop in ('object-uri', 'name', 'status'):
result_cpc[prop] = cpc.properties[prop]
result_cpcs.append(result_cpc)
return {'cpcs': result_cpcs}
class CpcHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single Cpc resource.
"""
pass
class CpcSetPowerSaveHandler(object):
"""
Handler class for operation: Set CPC Power Save.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Set CPC Power Save (any CPC mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['power-saving'])
power_saving = body['power-saving']
if power_saving not in ['high-performance', 'low-power', 'custom']:
raise BadRequestError(method, uri, reason=7,
message="Invalid power-saving value: %r" %
power_saving)
cpc.properties['cpc-power-saving'] = power_saving
cpc.properties['cpc-power-saving-state'] = power_saving
cpc.properties['zcpc-power-saving'] = power_saving
cpc.properties['zcpc-power-saving-state'] = power_saving
class CpcSetPowerCappingHandler(object):
"""
Handler class for operation: Set CPC Power Capping.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Set CPC Power Capping (any CPC mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['power-capping-state'])
power_capping_state = body['power-capping-state']
power_cap_current = body.get('power-cap-current', None)
if power_capping_state not in ['disabled', 'enabled', 'custom']:
raise BadRequestError(method, uri, reason=7,
message="Invalid power-capping-state value: "
"%r" % power_capping_state)
if power_capping_state == 'enabled' and power_cap_current is None:
raise BadRequestError(method, uri, reason=7,
message="Power-cap-current must be provided "
"when enabling power capping")
cpc.properties['cpc-power-capping-state'] = power_capping_state
cpc.properties['cpc-power-cap-current'] = power_cap_current
cpc.properties['zcpc-power-capping-state'] = power_capping_state
cpc.properties['zcpc-power-cap-current'] = power_cap_current
class CpcGetEnergyManagementDataHandler(object):
"""
Handler class for operation: Get CPC Energy Management Data.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get CPC Energy Management Data (any CPC mode)."""
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
energy_props = {
'cpc-power-cap-allowed':
cpc.properties.get('cpc-power-cap-allowed'),
'cpc-power-cap-current':
cpc.properties.get('cpc-power-cap-current'),
'cpc-power-cap-maximum':
cpc.properties.get('cpc-power-cap-maximum'),
'cpc-power-cap-minimum':
cpc.properties.get('cpc-power-cap-minimum'),
'cpc-power-capping-state':
cpc.properties.get('cpc-power-capping-state'),
'cpc-power-consumption':
cpc.properties.get('cpc-power-consumption'),
'cpc-power-rating':
cpc.properties.get('cpc-power-rating'),
'cpc-power-save-allowed':
cpc.properties.get('cpc-power-save-allowed'),
'cpc-power-saving':
cpc.properties.get('cpc-power-saving'),
'cpc-power-saving-state':
cpc.properties.get('cpc-power-saving-state'),
'zcpc-ambient-temperature':
cpc.properties.get('zcpc-ambient-temperature'),
'zcpc-dew-point':
cpc.properties.get('zcpc-dew-point'),
'zcpc-exhaust-temperature':
cpc.properties.get('zcpc-exhaust-temperature'),
'zcpc-heat-load':
cpc.properties.get('zcpc-heat-load'),
'zcpc-heat-load-forced-air':
cpc.properties.get('zcpc-heat-load-forced-air'),
'zcpc-heat-load-water':
cpc.properties.get('zcpc-heat-load-water'),
'zcpc-humidity':
cpc.properties.get('zcpc-humidity'),
'zcpc-maximum-potential-heat-load':
cpc.properties.get('zcpc-maximum-potential-heat-load'),
'zcpc-maximum-potential-power':
cpc.properties.get('zcpc-maximum-potential-power'),
'zcpc-power-cap-allowed':
cpc.properties.get('zcpc-power-cap-allowed'),
'zcpc-power-cap-current':
cpc.properties.get('zcpc-power-cap-current'),
'zcpc-power-cap-maximum':
cpc.properties.get('zcpc-power-cap-maximum'),
'zcpc-power-cap-minimum':
cpc.properties.get('zcpc-power-cap-minimum'),
'zcpc-power-capping-state':
cpc.properties.get('zcpc-power-capping-state'),
'zcpc-power-consumption':
cpc.properties.get('zcpc-power-consumption'),
'zcpc-power-rating':
cpc.properties.get('zcpc-power-rating'),
'zcpc-power-save-allowed':
cpc.properties.get('zcpc-power-save-allowed'),
'zcpc-power-saving':
cpc.properties.get('zcpc-power-saving'),
'zcpc-power-saving-state':
cpc.properties.get('zcpc-power-saving-state'),
}
cpc_data = {
'error-occurred': False,
'object-uri': cpc.uri,
'object-id': cpc.oid,
'class': 'cpcs',
'properties': energy_props,
}
result = {'objects': [cpc_data]}
return result
class CpcStartHandler(object):
"""
Handler class for operation: Start CPC (DPM mode).
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Start CPC (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
cpc.properties['status'] = 'active'
class CpcStopHandler(object):
"""
Handler class for operation: Stop CPC (DPM mode).
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Stop CPC (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
cpc.properties['status'] = 'not-operating'
class CpcActivateHandler(object):
"""
Handler class for operation: Activate CPC (classic mode)
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Activate CPC (requires classic mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['activation-profile-name'])
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
profile_name = body['activation-profile-name']
force = body.get('force', False)
status = cpc.properties['status']
if status in CPC_BAD_STATUSES:
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted CPC {} has a bad status "
"{!r}".
format(cpc.name, status))
if status in CPC_ACTIVE_STATUSES and not force:
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted CPC {} already has an "
"active status {!r} and force is not specified".
format(cpc.name, status))
cpc.properties['status'] = 'operating'
cpc.properties['last-used-activation-profile'] = profile_name
# TODO: Set last-used-iocds from profile
class CpcDeactivateHandler(object):
"""
Handler class for operation: Deactivate CPC (classic mode).
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Deactivate CPC (requires classic mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
force = body.get('force', False)
status = cpc.properties['status']
if status in CPC_BAD_STATUSES:
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted CPC {} has a bad status "
"{!r}".
format(cpc.name, status))
if status in CPC_ACTIVE_STATUSES and not force:
raise ConflictError(method, uri, reason=1,
message="The operation cannot be performed "
"because the targeted CPC {} has an active "
"status {!r} and force is not specified".
format(cpc.name, status))
cpc.properties['status'] = 'no-power'
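# Illustrative usage (not part of the mock): a sketch of the classic-mode
# activate/deactivate handlers above, assuming zhmcclient's Cpc.activate()
# and Cpc.deactivate() convenience methods and a classic-mode CPC 'CPC2'.
def _example_activate_deactivate_cpc(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC2')
    # 'force' is needed if the CPC is already in an active status
    cpc.activate(activation_profile_name='CPC2PROF', force=True)
    cpc.deactivate(force=True)  # status becomes 'no-power'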
class CpcImportProfilesHandler(object):
"""
Handler class for operation: Import Profiles.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Import Profiles (requires classic mode)."""
assert wait_for_completion is True # no async
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['profile-area'])
# TODO: Import the CPC profiles from a simulated profile area
class CpcExportProfilesHandler(object):
"""
Handler class for operation: Export Profiles.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Export Profiles (requires classic mode)."""
assert wait_for_completion is True # no async
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['profile-area'])
# TODO: Export the CPC profiles to a simulated profile area
class CpcExportPortNamesListHandler(object):
"""
Handler class for operation: Export WWPN List.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Export WWPN List (requires DPM mode)."""
        assert wait_for_completion is True  # always synchronous
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['partitions'])
partition_uris = body['partitions']
if len(partition_uris) == 0:
raise BadRequestError(
method, uri, reason=149,
message="'partitions' field in request body is empty.")
wwpn_list = []
for partition_uri in partition_uris:
partition = hmc.lookup_by_uri(partition_uri)
partition_cpc = partition.manager.parent
if partition_cpc.oid != cpc_oid:
raise BadRequestError(
method, uri, reason=149,
message="Partition %r specified in 'partitions' field "
"is not in the targeted CPC with ID %r (but in the CPC "
"with ID %r)." %
(partition.uri, cpc_oid, partition_cpc.oid))
partition_name = partition.properties.get('name', '')
for hba in partition.hbas.list():
port_uri = hba.properties['adapter-port-uri']
port = hmc.lookup_by_uri(port_uri)
adapter = port.manager.parent
adapter_id = adapter.properties.get('adapter-id', '')
devno = hba.properties.get('device-number', '')
wwpn = hba.properties.get('wwpn', '')
wwpn_str = '%s,%s,%s,%s' % (partition_name, adapter_id,
devno, wwpn)
wwpn_list.append(wwpn_str)
return {
'wwpn-list': wwpn_list
}
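# Illustrative usage (not part of the mock): a sketch of the WWPN export
# handler above. Each returned entry is a CSV string of the form
# 'partition-name,adapter-id,device-number,wwpn', as built by the handler.
def _example_export_wwpn_list(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    partitions = cpc.partitions.list()
    wwpn_list = cpc.export_port_names_list(partitions)
    # Split each CSV entry into its four fields
    return [entry.split(',') for entry in wwpn_list]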
CPC_PROPNAME_FROM_PROCTYPE = {
'sap': 'processor-count-service-assist',
'aap': 'processor-count-aap',
'ifl': 'processor-count-ifl',
'icf': 'processor-count-icf',
'iip': 'processor-count-iip',
'cbp': 'processor-count-cbp',
}
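# Note: General purpose processors ('cp') are intentionally not in this
# mapping; their count ('processor-count-general-purpose') is derived from
# the software model in the two temporary-capacity handlers below.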
class CpcAddTempCapacityHandler(object):
"""
Handler class for operation: Add Temporary Capacity.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Add Temporary Capacity."""
assert wait_for_completion is True # no async
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['record-id', 'test'])
# record_id = body['record-id'] # TODO: Implement
# test = body['test'] # TODO: Implement
# force = body.get('force', False) # TODO: Implement
software_model = body.get('software-model', None)
processor_info = body.get('processor-info', None)
if software_model is not None:
current_software_model = \
cpc.properties['software-model-permanent-plus-temporary']
if current_software_model is not None:
raise BadRequestError(
method, uri, reason=277,
message="Cannot activate temporary software model {} "
"because temporary software model {} is already active".
format(software_model, current_software_model))
            # We accept any software model, and derive the desired total
            # number of general purpose processors from the digits that
            # follow its first character (e.g. '715' -> 15).
pnum = int(software_model[1:])
pname = 'processor-count-general-purpose'
ptype = 'cp'
if pnum < cpc.properties[pname]:
raise BadRequestError(
method, uri, reason=276,
message="Cannot activate temporary software model {} "
"because its target number of {} {} processors is below "
"the current number of {} {} processors".
format(software_model, pnum, ptype, cpc.properties[pname],
ptype))
cpc.properties[pname] = pnum
cpc.properties['software-model-permanent-plus-temporary'] = \
software_model
if processor_info is not None:
for pitem in processor_info:
ptype = pitem['processor-type']
psteps = pitem.get('num-processor-steps', None)
if ptype not in CPC_PROPNAME_FROM_PROCTYPE:
raise BadRequestError(
method, uri, reason=276,
message="Invalid processor type {} was specified in a "
"processor-info entry".format(ptype))
pname = CPC_PROPNAME_FROM_PROCTYPE[ptype]
if psteps is not None:
# TODO: Check against installed number of processors
cpc.properties[pname] += psteps
class CpcRemoveTempCapacityHandler(object):
"""
Handler class for operation: Remove Temporary Capacity.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Remove Temporary Capacity."""
assert wait_for_completion is True # no async
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['record-id'])
# record_id = body['record-id'] # TODO: Implement
software_model = body.get('software-model', None)
processor_info = body.get('processor-info', None)
if software_model is not None:
current_software_model = \
cpc.properties['software-model-permanent-plus-temporary']
if current_software_model is None:
raise BadRequestError(
method, uri, reason=277,
message="Cannot deactivate temporary software model {} "
"because no temporary software model is currently active".
format(software_model))
            # We accept any software model, and derive the desired total
            # number of general purpose processors from the digits that
            # follow its first character (e.g. '715' -> 15).
pnum = int(software_model[1:])
pname = 'processor-count-general-purpose'
ptype = 'cp'
if pnum > cpc.properties[pname]:
raise BadRequestError(
method, uri, reason=276,
                    message="Cannot deactivate temporary software model {} "
"because its target number of {} {} processors is above "
"the current number of {} {} processors".
format(software_model, pnum, ptype, cpc.properties[pname],
ptype))
cpc.properties[pname] = pnum
cpc.properties['software-model-permanent-plus-temporary'] = None
if processor_info is not None:
for pitem in processor_info:
ptype = pitem['processor-type']
psteps = pitem.get('num-processor-steps', None)
if ptype not in CPC_PROPNAME_FROM_PROCTYPE:
raise BadRequestError(
method, uri, reason=276,
message="Invalid processor type {} was specified in a "
"processor-info entry".format(ptype))
pname = CPC_PROPNAME_FROM_PROCTYPE[ptype]
if psteps is not None:
if cpc.properties[pname] - psteps < 1:
raise BadRequestError(
method, uri, reason=276,
                            message="Cannot reduce the number of {} "
                            "processors by {} because at least one "
                            "processor must remain (currently {}).".
                            format(ptype, psteps, cpc.properties[pname]))
cpc.properties[pname] -= psteps
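# Illustrative usage (not part of the mock): a sketch of the two
# temporary-capacity handlers above, assuming zhmcclient's
# Cpc.add_temporary_capacity() / Cpc.remove_temporary_capacity() methods
# with these parameter names, and a CPC whose base model is '710'.
def _example_temporary_capacity(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    # Activate model '715' (-> 15 general purpose processors) and add 2 IFLs
    cpc.add_temporary_capacity(
        record_id='R1', test=True, software_model='715',
        processor_info=[{'processor-type': 'ifl',
                         'num-processor-steps': 2}])
    # Revert to model '710' and remove the 2 IFLs again
    cpc.remove_temporary_capacity(
        record_id='R1', software_model='710',
        processor_info=[{'processor-type': 'ifl',
                         'num-processor-steps': 2}])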
class CpcSetAutoStartListHandler(object):
"""
Handler class for operation: Set Auto-start List.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Set Auto-start List."""
assert wait_for_completion is True # no async
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['auto-start-list'])
auto_start_list = body['auto-start-list']
# Store it in the CPC
cpc.properties['auto-start-list'] = auto_start_list
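# Illustrative usage (not part of the mock): a sketch of the auto-start-list
# handler above via a direct POST, assuming the documented operation URI
# '<cpc-uri>/operations/set-auto-start-list'. The mock simply stores the
# given list in the 'auto-start-list' CPC property.
def _example_set_auto_start_list(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    partition = cpc.partitions.find(name='PART1')
    session.post(cpc.uri + '/operations/set-auto-start-list',
                 body={'auto-start-list': [
                     {'type': 'partition',
                      'post-start-delay': 10,
                      'partition-uri': partition.uri}]})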
class MetricsContextsHandler(object):
"""
Handler class for HTTP methods on set of MetricsContext resources.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Metrics Context."""
assert wait_for_completion is True # always synchronous
check_required_fields(method, uri, body,
['anticipated-frequency-seconds'])
new_metrics_context = hmc.metrics_contexts.add(body)
result = {
'metrics-context-uri': new_metrics_context.uri,
'metric-group-infos': new_metrics_context.get_metric_group_infos()
}
return result
class MetricsContextHandler(object):
"""
Handler class for HTTP methods on single MetricsContext resource.
"""
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete Metrics Context."""
try:
metrics_context = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
hmc.metrics_contexts.remove(metrics_context.oid)
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Get Metrics."""
try:
metrics_context = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result = metrics_context.get_metric_values_response()
return result
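# Illustrative usage (not part of the mock): a sketch of the metrics context
# lifecycle implemented by the two handler classes above.
def _example_metrics_context(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    mc = client.metrics_contexts.create(
        {'anticipated-frequency-seconds': 15,
         'metric-groups': ['dpm-system-usage-overview']})
    metrics_str = mc.get_metrics()  # MetricsContextHandler.get()
    mc.delete()                     # MetricsContextHandler.delete()
    return metrics_str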
class AdaptersHandler(object):
"""
Handler class for HTTP methods on set of Adapter resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Adapters of a CPC (empty result if not in DPM
mode)."""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_adapters = []
if cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for adapter in cpc.adapters.list(filter_args):
result_adapter = {}
for prop in adapter.properties:
if prop in ('object-uri', 'name', 'adapter-id',
'adapter-family', 'type', 'status'):
result_adapter[prop] = adapter.properties[prop]
result_adapters.append(result_adapter)
return {'adapters': result_adapters}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Hipersocket (requires DPM mode)."""
assert wait_for_completion is True
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['name'])
# We need to emulate the behavior of this POST to always create a
# hipersocket, but the add() method is used for adding all kinds of
# faked adapters to the faked HMC. So we need to specify the adapter
# type, but because the behavior of the Adapter resource object is
# that it only has its input properties set, we add the 'type'
# property on a copy of the input properties.
body2 = body.copy()
body2['type'] = 'hipersockets'
try:
new_adapter = cpc.adapters.add(body2)
except InputError as exc:
new_exc = BadRequestError(method, uri, reason=5, message=str(exc))
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.BadRequestError
# Create the VirtualSwitch for the new adapter
vs_props = {
'name': new_adapter.name,
'type': 'hipersockets',
'backing-adapter-uri': new_adapter.uri,
'port': 0,
}
cpc.virtual_switches.add(vs_props)
# Create the Port for the new adapter
port_props = {
'index': 0,
'name': 'Port 0',
}
new_adapter.ports.add(port_props)
return {'object-uri': new_adapter.uri}
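# Illustrative usage (not part of the mock): a sketch of creating a
# Hipersocket through the handler above. Note that the mock also creates the
# backing virtual switch (same name as the adapter) and its 'Port 0'.
def _example_create_hipersocket(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    adapter = cpc.adapters.create_hipersocket(
        {'name': 'HS1', 'description': 'Test Hipersocket'})
    vswitch = cpc.virtual_switches.find(name='HS1')
    return adapter, vswitch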
class AdapterHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single Adapter resource.
"""
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete Hipersocket (requires DPM mode)."""
try:
adapter = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = adapter.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
adapter.manager.remove(adapter.oid)
class AdapterChangeCryptoTypeHandler(object):
"""
Handler class for operation: Change Crypto Type.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Change Crypto Type (requires DPM mode)."""
assert wait_for_completion is True # HMC operation is synchronous
adapter_uri = uri.split('/operations/')[0]
try:
adapter = hmc.lookup_by_uri(adapter_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = adapter.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['crypto-type'])
# Check the validity of the new crypto_type
crypto_type = body['crypto-type']
if crypto_type not in ['accelerator', 'cca-coprocessor',
'ep11-coprocessor']:
raise BadRequestError(
method, uri, reason=8,
message="Invalid value for 'crypto-type' field: %s" %
crypto_type)
# Reflect the result of changing the crypto type
adapter.properties['crypto-type'] = crypto_type
class AdapterChangeAdapterTypeHandler(object):
"""
Handler class for operation: Change Adapter Type.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Change Adapter Type (requires DPM mode)."""
assert wait_for_completion is True # HMC operation is synchronous
adapter_uri = uri.split('/operations/')[0]
try:
adapter = hmc.lookup_by_uri(adapter_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = adapter.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_required_fields(method, uri, body, ['type'])
new_adapter_type = body['type']
# Check the validity of the adapter family
adapter_family = adapter.properties.get('adapter-family', None)
if adapter_family != 'ficon':
raise BadRequestError(
method, uri, reason=18,
message="The adapter type cannot be changed for adapter "
"family: %s" % adapter_family)
# Check the adapter status
adapter_status = adapter.properties.get('status', None)
if adapter_status == 'exceptions':
raise BadRequestError(
method, uri, reason=18,
message="The adapter type cannot be changed for adapter "
"status: %s" % adapter_status)
# Check the validity of the new adapter type
if new_adapter_type not in ['fc', 'fcp', 'not-configured']:
raise BadRequestError(
method, uri, reason=8,
message="Invalid new value for 'type' field: %s" %
new_adapter_type)
# Check that the new adapter type is not already set
adapter_type = adapter.properties.get('type', None)
if new_adapter_type == adapter_type:
raise BadRequestError(
method, uri, reason=8,
message="New value for 'type' field is already set: %s" %
new_adapter_type)
# TODO: Reject if adapter is attached to a partition.
# Reflect the result of changing the adapter type
adapter.properties['type'] = new_adapter_type
class NetworkPortHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single NetworkPort resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update NetworkPort Properties."""
try:
network_port = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
])
network_port.update(body)
class StoragePortHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single StoragePort resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update StoragePort Properties."""
try:
storage_port = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'connection-endpoint-uri',
])
storage_port.update(body)
class PartitionsHandler(object):
"""
Handler class for HTTP methods on set of Partition resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Partitions of a CPC (empty result if not in DPM
mode)."""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Reflect the result of listing the partition
result_partitions = []
if cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for partition in cpc.partitions.list(filter_args):
result_partition = {}
for prop in partition.properties:
if prop in ('object-uri', 'name', 'status', 'type'):
result_partition[prop] = partition.properties[prop]
result_partitions.append(result_partition)
return {'partitions': result_partitions}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Partition (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
cpc_oid = uri_parms[0]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_required_fields(method, uri, body,
['name', 'initial-memory', 'maximum-memory'])
# TODO: There are some more input properties that are required under
# certain conditions.
# Reflect the result of creating the partition
new_partition = cpc.partitions.add(body)
return {'object-uri': new_partition.uri}
class PartitionHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single Partition resource.
"""
# TODO: Add check_valid_cpc_status() in Update Partition Properties
# TODO: Add check_partition_status(transitional) in Update Partition Props
# TODO: Add check whether properties are modifiable in Update Part. Props
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete Partition."""
try:
partition = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['stopped'])
# Reflect the result of deleting the partition
partition.manager.remove(partition.oid)
class PartitionStartHandler(object):
"""
Handler class for operation: Start Partition.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Start Partition (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['stopped'])
# Reflect the result of starting the partition
partition.properties['status'] = 'active'
return {}
class PartitionStopHandler(object):
"""
Handler class for operation: Stop Partition.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Stop Partition (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['active', 'paused',
'terminated'])
# TODO: Clarify with HMC team whether statuses 'degraded' and
# 'reservation-error' should also be stoppable. Otherwise, the
# partition cannot leave these states.
# Reflect the result of stopping the partition
partition.properties['status'] = 'stopped'
return {}
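# Illustrative usage (not part of the mock): a sketch of the partition
# lifecycle covered by the handlers above (create, start, stop, delete).
def _example_partition_lifecycle(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    partition = cpc.partitions.create(
        {'name': 'PART1', 'initial-memory': 4096, 'maximum-memory': 4096})
    partition.start()   # status 'stopped' -> 'active'
    partition.stop()    # status 'active' -> 'stopped'
    partition.delete()  # requires status 'stopped'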
class PartitionScsiDumpHandler(object):
"""
Handler class for operation: Dump Partition.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Dump Partition (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['active', 'paused',
'terminated'])
check_required_fields(method, uri, body,
['dump-load-hba-uri',
'dump-world-wide-port-name',
'dump-logical-unit-number'])
# We don't reflect the dump in the mock state.
return {}
class PartitionStartDumpProgramHandler(object):
"""
Handler class for operation: Start Dump Program.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Start Dump Program (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['active', 'degraded', 'paused',
'terminated'])
check_required_fields(method, uri, body,
['dump-program-info',
'dump-program-type'])
# We don't reflect the dump in the mock state.
return {}
class PartitionPswRestartHandler(object):
"""
Handler class for operation: Perform PSW Restart.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Perform PSW Restart (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
valid_statuses=['active', 'paused',
'terminated'])
# We don't reflect the PSW restart in the mock state.
return {}
class PartitionMountIsoImageHandler(object):
"""
Handler class for operation: Mount ISO Image.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Mount ISO Image (requires DPM mode)."""
assert wait_for_completion is True # synchronous operation
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
# Parse and check required query parameters
query_parms = parse_query_parms(method, uri, uri_parms[1])
try:
image_name = query_parms['image-name']
except KeyError:
new_exc = BadRequestError(
method, uri, reason=1,
message="Missing required URI query parameter 'image-name'")
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.BadRequestError
try:
ins_file_name = query_parms['ins-file-name']
except KeyError:
new_exc = BadRequestError(
method, uri, reason=1,
message="Missing required URI query parameter 'ins-file-name'")
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.BadRequestError
# Reflect the effect of mounting in the partition properties
partition.properties['boot-iso-image-name'] = image_name
partition.properties['boot-iso-ins-file'] = ins_file_name
return {}
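# Illustrative usage (not part of the mock): a sketch of the ISO mount and
# unmount handlers above, assuming zhmcclient's Partition.mount_iso_image().
# The mock only records the image and INS file names in the partition
# properties; the image data itself is not kept.
def _example_mount_iso(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    partition = client.cpcs.find(name='CPC1').partitions.find(name='PART1')
    with open('boot.iso', 'rb') as fp:
        partition.mount_iso_image(fp, 'boot.iso', 'boot.ins')
    partition.unmount_iso_image()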
class PartitionUnmountIsoImageHandler(object):
"""
Handler class for operation: Unmount ISO Image.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Unmount ISO Image (requires DPM mode)."""
assert wait_for_completion is True # synchronous operation
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
# Reflect the effect of unmounting in the partition properties
partition.properties['boot-iso-image-name'] = None
partition.properties['boot-iso-ins-file'] = None
return {}
def ensure_crypto_config(partition):
"""
Ensure that the 'crypto-configuration' property on the faked partition
is initialized.
"""
if 'crypto-configuration' not in partition.properties or \
partition.properties['crypto-configuration'] is None:
partition.properties['crypto-configuration'] = {}
crypto_config = partition.properties['crypto-configuration']
if 'crypto-adapter-uris' not in crypto_config or \
crypto_config['crypto-adapter-uris'] is None:
crypto_config['crypto-adapter-uris'] = []
adapter_uris = crypto_config['crypto-adapter-uris']
if 'crypto-domain-configurations' not in crypto_config or \
crypto_config['crypto-domain-configurations'] is None:
crypto_config['crypto-domain-configurations'] = []
domain_configs = crypto_config['crypto-domain-configurations']
return adapter_uris, domain_configs
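# After ensure_crypto_config(), the 'crypto-configuration' property of the
# faked partition has this shape (a sketch):
#
#     {
#         'crypto-adapter-uris': ['/api/adapters/{oid}', ...],
#         'crypto-domain-configurations': [
#             {'domain-index': 0, 'access-mode': 'control-usage'},
#             ...
#         ],
#     }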
class PartitionIncreaseCryptoConfigHandler(object):
"""
Handler class for operation: Increase Crypto Configuration.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Increase Crypto Configuration (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, []) # check just body
adapter_uris, domain_configs = ensure_crypto_config(partition)
add_adapter_uris = body.get('crypto-adapter-uris', [])
add_domain_configs = body.get('crypto-domain-configurations', [])
# We don't support finding errors in this simple-minded mock support,
# so we assume that the input is fine (e.g. no invalid adapters) and
# we just add it.
for _uri in add_adapter_uris:
if _uri not in adapter_uris:
adapter_uris.append(_uri)
for dc in add_domain_configs:
if dc not in domain_configs:
domain_configs.append(dc)
class PartitionDecreaseCryptoConfigHandler(object):
"""
Handler class for operation: Decrease Crypto Configuration.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Decrease Crypto Configuration (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, []) # check just body
adapter_uris, domain_configs = ensure_crypto_config(partition)
remove_adapter_uris = body.get('crypto-adapter-uris', [])
remove_domain_indexes = body.get('crypto-domain-indexes', [])
# We don't support finding errors in this simple-minded mock support,
# so we assume that the input is fine (e.g. no invalid adapters) and
# we just remove it.
for _uri in remove_adapter_uris:
if _uri in adapter_uris:
adapter_uris.remove(_uri)
        for remove_di in remove_domain_indexes:
            for i, dc in enumerate(domain_configs):
                if dc['domain-index'] == remove_di:
                    del domain_configs[i]
                    break  # stop iterating over the now-mutated list
class PartitionChangeCryptoConfigHandler(object):
"""
Handler class for operation: Change Crypto Configuration.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Change Crypto Configuration (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body,
['domain-index', 'access-mode'])
_, domain_configs = ensure_crypto_config(partition)
change_domain_index = body['domain-index']
change_access_mode = body['access-mode']
# We don't support finding errors in this simple-minded mock support,
# so we assume that the input is fine (e.g. no invalid domain indexes)
# and we just change it.
for dc in domain_configs:
if dc['domain-index'] == change_domain_index:
dc['access-mode'] = change_access_mode
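# Illustrative usage (not part of the mock): a sketch of the three crypto
# configuration handlers above, assuming zhmcclient's Partition methods
# increase_crypto_config(), change_crypto_domain_config() and
# decrease_crypto_config(), and a crypto adapter named 'CRYPTO1'.
def _example_crypto_config(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    partition = cpc.partitions.find(name='PART1')
    adapter = cpc.adapters.find(name='CRYPTO1')
    partition.increase_crypto_config(
        [adapter], [{'domain-index': 0, 'access-mode': 'control'}])
    partition.change_crypto_domain_config(0, 'control-usage')
    partition.decrease_crypto_config([adapter], [0])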
class HbasHandler(object):
"""
Handler class for HTTP methods on set of Hba resources.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create HBA (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_uri = re.sub('/hbas$', '', uri)
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, ['name', 'adapter-port-uri'])
# Check the port-related input property
port_uri = body['adapter-port-uri']
m = re.match(r'(^/api/adapters/[^/]+)/storage-ports/[^/]+$', port_uri)
if not m:
# We treat an invalid port URI like "port not found".
raise InvalidResourceError(method, uri, reason=6,
resource_uri=port_uri)
adapter_uri = m.group(1)
try:
hmc.lookup_by_uri(adapter_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=2,
resource_uri=adapter_uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
try:
hmc.lookup_by_uri(port_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=6,
resource_uri=port_uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
new_hba = partition.hbas.add(body)
return {'element-uri': new_hba.uri}
class HbaHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single Hba resource.
"""
# TODO: Add check_valid_cpc_status() in Update HBA Properties
# TODO: Add check_partition_status(transitional) in Update HBA Properties
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete HBA (requires DPM mode)."""
try:
hba = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
partition = hba.manager.parent
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
partition.hbas.remove(hba.oid)
class HbaReassignPortHandler(object):
"""
Handler class for operation: Reassign Storage Adapter Port.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Reassign Storage Adapter Port (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_oid = uri_parms[0]
partition_uri = '/api/partitions/' + partition_oid
hba_oid = uri_parms[1]
hba_uri = '/api/partitions/' + partition_oid + '/hbas/' + hba_oid
try:
hba = hmc.lookup_by_uri(hba_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
        # The partition must exist, since the HBA was found under its URI.
        partition = hmc.lookup_by_uri(partition_uri)
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, ['adapter-port-uri'])
# Reflect the effect of the operation on the HBA
new_port_uri = body['adapter-port-uri']
hba.properties['adapter-port-uri'] = new_port_uri
class NicsHandler(object):
"""
Handler class for HTTP methods on set of Nic resources.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create NIC (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_uri = re.sub('/nics$', '', uri)
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, ['name'])
# Check the port-related input properties
if 'network-adapter-port-uri' in body:
port_uri = body['network-adapter-port-uri']
m = re.match(r'(^/api/adapters/[^/]+)/network-ports/[^/]+$',
port_uri)
if not m:
# We treat an invalid port URI like "port not found".
raise InvalidResourceError(method, uri, reason=6,
resource_uri=port_uri)
adapter_uri = m.group(1)
try:
hmc.lookup_by_uri(adapter_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=2,
resource_uri=adapter_uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
try:
hmc.lookup_by_uri(port_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=6,
resource_uri=port_uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
elif 'virtual-switch-uri' in body:
vswitch_uri = body['virtual-switch-uri']
try:
hmc.lookup_by_uri(vswitch_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=2,
resource_uri=vswitch_uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
else:
nic_name = body.get('name', None)
raise BadRequestError(
method, uri, reason=5,
message="The input properties for creating a NIC {!r} in "
"partition {!r} must specify either the "
"'network-adapter-port-uri' or the "
"'virtual-switch-uri' property.".
format(nic_name, partition.name))
        # We have ensured that the backing port or virtual switch exists,
        # so no InputError handling is needed here.
new_nic = partition.nics.add(body)
return {'element-uri': new_nic.uri}
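# Illustrative usage (not part of the mock): a sketch of the NIC creation
# handler above. A NIC must be backed either by a virtual switch (for
# vswitch-based adapters such as OSA or Hipersockets) or by a network
# adapter port, as enforced by the handler; this sketch uses a vswitch.
def _example_create_nic(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    cpc = client.cpcs.find(name='CPC1')
    partition = cpc.partitions.find(name='PART1')
    vswitch = cpc.virtual_switches.find(name='VSWITCH1')
    nic = partition.nics.create(
        {'name': 'NIC1', 'virtual-switch-uri': vswitch.uri})
    return nic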
class NicHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single Nic resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update NIC Properties."""
try:
nic = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
partition = nic.manager.parent
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
# Check whether requested properties are modifiable
check_writable(
method, uri, body,
[
'description',
'name',
'device-number',
'network-adapter-port-uri',
'ssc-management-nic',
'ssc-ip-address-type',
'ssc-ip-address',
'ssc-mask-prefix',
'vlan-id',
'mac-address',
'vlan-type',
'function-number',
'function-range',
])
nic.update(body)
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete NIC (requires DPM mode)."""
try:
nic = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
partition = nic.manager.parent
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
partition.nics.remove(nic.oid)
class VirtualFunctionsHandler(object):
"""
Handler class for HTTP methods on set of VirtualFunction resources.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Virtual Function (requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
partition_uri = re.sub('/virtual-functions$', '', uri)
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
check_required_fields(method, uri, body, ['name'])
new_vf = partition.virtual_functions.add(body)
return {'element-uri': new_vf.uri}
class VirtualFunctionHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single VirtualFunction resource.
"""
# TODO: Add check_valid_cpc_status() in Update VF Properties
# TODO: Add check_partition_status(transitional) in Update VF Properties
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete Virtual Function (requires DPM mode)."""
try:
vf = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
partition = vf.manager.parent
cpc = partition.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
check_partition_status(method, uri, partition,
invalid_statuses=['starting', 'stopping'])
partition.virtual_functions.remove(vf.oid)
class VirtualSwitchesHandler(object):
"""
Handler class for HTTP methods on set of VirtualSwitch resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Virtual Switches of a CPC (empty result if not in
DPM mode)."""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_vswitches = []
if cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for vswitch in cpc.virtual_switches.list(filter_args):
result_vswitch = {}
for prop in vswitch.properties:
if prop in ('object-uri', 'name', 'type'):
result_vswitch[prop] = vswitch.properties[prop]
result_vswitches.append(result_vswitch)
return {'virtual-switches': result_vswitches}
class VirtualSwitchHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single VirtualSwitch resource.
"""
pass
class VirtualSwitchGetVnicsHandler(object):
"""
Handler class for operation: Get Connected VNICs of a Virtual Switch.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Get Connected VNICs of a Virtual Switch
(requires DPM mode)."""
assert wait_for_completion is True # async not supported yet
vswitch_oid = uri_parms[0]
vswitch_uri = '/api/virtual-switches/' + vswitch_oid
try:
vswitch = hmc.lookup_by_uri(vswitch_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = vswitch.manager.parent
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
connected_vnic_uris = vswitch.properties['connected-vnic-uris']
return {'connected-vnic-uris': connected_vnic_uris}
class StorageGroupsHandler(object):
"""
Handler class for HTTP methods on set of StorageGroup resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Storage Groups (always global but with filters)."""
query_str = uri_parms[0]
filter_args = parse_query_parms(method, uri, query_str)
result_storage_groups = []
for sg in hmc.consoles.console.storage_groups.list(filter_args):
result_sg = {}
for prop in sg.properties:
if prop in ('object-uri', 'cpc-uri', 'name',
'fulfillment-state', 'type'):
result_sg[prop] = sg.properties[prop]
result_storage_groups.append(result_sg)
return {'storage-groups': result_storage_groups}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Storage Group."""
assert wait_for_completion is True # async not supported yet
check_required_fields(method, uri, body, ['name', 'cpc-uri', 'type'])
cpc_uri = body['cpc-uri']
try:
cpc = hmc.lookup_by_uri(cpc_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
# Reflect the result of creating the storage group
body2 = body.copy()
sv_requests = body2.pop('storage-volumes', None)
new_storage_group = hmc.consoles.console.storage_groups.add(body2)
sv_uris = []
if sv_requests:
for sv_req in sv_requests:
check_required_fields(method, uri, sv_req, ['operation'])
operation = sv_req['operation']
if operation == 'create':
sv_props = sv_req.copy()
del sv_props['operation']
if 'element-uri' in sv_props:
raise BadRequestError(
method, uri, 7,
"The 'element-uri' field in storage-volumes is "
"invalid for the create operation")
sv_uri = new_storage_group.storage_volumes.add(sv_props)
sv_uris.append(sv_uri)
else:
raise BadRequestError(
method, uri, 5,
"Invalid value for storage-volumes 'operation' "
"field: %s" % operation)
return {
'object-uri': new_storage_group.uri,
'element-uris': sv_uris,
}
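# Illustrative usage (not part of the mock): a sketch of creating a storage
# group with an initial storage volume through the handler above.
def _example_create_storage_group(session):
    import zhmcclient
    client = zhmcclient.Client(session)
    console = client.consoles.console
    cpc = client.cpcs.find(name='CPC1')
    sg = console.storage_groups.create(
        {'name': 'SG1', 'cpc-uri': cpc.uri, 'type': 'fcp',
         'storage-volumes': [
             {'operation': 'create', 'name': 'SV1', 'size': 100}]})
    return sg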
class StorageGroupHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single StorageGroup resource.
"""
pass
class StorageGroupModifyHandler(object):
"""
Handler class for operation: Modify Storage Group Properties.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Modify Storage Group Properties."""
assert wait_for_completion is True # async not supported yet
        # The request URI is the operation URI; construct the SG URI from it
storage_group_oid = uri_parms[0]
storage_group_uri = '/api/storage-groups/' + storage_group_oid
try:
storage_group = hmc.lookup_by_uri(storage_group_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Reflect the result of modifying the storage group
body2 = body.copy()
sv_requests = body2.pop('storage-volumes', None)
storage_group.update(body2)
sv_uris = []
if sv_requests:
for sv_req in sv_requests:
check_required_fields(method, uri, sv_req, ['operation'])
operation = sv_req['operation']
if operation == 'create':
sv_props = sv_req.copy()
del sv_props['operation']
if 'element-uri' in sv_props:
raise BadRequestError(
method, uri, 7,
"The 'element-uri' field in storage-volumes is "
"invalid for the create operation")
sv_uri = storage_group.storage_volumes.add(sv_props)
sv_uris.append(sv_uri)
                elif operation == 'modify':
                    check_required_fields(method, uri, sv_req, ['element-uri'])
                    sv_uri = sv_req['element-uri']
                    # Build the update properties; 'operation' and
                    # 'element-uri' are request fields, not SV properties.
                    sv_props = sv_req.copy()
                    del sv_props['operation']
                    del sv_props['element-uri']
                    storage_volume = hmc.lookup_by_uri(sv_uri)
                    storage_volume.update(sv_props)
                elif operation == 'delete':
                    check_required_fields(method, uri, sv_req, ['element-uri'])
                    sv_uri = sv_req['element-uri']
                    storage_volume = hmc.lookup_by_uri(sv_uri)
                    storage_volume.manager.remove(storage_volume.oid)
else:
raise BadRequestError(
method, uri, 5,
"Invalid value for storage-volumes 'operation' "
"field: %s" % operation)
return {
'element-uris': sv_uris, # SVs created, maintaining the order
}
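# Illustrative usage (not part of the mock): a sketch of the modify handler
# above, assuming zhmcclient's StorageGroup.update_properties() maps to this
# operation. The embedded 'storage-volumes' requests create, modify or
# delete volumes in the same call.
def _example_modify_storage_group(sg, sv):
    # 'sg' is a zhmcclient.StorageGroup, 'sv' one of its StorageVolumes
    sg.update_properties(
        {'description': 'updated',
         'storage-volumes': [
             {'operation': 'create', 'name': 'SV2', 'size': 200},
             {'operation': 'modify', 'element-uri': sv.uri, 'size': 300},
         ]})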
class StorageGroupDeleteHandler(object):
"""
Handler class for operation: Delete Storage Group.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Delete Storage Group."""
assert wait_for_completion is True # async not supported yet
        # The request URI is the operation URI; construct the SG URI from it
storage_group_oid = uri_parms[0]
storage_group_uri = '/api/storage-groups/' + storage_group_oid
try:
storage_group = hmc.lookup_by_uri(storage_group_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# TODO: Check that the SG is detached from any partitions
# Reflect the result of deleting the storage_group
storage_group.manager.remove(storage_group.oid)
class StorageGroupRequestFulfillmentHandler(object):
"""
Handler class for operation: Request Storage Group Fulfillment.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Request Storage Group Fulfillment."""
assert wait_for_completion is True # async not supported yet
        # The request URI is the operation URI; construct the SG URI from it
storage_group_oid = uri_parms[0]
storage_group_uri = '/api/storage-groups/' + storage_group_oid
try:
hmc.lookup_by_uri(storage_group_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
        # The mock does not track fulfillment, so there is nothing to
        # reflect for this operation.
        pass
class StorageGroupAddCandidatePortsHandler(object):
"""
Handler class for operation: Add Candidate Adapter Ports to an FCP Storage
Group.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Add Candidate Adapter Ports to an FCP Storage Group."""
assert wait_for_completion is True # async not supported yet
        # The request URI is the operation URI; construct the SG URI from it
storage_group_oid = uri_parms[0]
storage_group_uri = '/api/storage-groups/' + storage_group_oid
try:
storage_group = hmc.lookup_by_uri(storage_group_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['adapter-port-uris'])
# TODO: Check that storage group has type FCP
# Reflect the result of adding the candidate ports
candidate_adapter_port_uris = \
storage_group.properties['candidate-adapter-port-uris']
for ap_uri in body['adapter-port-uris']:
if ap_uri in candidate_adapter_port_uris:
raise ConflictError(method, uri, 483,
"Adapter port is already in candidate "
"list of storage group %s: %s" %
(storage_group.name, ap_uri))
candidate_adapter_port_uris.append(ap_uri)
class StorageGroupRemoveCandidatePortsHandler(object):
"""
Handler class for operation: Remove Candidate Adapter Ports from an FCP
Storage Group.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Remove Candidate Adapter Ports from an FCP Storage
Group."""
assert wait_for_completion is True # async not supported yet
        # The request URI is the operation URI; construct the SG URI from it
storage_group_oid = uri_parms[0]
storage_group_uri = '/api/storage-groups/' + storage_group_oid
try:
storage_group = hmc.lookup_by_uri(storage_group_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['adapter-port-uris'])
# TODO: Check that storage group has type FCP
        # Reflect the result of removing the candidate ports
candidate_adapter_port_uris = \
storage_group.properties['candidate-adapter-port-uris']
for ap_uri in body['adapter-port-uris']:
if ap_uri not in candidate_adapter_port_uris:
raise ConflictError(method, uri, 479,
"Adapter port is not in candidate "
"list of storage group %s: %s" %
(storage_group.name, ap_uri))
candidate_adapter_port_uris.remove(ap_uri)
class StorageVolumesHandler(object):
"""
Handler class for HTTP methods on set of StorageVolume resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Storage Volumes of a Storage Group."""
sg_uri = re.sub('/storage-volumes$', '', uri)
try:
sg = hmc.lookup_by_uri(sg_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
query_str = uri_parms[1]
filter_args = parse_query_parms(method, uri, query_str)
result_storage_volumes = []
for sv in sg.storage_volumes.list(filter_args):
result_sv = {}
for prop in sv.properties:
if prop in ('element-uri', 'name', 'fulfillment-state', 'size',
'usage'):
result_sv[prop] = sv.properties[prop]
result_storage_volumes.append(result_sv)
return {'storage-volumes': result_storage_volumes}
class StorageVolumeHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single StorageVolume resource.
"""
pass
class CapacityGroupsHandler(object):
"""
Handler class for HTTP methods on set of CapacityGroup resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Capacity Groups (always global but with filters)."""
cpc_oid = uri_parms[0]
cpc_uri = '/api/cpcs/' + cpc_oid
try:
cpc = hmc.lookup_by_uri(cpc_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
query_str = uri_parms[1]
filter_args = parse_query_parms(method, uri, query_str)
result_capacity_groups = []
for cg in cpc.capacity_groups.list(filter_args):
result_cg = {}
for prop in cg.properties:
if prop in ('element-uri', 'name'):
result_cg[prop] = cg.properties[prop]
result_capacity_groups.append(result_cg)
return {'capacity-groups': result_capacity_groups}
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Create Capacity Group."""
assert wait_for_completion is True # async not supported yet
check_required_fields(method, uri, body, ['name'])
cpc_oid = uri_parms[0]
cpc_uri = '/api/cpcs/' + cpc_oid
try:
cpc = hmc.lookup_by_uri(cpc_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
if not cpc.dpm_enabled:
raise CpcNotInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
# Reflect the result of creating the capacity group
new_capacity_group = cpc.capacity_groups.add(body)
return {
'element-uri': new_capacity_group.uri
}
class CapacityGroupHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler,
GenericDeleteHandler):
"""
Handler class for HTTP methods on single CapacityGroup resource.
"""
@staticmethod
def delete(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: Delete Capacity Group."""
try:
capacity_group = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check that Capacity Group is empty
partition_uris = capacity_group.properties['partition-uris']
if partition_uris:
raise ConflictError(
method, uri, reason=110,
message="Capacity group {!r} is not empty and contains "
"partitions with URIs {!r}".
format(capacity_group.name, partition_uris))
# Delete the mocked resource
capacity_group.manager.remove(capacity_group.oid)
class CapacityGroupAddPartitionHandler(object):
"""
Handler class for operation: Add Partition to Capacity Group.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Add Partition to Capacity Group."""
assert wait_for_completion is True # async not supported yet
        # The request URI identifies the operation, so construct the
        # capacity group URI from the OIDs
cpc_oid = uri_parms[0]
cpc_uri = '/api/cpcs/' + cpc_oid
try:
cpc = hmc.lookup_by_uri(cpc_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cg_oid = uri_parms[1]
cg_uri = cpc_uri + '/capacity-groups/' + cg_oid
try:
capacity_group = hmc.lookup_by_uri(cg_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=150)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['partition-uri'])
# Check the partition exists
partition_uri = body['partition-uri']
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=2)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check the partition is in shared processor mode
processor_mode = partition.properties.get('processor-mode', 'shared')
if processor_mode != 'shared':
raise ConflictError(method, uri, 170,
"Partition %s is in %s processor mode" %
(partition.name, processor_mode))
# Check the partition is not in this capacity group
partition_uris = capacity_group.properties['partition-uris']
if partition.uri in partition_uris:
raise ConflictError(method, uri, 130,
"Partition %s is already a member of "
"this capacity group %s" %
(partition.name, capacity_group.name))
# Check the partition is not in any other capacity group
for cg in cpc.capacity_groups.list():
if partition.uri in cg.properties['partition-uris']:
raise ConflictError(method, uri, 120,
"Partition %s is already a member of "
"another capacity group %s" %
(partition.name, cg.name))
# Reflect the result of adding the partition to the capacity group
capacity_group.properties['partition-uris'].append(partition.uri)
class CapacityGroupRemovePartitionHandler(object):
"""
Handler class for operation: Remove Partition from Capacity Group.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Remove Partition from Capacity Group."""
assert wait_for_completion is True # async not supported yet
        # The request URI identifies the operation, so construct the
        # capacity group URI from the OIDs
cpc_oid = uri_parms[0]
cpc_uri = '/api/cpcs/' + cpc_oid
try:
hmc.lookup_by_uri(cpc_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cg_oid = uri_parms[1]
cg_uri = cpc_uri + '/capacity-groups/' + cg_oid
try:
capacity_group = hmc.lookup_by_uri(cg_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri, reason=150)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
check_required_fields(method, uri, body, ['partition-uri'])
# Check the partition exists
partition_uri = body['partition-uri']
try:
partition = hmc.lookup_by_uri(partition_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
# Check the partition is in this capacity group
partition_uris = capacity_group.properties['partition-uris']
if partition.uri not in partition_uris:
raise ConflictError(method, uri, 140,
"Partition %s is not a member of "
"capacity group %s" %
(partition.name, capacity_group.name))
# Reflect the result of removing the partition from the capacity group
capacity_group.properties['partition-uris'].remove(partition.uri)
class LparsHandler(object):
"""
Handler class for HTTP methods on set of Lpar resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""Operation: List Logical Partitions of CPC (empty result in DPM
mode."""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_lpars = []
if not cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for lpar in cpc.lpars.list(filter_args):
result_lpar = {}
for prop in lpar.properties:
if prop in ('object-uri', 'name', 'status'):
result_lpar[prop] = lpar.properties[prop]
result_lpars.append(result_lpar)
return {'logical-partitions': result_lpars}
class LparHandler(GenericGetPropertiesHandler):
"""
Handler class for HTTP methods on single Lpar resource.
"""
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Update Logical Partition Properties."""
assert wait_for_completion is True # async not supported yet
try:
lpar = hmc.lookup_by_uri(uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = lpar.manager.parent
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
check_valid_cpc_status(method, uri, cpc)
status = lpar.properties.get('status', None)
if status not in ('not-operating', 'operating', 'exceptions'):
            # LPAR permits property updates only when it is active
new_exc = ConflictError(
method, uri, 1,
"Cannot update LPAR properties in status {}".format(status))
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.ConflictError
# TODO: Add check whether requested properties are modifiable
lpar.update(body)
class LparActivateHandler(object):
"""
A handler class for the "Activate Logical Partition" operation.
"""
@staticmethod
def get_status():
"""
Status retrieval method that returns the status the faked Lpar will
have after completion of the "Activate Logical Partition" operation.
This method returns the successful status 'not-operating' for LPARs that
do not auto-load their OSs, and can be mocked by testcases to return a
different status (e.g. 'operating' for LPARs that do auto-load, or
'acceptable' or 'exceptions').
"""
return 'not-operating'
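    # Testcase sketch (assumption, not part of this module): the resulting
    # status can be mocked with unittest.mock, e.g.:
    #   with mock.patch.object(LparActivateHandler, 'get_status',
    #                          return_value='operating'):
    #       ...  # drive the Activate operation against the faked HMC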
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Activate Logical Partition (requires classic mode)."""
assert wait_for_completion is True # async not supported yet
lpar_oid = uri_parms[0]
lpar_uri = '/api/logical-partitions/' + lpar_oid
try:
lpar = hmc.lookup_by_uri(lpar_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = lpar.manager.parent
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
status = lpar.properties.get('status', None)
force = body.get('force', False) if body else False
if status == 'operating' and not force:
raise ServerError(method, uri, reason=263,
message="LPAR {!r} could not be activated "
"because the LPAR is in status {} "
"(and force was not specified).".
format(lpar.name, status))
act_profile_name = body.get('activation-profile-name', None)
if not act_profile_name:
act_profile_name = lpar.properties.get(
'next-activation-profile-name', None)
if act_profile_name is None:
act_profile_name = ''
        # Check that the image activation profile name matches the LPAR name
if act_profile_name != lpar.name:
raise ServerError(method, uri, reason=263,
message="LPAR {!r} could not be activated "
"because the name of the image activation "
"profile {!r} is different from the LPAR name.".
format(lpar.name, act_profile_name))
# Reflect the activation in the resource
lpar.properties['status'] = LparActivateHandler.get_status()
lpar.properties['last-used-activation-profile'] = act_profile_name
class LparDeactivateHandler(object):
"""
A handler class for the "Deactivate Logical Partition" operation.
"""
@staticmethod
def get_status():
"""
Status retrieval method that returns the status the faked Lpar will
have after completion of the "Deactivate Logical Partition" operation.
This method returns the successful status 'not-activated', and can be
mocked by testcases to return a different status (e.g. 'exceptions').
"""
return 'not-activated'
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Deactivate Logical Partition (requires classic mode)."""
assert wait_for_completion is True # async not supported yet
lpar_oid = uri_parms[0]
lpar_uri = '/api/logical-partitions/' + lpar_oid
try:
lpar = hmc.lookup_by_uri(lpar_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = lpar.manager.parent
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
status = lpar.properties.get('status', None)
force = body.get('force', False) if body else False
if status == 'not-activated' and not force:
# Note that the current behavior (on EC12) is that force=True
# still causes this error to be returned (different behavior
# compared to the Activate and Load operations).
raise ServerError(method, uri, reason=263,
message="LPAR {!r} could not be deactivated "
"because the LPAR is already deactivated "
"(and force was not specified).".
format(lpar.name))
if status == 'operating' and not force:
raise ServerError(method, uri, reason=263,
message="LPAR {!r} could not be deactivated "
"because the LPAR is in status {} "
"(and force was not specified).".
format(lpar.name, status))
# Reflect the deactivation in the resource
lpar.properties['status'] = LparDeactivateHandler.get_status()
class LparLoadHandler(object):
"""
A handler class for the "Load Logical Partition" operation.
"""
@staticmethod
def get_status():
"""
Status retrieval method that returns the status the faked Lpar will
have after completion of the "Load Logical Partition" operation.
This method returns the successful status 'operating', and can be
mocked by testcases to return a different status (e.g. 'acceptable' or
'exceptions').
"""
return 'operating'
@staticmethod
def post(method, hmc, uri, uri_parms, body, logon_required,
wait_for_completion):
# pylint: disable=unused-argument
"""Operation: Load Logical Partition (requires classic mode)."""
assert wait_for_completion is True # async not supported yet
lpar_oid = uri_parms[0]
lpar_uri = '/api/logical-partitions/' + lpar_oid
try:
lpar = hmc.lookup_by_uri(lpar_uri)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
cpc = lpar.manager.parent
if cpc.dpm_enabled:
raise CpcInDpmError(method, uri, cpc)
status = lpar.properties.get('status', None)
force = body.get('force', False) if body else False
clear_indicator = body.get('clear-indicator', True) if body else True
store_status_indicator = body.get('store-status-indicator',
False) if body else False
if status == 'not-activated':
raise ConflictError(method, uri, reason=0,
message="LPAR {!r} could not be loaded "
"because the LPAR is in status {}.".
format(lpar.name, status))
if status == 'operating' and not force:
raise ServerError(method, uri, reason=263,
message="LPAR {!r} could not be loaded "
"because the LPAR is already loaded "
"(and force was not specified).".
format(lpar.name))
load_address = body.get('load-address', None) if body else None
if not load_address:
# Starting with z14, this parameter is optional and a last-used
# property is available.
load_address = lpar.properties.get('last-used-load-address', None)
if load_address is None:
# TODO: Verify actual error for this case on a z14.
raise BadRequestError(method, uri, reason=5,
message="LPAR {!r} could not be loaded "
"because a load address is not specified "
"in the request or in the Lpar last-used "
"property".
format(lpar.name))
load_parameter = body.get('load-parameter', None) if body else None
if not load_parameter:
# Starting with z14, a last-used property is available.
load_parameter = lpar.properties.get(
'last-used-load-parameter', None)
if load_parameter is None:
load_parameter = ''
# Reflect the load in the resource
if clear_indicator:
lpar.properties['memory'] = ''
if store_status_indicator:
lpar.properties['stored-status'] = status
else:
lpar.properties['stored-status'] = None
lpar.properties['status'] = LparLoadHandler.get_status()
lpar.properties['last-used-load-address'] = load_address
lpar.properties['last-used-load-parameter'] = load_parameter
class ResetActProfilesHandler(object):
"""
Handler class for HTTP methods on set of ResetActProfile resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""
Operation: List Reset Activation Profiles.
In case of DPM mode, an empty list is returned.
"""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_profiles = []
if not cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for profile in cpc.reset_activation_profiles.list(filter_args):
result_profile = {}
for prop in profile.properties:
if prop in ('element-uri', 'name'):
result_profile[prop] = profile.properties[prop]
result_profiles.append(result_profile)
return {'reset-activation-profiles': result_profiles}
class ResetActProfileHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single ResetActProfile resource.
"""
pass
class ImageActProfilesHandler(object):
"""
Handler class for HTTP methods on set of ImageActProfile resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""
Operation: List Image Activation Profiles.
In case of DPM mode, an empty list is returned.
"""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_profiles = []
if not cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for profile in cpc.image_activation_profiles.list(filter_args):
result_profile = {}
for prop in profile.properties:
if prop in ('element-uri', 'name'):
result_profile[prop] = profile.properties[prop]
result_profiles.append(result_profile)
return {'image-activation-profiles': result_profiles}
class ImageActProfileHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single ImageActProfile resource.
"""
pass
class LoadActProfilesHandler(object):
"""
Handler class for HTTP methods on set of LoadActProfile resources.
"""
@staticmethod
def get(method, hmc, uri, uri_parms, logon_required):
# pylint: disable=unused-argument
"""
Operation: List Load Activation Profiles.
In case of DPM mode, an empty list is returned.
"""
cpc_oid = uri_parms[0]
query_str = uri_parms[1]
try:
cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
except KeyError:
new_exc = InvalidResourceError(method, uri)
new_exc.__cause__ = None
raise new_exc # zhmcclient_mock.InvalidResourceError
result_profiles = []
if not cpc.dpm_enabled:
filter_args = parse_query_parms(method, uri, query_str)
for profile in cpc.load_activation_profiles.list(filter_args):
result_profile = {}
for prop in profile.properties:
if prop in ('element-uri', 'name'):
result_profile[prop] = profile.properties[prop]
result_profiles.append(result_profile)
return {'load-activation-profiles': result_profiles}
class LoadActProfileHandler(GenericGetPropertiesHandler,
GenericUpdatePropertiesHandler):
"""
Handler class for HTTP methods on single LoadActProfile resource.
"""
pass
# URIs to be handled
# Note: This list covers only the HMC operations implemented in the zhmcclient.
# The HMC supports several more operations.
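# Dispatch sketch (assumption: the URI handler elsewhere in this module does
# the equivalent of the following for each incoming request):
#   for uri_regexp, handler_class in URIS:
#       m = re.match(uri_regexp + '$', request_uri)
#       if m:
#           return handler_class.get(method, hmc, request_uri, m.groups(), ...)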
URIS = (
# (uri_regexp, handler_class)
# In all modes:
(r'/api/version', VersionHandler),
(r'/api/console', ConsoleHandler),
(r'/api/console/operations/restart', ConsoleRestartHandler),
(r'/api/console/operations/shutdown', ConsoleShutdownHandler),
(r'/api/console/operations/make-primary', ConsoleMakePrimaryHandler),
(r'/api/console/operations/reorder-user-patterns',
ConsoleReorderUserPatternsHandler),
(r'/api/console/operations/get-audit-log(?:\?(.*))?',
ConsoleGetAuditLogHandler),
(r'/api/console/operations/get-security-log(?:\?(.*))?',
ConsoleGetSecurityLogHandler),
(r'/api/console/operations/list-unmanaged-cpcs(?:\?(.*))?',
ConsoleListUnmanagedCpcsHandler),
(r'/api/console/operations/list-permitted-partitions(?:\?(.*))?',
ConsoleListPermittedPartitionsHandler),
(r'/api/console/operations/list-permitted-logical-partitions(?:\?(.*))?',
ConsoleListPermittedLparsHandler),
(r'/api/console/users(?:\?(.*))?', UsersHandler),
(r'/api/users/([^/]+)', UserHandler),
(r'/api/users/([^/]+)/operations/add-user-role',
UserAddUserRoleHandler),
(r'/api/users/([^/]+)/operations/remove-user-role',
UserRemoveUserRoleHandler),
(r'/api/console/user-roles(?:\?(.*))?', UserRolesHandler),
(r'/api/user-roles/([^/]+)', UserRoleHandler),
(r'/api/user-roles/([^/]+)/operations/add-permission',
UserRoleAddPermissionHandler),
(r'/api/user-roles/([^/]+)/operations/remove-permission',
UserRoleRemovePermissionHandler),
(r'/api/console/tasks(?:\?(.*))?', TasksHandler),
(r'/api/console/tasks/([^/]+)', TaskHandler),
(r'/api/console/user-patterns(?:\?(.*))?', UserPatternsHandler),
(r'/api/console/user-patterns/([^/]+)', UserPatternHandler),
(r'/api/console/password-rules(?:\?(.*))?', PasswordRulesHandler),
(r'/api/console/password-rules/([^/]+)', PasswordRuleHandler),
(r'/api/console/ldap-server-definitions(?:\?(.*))?',
LdapServerDefinitionsHandler),
(r'/api/console/ldap-server-definitions/([^/]+)',
LdapServerDefinitionHandler),
(r'/api/cpcs(?:\?(.*))?', CpcsHandler),
(r'/api/cpcs/([^/]+)', CpcHandler),
(r'/api/cpcs/([^/]+)/operations/set-cpc-power-save',
CpcSetPowerSaveHandler),
(r'/api/cpcs/([^/]+)/operations/set-cpc-power-capping',
CpcSetPowerCappingHandler),
(r'/api/cpcs/([^/]+)/energy-management-data',
CpcGetEnergyManagementDataHandler),
(r'/api/services/metrics/context', MetricsContextsHandler),
(r'/api/services/metrics/context/([^/]+)', MetricsContextHandler),
# Only in DPM mode:
(r'/api/cpcs/([^/]+)/operations/start', CpcStartHandler),
(r'/api/cpcs/([^/]+)/operations/stop', CpcStopHandler),
(r'/api/cpcs/([^/]+)/operations/activate', CpcActivateHandler),
(r'/api/cpcs/([^/]+)/operations/deactivate', CpcDeactivateHandler),
(r'/api/cpcs/([^/]+)/operations/export-port-names-list',
CpcExportPortNamesListHandler),
(r'/api/cpcs/([^/]+)/adapters(?:\?(.*))?', AdaptersHandler),
(r'/api/adapters/([^/]+)', AdapterHandler),
(r'/api/adapters/([^/]+)/operations/change-crypto-type',
AdapterChangeCryptoTypeHandler),
(r'/api/adapters/([^/]+)/operations/change-adapter-type',
AdapterChangeAdapterTypeHandler),
(r'/api/adapters/([^/]+)/network-ports/([^/]+)', NetworkPortHandler),
(r'/api/adapters/([^/]+)/storage-ports/([^/]+)', StoragePortHandler),
(r'/api/cpcs/([^/]+)/partitions(?:\?(.*))?', PartitionsHandler),
(r'/api/partitions/([^/]+)', PartitionHandler),
(r'/api/partitions/([^/]+)/operations/start', PartitionStartHandler),
(r'/api/partitions/([^/]+)/operations/stop', PartitionStopHandler),
(r'/api/partitions/([^/]+)/operations/scsi-dump',
PartitionScsiDumpHandler),
(r'/api/partitions/([^/]+)/operations/start-dump-program',
PartitionStartDumpProgramHandler),
(r'/api/partitions/([^/]+)/operations/psw-restart',
PartitionPswRestartHandler),
(r'/api/partitions/([^/]+)/operations/mount-iso-image(?:\?(.*))?',
PartitionMountIsoImageHandler),
(r'/api/partitions/([^/]+)/operations/unmount-iso-image',
PartitionUnmountIsoImageHandler),
(r'/api/partitions/([^/]+)/operations/increase-crypto-configuration',
PartitionIncreaseCryptoConfigHandler),
(r'/api/partitions/([^/]+)/operations/decrease-crypto-configuration',
PartitionDecreaseCryptoConfigHandler),
(r'/api/partitions/([^/]+)/operations/change-crypto-domain-configuration',
PartitionChangeCryptoConfigHandler),
(r'/api/partitions/([^/]+)/hbas(?:\?(.*))?', HbasHandler),
(r'/api/partitions/([^/]+)/hbas/([^/]+)', HbaHandler),
(r'/api/partitions/([^/]+)/hbas/([^/]+)/operations/'\
'reassign-storage-adapter-port', HbaReassignPortHandler),
(r'/api/partitions/([^/]+)/nics(?:\?(.*))?', NicsHandler),
(r'/api/partitions/([^/]+)/nics/([^/]+)', NicHandler),
(r'/api/partitions/([^/]+)/virtual-functions(?:\?(.*))?',
VirtualFunctionsHandler),
(r'/api/partitions/([^/]+)/virtual-functions/([^/]+)',
VirtualFunctionHandler),
(r'/api/cpcs/([^/]+)/virtual-switches(?:\?(.*))?', VirtualSwitchesHandler),
(r'/api/virtual-switches/([^/]+)', VirtualSwitchHandler),
(r'/api/virtual-switches/([^/]+)/operations/get-connected-vnics',
VirtualSwitchGetVnicsHandler),
(r'/api/storage-groups(?:\?(.*))?', StorageGroupsHandler),
(r'/api/storage-groups/([^/]+)', StorageGroupHandler),
(r'/api/storage-groups/([^/]+)/operations/delete',
StorageGroupDeleteHandler),
(r'/api/storage-groups/([^/]+)/operations/modify',
StorageGroupModifyHandler),
(r'/api/storage-groups/([^/]+)/operations/request-fulfillment',
StorageGroupRequestFulfillmentHandler),
(r'/api/storage-groups/([^/]+)/operations/add-candidate-adapter-ports',
StorageGroupAddCandidatePortsHandler),
(r'/api/storage-groups/([^/]+)/operations/remove-candidate-adapter-ports',
StorageGroupRemoveCandidatePortsHandler),
(r'/api/storage-groups/([^/]+)/storage-volumes(?:\?(.*))?',
StorageVolumesHandler),
(r'/api/storage-groups/([^/]+)/storage-volumes/([^/]+)',
StorageVolumeHandler),
(r'/api/cpcs/([^/]+)/capacity-groups(?:\?(.*))?', CapacityGroupsHandler),
(r'/api/cpcs/([^/]+)/capacity-groups/([^/]+)', CapacityGroupHandler),
(r'/api/cpcs/([^/]+)/capacity-groups/([^/]+)/operations/add-partition',
CapacityGroupAddPartitionHandler),
(r'/api/cpcs/([^/]+)/capacity-groups/([^/]+)/operations/remove-partition',
CapacityGroupRemovePartitionHandler),
# Only in classic (or ensemble) mode:
(r'/api/cpcs/([^/]+)/operations/import-profiles',
CpcImportProfilesHandler),
(r'/api/cpcs/([^/]+)/operations/export-profiles',
CpcExportProfilesHandler),
(r'/api/cpcs/([^/]+)/operations/add-temp-capacity',
CpcAddTempCapacityHandler),
(r'/api/cpcs/([^/]+)/operations/remove-temp-capacity',
CpcRemoveTempCapacityHandler),
(r'/api/cpcs/([^/]+)/operations/set-auto-start-list',
CpcSetAutoStartListHandler),
(r'/api/cpcs/([^/]+)/logical-partitions(?:\?(.*))?', LparsHandler),
(r'/api/logical-partitions/([^/]+)', LparHandler),
(r'/api/logical-partitions/([^/]+)/operations/activate',
LparActivateHandler),
(r'/api/logical-partitions/([^/]+)/operations/deactivate',
LparDeactivateHandler),
(r'/api/logical-partitions/([^/]+)/operations/load', LparLoadHandler),
(r'/api/cpcs/([^/]+)/reset-activation-profiles(?:\?(.*))?',
ResetActProfilesHandler),
(r'/api/cpcs/([^/]+)/reset-activation-profiles/([^/]+)',
ResetActProfileHandler),
(r'/api/cpcs/([^/]+)/image-activation-profiles(?:\?(.*))?',
ImageActProfilesHandler),
(r'/api/cpcs/([^/]+)/image-activation-profiles/([^/]+)',
ImageActProfileHandler),
(r'/api/cpcs/([^/]+)/load-activation-profiles(?:\?(.*))?',
LoadActProfilesHandler),
(r'/api/cpcs/([^/]+)/load-activation-profiles/([^/]+)',
LoadActProfileHandler),
) | zhmcclient | /zhmcclient-1.3.2.tar.gz/zhmcclient-1.3.2/zhmcclient_mock/_urihandler.py | _urihandler.py |
====
Zhon
====
.. image:: https://badge.fury.io/py/zhon.svg
:target: https://pypi.org/project/zhon
.. image:: https://github.com/tsroten/zhon/actions/workflows/ci.yml/badge.svg
:target: https://github.com/tsroten/zhon/actions/workflows/ci.yml
Zhon is a Python library that provides constants commonly used in Chinese text
processing.
* Documentation: https://tsroten.github.io/zhon/
* GitHub: https://github.com/tsroten/zhon
* Support: https://github.com/tsroten/zhon/issues
* Free software: `MIT license <http://opensource.org/licenses/MIT>`_
About
-----
Zhon's constants can be used in Chinese text processing, for example:
* Find CJK characters in a string:
.. code:: python
>>> re.findall('[{}]'.format(zhon.hanzi.characters), 'I broke a plate: 我打破了一个盘子.')
['我', '打', '破', '了', '一', '个', '盘', '子']
* Validate Pinyin syllables, words, or sentences:
.. code:: python
>>> re.findall(zhon.pinyin.syllable, 'Yuànzi lǐ tíngzhe yí liàng chē.', re.I)
['Yuàn', 'zi', 'lǐ', 'tíng', 'zhe', 'yí', 'liàng', 'chē']
>>> re.findall(zhon.pinyin.word, 'Yuànzi lǐ tíngzhe yí liàng chē.', re.I)
['Yuànzi', 'lǐ', 'tíngzhe', 'yí', 'liàng', 'chē']
>>> re.findall(zhon.pinyin.sentence, 'Yuànzi lǐ tíngzhe yí liàng chē.', re.I)
['Yuànzi lǐ tíngzhe yí liàng chē.']
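* Find Chinese punctuation marks in a string (a quick sketch; assumes ``zhon.hanzi.punctuation`` as documented):
.. code:: python
    >>> re.findall('[{}]'.format(zhon.hanzi.punctuation), '我打破了一个盘子。')
    ['。']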
Features
--------
Zhon includes the following commonly used constants:
* CJK characters and radicals
* Chinese punctuation marks
* Chinese sentence regular expression pattern
* Pinyin vowels, consonants, lowercase, uppercase, and punctuation
* Pinyin syllable, word, and sentence regular expression patterns
* Zhuyin characters and marks
* Zhuyin syllable regular expression pattern
* CC-CEDICT characters
Getting Started
---------------
* `Install Zhon <https://tsroten.github.io/zhon/installation.html>`_
* `Learn how to use Zhon <https://tsroten.github.io/zhon/api.html>`_
* `Contribute <https://github.com/tsroten/zhon/blob/develop/CONTRIBUTING.rst>`_ documentation, code, or feedback
| zhon | /zhon-2.0.2.tar.gz/zhon-2.0.2/README.rst | README.rst |
import enum
import json
import logging
from typing import Callable, List
from . import hub, protocol
logger = logging.getLogger(__name__)
class HVAC:
def __init__(self, gw: hub.ZhongHongGateway, addr_out: int, addr_in: int):
self.gw = gw
self.addr_out = addr_out
self.addr_in = addr_in
self.ac_addr = protocol.AcAddr(self.addr_out, self.addr_in)
self.gw.add_status_callback(self.ac_addr, self._status_update)
self.status_callback = [] # type: List[Callable]
self.switch_status = None
self.target_temperature = None
self.current_operation = None
self.current_fan_mode = None
self.current_temperature = None
self.error_code = None
self.gw.add_device(self)
def _call_status_update(self):
for func in self.status_callback:
if callable(func):
func(self)
def _status_update(self, ac_status: protocol.AcStatus) -> bool:
assert self.ac_addr == ac_status.ac_addr
dirty = False
for _attr in ("switch_status", "target_temperature",
"current_operation", "current_fan_mode",
"current_temperature", "error_code"):
value = getattr(ac_status, _attr)
if isinstance(value, enum.Enum):
value = value.name
if getattr(self, _attr) != value:
setattr(self, _attr, value)
dirty = True
if dirty:
logger.debug("[callback]hvac %s status updated: %s", self.ac_addr,
self.status())
self._call_status_update()
else:
logger.debug("[callback]hvac %s status remains the same: %s",
self.ac_addr, self.status())
def set_attr(self, func_code, value) -> bool:
if func_code == protocol.FuncCode.CTL_POWER:
self.switch_status = value.name
elif func_code == protocol.FuncCode.CTL_TEMPERATURE:
self.target_temperature = value
elif func_code == protocol.FuncCode.CTL_OPERATION:
self.current_operation = value.name
elif func_code == protocol.FuncCode.CTL_FAN_MODE:
self.current_fan_mode = value.name
self._call_status_update()
def register_update_callback(self, _callable: Callable) -> bool:
if callable(_callable):
self.status_callback.append(_callable)
return True
return False
def send(self, ac_data: protocol.AcData) -> None:
self.gw.send(ac_data)
    def update(self) -> bool:
        # Ask the gateway to query fresh status for this single AC unit.
        self.gw.query_status(self.ac_addr)
        return True
def status(self):
return json.dumps({
"switch_status": self.switch_status,
"target_temperature": self.target_temperature,
"current_operation": self.current_operation,
"current_fan_mode": self.current_fan_mode,
"current_temperature": self.current_temperature,
"error_code": self.error_code
})
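    # Usage sketch (assumes a connected ZhongHongGateway instance `gw`):
    #   ac = HVAC(gw, addr_out=1, addr_in=1)
    #   ac.register_update_callback(lambda dev: print(dev.status()))
    #   ac.update()              # request fresh status via the gateway
    #   ac.set_temperature(26)   # then control the unit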
@property
def operation_list(self):
return [x.name for x in list(protocol.StatusOperation)]
@property
def fan_list(self):
return [x.name for x in list(protocol.StatusFanMode)]
@property
def gw_addr(self):
return self.gw.gw_addr
@property
def is_on(self):
return self.switch_status == protocol.StatusSwitch.ON.name
@property
def min_temp(self):
return 16
@property
def max_temp(self):
return 30
def _ctrl_ac(self, func_code, ctrl_code):
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, func_code, ctrl_code, protocol.CtlStatus.ONE)
request_data.add(self.ac_addr)
self.send(request_data)
def turn_on(self) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_POWER, protocol.StatusSwitch.ON)
def turn_off(self) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_POWER, protocol.StatusSwitch.OFF)
def set_temperature(self, temperature: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_TEMPERATURE, temperature)
def set_fan_mode(self, fan_mode: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_FAN_MODE,
protocol.StatusFanMode[fan_mode])
def set_operation_mode(self, operation_mode: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_OPERATION,
protocol.StatusOperation[operation_mode]) | zhong-hong-hvac | /zhong_hong_hvac-1.0.9.tar.gz/zhong_hong_hvac-1.0.9/zhong_hong_hvac/hvac.py | hvac.py |
import copy
import logging
import struct
from .protocol import (AcData, AcOnline, AcStatus, ChecksumError, CtlStatus,
FuncCode, Header, AcAddr)
logger = logging.getLogger(__name__)
def validate(data_frame):
    """Validate the checksum of a data frame."""
    header = Header.get_header_from_frame(data_frame)
    if not header.is_valid:
        return False
    pos = header.checksum_position
    computed_checksum = sum(
        struct.unpack('B' * pos, data_frame[:pos])) % 256
    frame_checksum = struct.unpack('B', data_frame[pos:pos + 1])[0]
    return computed_checksum == frame_checksum
def get_data_frame(data):
"""find frame in raw data.
Arguments:
data {bytes} -- raw bytes read from wire
Yields:
{bytes} -- a valid frame (checksum checked)
    Returns:
        {bytes} -- remaining bytes that do not yet form a complete frame
"""
data = copy.copy(data)
while data:
try:
if len(data) <= 5:
return data
header = Header.get_header_from_frame(data)
header.check()
except ValueError:
logger.debug("header code unknown: %s", data[:4])
data = data[1:]
continue
payload_length = header.payload_length
total_length = header.length + payload_length + 1
if len(data) < total_length:
logger.error("date length not enough")
return data
date_frame = data[:total_length]
if validate(date_frame):
yield date_frame
else:
logger.error("checksum error and drop this frame: %s", date_frame)
data = data[total_length:]
def parse_data(data_frame):
if not validate(data_frame):
raise ChecksumError("checksum error")
ac_data = AcData(request=False)
ac_data.header = header = Header.get_header_from_frame(data_frame)
logger.debug(str(header))
if header.func_code == FuncCode.STATUS:
if header.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI, CtlStatus.ALL):
for idx in range(header.ac_num):
start = 4 + idx * 10
end = 4 + (idx + 1) * 10
ac_status = AcStatus(
*struct.unpack('B' * 10, data_frame[start:end]))
ac_data.add(ac_status)
elif header.ctl_code == CtlStatus.ONLINE:
for idx in range(header.ac_num):
start = 4 + idx * 3
end = 4 + (idx + 1) * 3
ac_address = AcOnline(
*struct.unpack('BBB', data_frame[start:end]))
ac_data.add(ac_address)
else:
raise TypeError("not support type: %s" % header)
elif header.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_OPERATION,
FuncCode.CTL_FAN_MODE, FuncCode.CTL_TEMPERATURE):
if header.ac_num != 1:
raise TypeError("not support ac control more than one: %s",
header.ac_num)
start = 4
end = start + 2
ac_addr = AcAddr(*struct.unpack('BB', data_frame[start:end]))
ac_data.add(ac_addr)
else:
raise TypeError("not support type: %s" % header)
return ac_data
def get_ac_data(data: bytes) -> AcData:
for data_frame in get_data_frame(data):
yield parse_data(data_frame) | zhong-hong-hvac | /zhong_hong_hvac-1.0.9.tar.gz/zhong_hong_hvac-1.0.9/zhong_hong_hvac/helper.py | helper.py |
import logging
import socket
import time
from collections import defaultdict
from sys import platform
from threading import Thread
from typing import Callable, DefaultDict, List
import attr
from . import helper, protocol
logger = logging.getLogger(__name__)
SOCKET_BUFSIZE = 1024
class ZhongHongGateway:
def __init__(self, ip_addr: str, port: int, gw_addr: int):
self.gw_addr = gw_addr
self.ip_addr = ip_addr
self.port = port
self.sock = None
self.ac_callbacks = defaultdict(
            list)  # type: DefaultDict[protocol.AcAddr, List[Callable]]
self.devices = {}
self._listening = False
self._threads = []
self.max_retry = 5
def __get_socket(self) -> socket.socket:
logger.debug("Opening socket to (%s, %s)", self.ip_addr, self.port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if platform in ('linux', 'linux2'):
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1) # pylint: disable=E1101
if platform in ('darwin', 'linux', 'linux2'):
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
s.connect((self.ip_addr, self.port))
return s
def open_socket(self):
if self.sock:
self.sock.close()
self.sock = None
time.sleep(1)
self.sock = self.__get_socket()
return self.sock
def add_status_callback(self, ac_addr: protocol.AcAddr,
func: Callable) -> None:
logger.debug("%s adding status callback", ac_addr)
self.ac_callbacks[ac_addr].append(func)
def add_device(self, device) -> None:
logger.debug("device %s add to hub %s", device.ac_addr, self.gw_addr)
self.devices[attr.astuple(device.ac_addr)] = device
def get_device(self, addr: protocol.AcAddr):
return self.devices.get(attr.astuple(addr))
def query_status(self, ac_addr: protocol.AcAddr) -> bool:
message = protocol.AcData()
message.header = protocol.Header(self.gw_addr,
protocol.FuncCode.STATUS.value,
protocol.CtlStatus.ONE.value, 1)
message.add(ac_addr)
return self.send(message)
def send(self, ac_data: protocol.AcData) -> None:
def _send(retry_count):
try:
self.sock.settimeout(10.0)
logger.debug("send >> %s", ac_data.hex())
self.sock.send(ac_data.encode())
self.sock.settimeout(None)
except socket.timeout:
logger.error("Connot connect to gateway %s:%s", self.ip_addr,
self.port)
return
except OSError as e:
if e.errno == 32: # Broken pipe
logger.error("OSError 32 raise, Broken pipe", exc_info=e)
if retry_count < self.max_retry:
retry_count += 1
self.open_socket()
_send(retry_count)
_send(0)
def _validate_data(self, data):
if data is None:
logger.error('No data in response from hub %s', data)
return False
return True
def _get_data(self):
if self.sock is None:
self.open_socket()
try:
return self.sock.recv(SOCKET_BUFSIZE)
except ConnectionResetError:
logger.debug("Connection reset by peer")
self.open_socket()
except socket.timeout as e:
logger.error("timeout error", exc_info=e)
except OSError as e:
if e.errno == 9: # when socket close, errorno 9 will raise
logger.debug("OSError 9 raise, socket is closed")
else:
logger.error("unknown error when recv", exc_info=e)
except Exception as e:
logger.error("unknown error when recv", exc_info=e)
return None
def _listen_to_msg(self):
while self._listening:
data = self._get_data()
if not data:
continue
logger.debug("recv data << %s", protocol.bytes_debug_str(data))
for ac_data in helper.get_ac_data(data):
logger.debug("get ac_data << %s", ac_data)
if ac_data.func_code == protocol.FuncCode.STATUS:
for payload in ac_data:
if not isinstance(payload, protocol.AcStatus):
continue
logger.debug("get payload << %s", payload)
for func in self.ac_callbacks[payload.ac_addr]:
func(payload)
elif ac_data.func_code in (protocol.FuncCode.CTL_POWER,
protocol.FuncCode.CTL_TEMPERATURE,
protocol.FuncCode.CTL_OPERATION,
protocol.FuncCode.CTL_FAN_MODE):
header = ac_data.header
for payload in ac_data:
                        device = self.get_device(payload)
                        if device is not None:
                            device.set_attr(header.func_code, header.ctl_code)
def start_listen(self):
"""Start listening."""
if self._listening:
logger.info("Hub %s is listening", self.gw_addr)
return True
if self.sock is None:
self.open_socket()
self._listening = True
thread = Thread(target=self._listen_to_msg, args=())
self._threads.append(thread)
thread.daemon = True
thread.start()
logger.info("Start message listen thread %s", thread.ident)
return True
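    # Usage sketch (assumptions: gateway reachable at 192.168.1.50:9999 with
    # gateway address 1):
    #   gw = ZhongHongGateway('192.168.1.50', 9999, 1)
    #   acs = gw.discovery_ac()   # discover units before listening starts
    #   gw.start_listen()
    #   gw.query_all_status()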
def stop_listen(self):
logger.debug("Stopping hub %s", self.gw_addr)
self._listening = False
if self.sock:
logger.info('Closing socket.')
self.sock.close()
self.sock = None
for thread in self._threads:
thread.join()
def discovery_ac(self):
assert not self._listening
if self.sock is None:
self.open_socket()
ret = []
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, protocol.FuncCode.STATUS, protocol.CtlStatus.ONLINE,
protocol.CtlStatus.ALL)
request_data.add(protocol.AcAddr(0xff, 0xff))
discovered = False
count_down = 10
while not discovered and count_down >= 0:
count_down -= 1
logger.debug("send discovery request: %s", request_data.hex())
self.send(request_data)
data = self._get_data()
            if data is None:
                logger.error("No response from gateway")
                continue
for ac_data in helper.get_ac_data(data):
if ac_data.header != request_data.header:
logger.debug("header not match: %s != %s",
request_data.header, ac_data.header)
continue
for ac_online in ac_data:
assert isinstance(ac_online, protocol.AcOnline)
ret.append((ac_online.addr_out, ac_online.addr_in))
discovered = True
return ret
def query_all_status(self) -> None:
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, protocol.FuncCode.STATUS, protocol.CtlStatus.ALL,
protocol.CtlStatus.ALL)
request_data.add(
protocol.AcAddr(protocol.CtlStatus.ALL, protocol.CtlStatus.ALL))
self.send(request_data) | zhong-hong-hvac | /zhong_hong_hvac-1.0.9.tar.gz/zhong_hong_hvac-1.0.9/zhong_hong_hvac/hub.py | hub.py |
import collections
import collections.abc
import enum
import logging
import struct
from functools import reduce
from typing import Iterator, List
import attr
logger = logging.getLogger(__name__)
def bytes_debug_str(data: bytes):
return '[%s]' % ' '.join([hex(x) for x in bytearray(data)])
class ChecksumError(Exception):
pass
class FuncCode(enum.Enum):
STATUS = 0x50
CTL_POWER = 0x31
CTL_TEMPERATURE = 0x32
CTL_OPERATION = 0x33
CTL_FAN_MODE = 0x34
class CtlStatus(enum.Enum):
ONE = 0x01
MULTI = 0x0f
ONLINE = 0x02
ALL = 0xff
class StatusSwitch(enum.Enum):
ON = 0x01
OFF = 0x00
@classmethod
def new_status_switch(cls, value):
return cls(value % 2)
class StatusOperation(enum.Enum):
COOL = 0x01
DRY = 0x02
FAN_ONLY = 0x04
HEAT = 0x08
class StatusFanMode(enum.Enum):
HIGH = 0x01
MID = 0x02
LOW = 0x04
STATUS_PAYLOAD_LEN = 10
STATUS_ONLINE_PAYLOAD_LEN = 3
AC_ADDR_LEN = 2
@attr.s
class ZhongHongDataStruct:
@staticmethod
def _to_value(element):
if isinstance(element, enum.Enum):
return int(element.value)
return int(element)
def export(self):
return list(map(self._to_value, attr.astuple(self)))
@staticmethod
def _sum(init, element):
return init + ZhongHongDataStruct._to_value(element)
@property
def checksum(self):
return reduce(self._sum, self.export(), 0) % 256
def encode(self):
length = len(self.export())
return struct.pack("B" * length, *self.export())
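# Example (sketch): AcAddr(1, 3).export() == [1, 3], .checksum == 4 and
# .encode() == b'\x01\x03' -- every struct contributes its byte sum to the
# overall frame checksum.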
@attr.s(slots=True, hash=True)
class Header(ZhongHongDataStruct):
gw_addr = attr.ib()
_func_code = attr.ib(converter=ZhongHongDataStruct._to_value)
_ctl_code = attr.ib(converter=ZhongHongDataStruct._to_value)
ac_num = attr.ib(cmp=False)
@property
def is_valid(self):
try:
self.check()
except ValueError:
logger.debug("header not valid: %s", self.export())
return False
return True
def check(self):
self.func_code
self.ctl_code
@classmethod
def get_header_from_frame(cls, data_frame):
if len(data_frame) < 4:
return None
return cls(*struct.unpack("BBBB", data_frame[:4]))
@property
def func_code(self):
return FuncCode(self._func_code)
@property
def ctl_code(self):
if self.func_code == FuncCode.STATUS:
return CtlStatus(self._ctl_code)
elif self.func_code == FuncCode.CTL_POWER:
return StatusSwitch(self._ctl_code)
elif self.func_code == FuncCode.CTL_TEMPERATURE:
return self._ctl_code
elif self.func_code == FuncCode.CTL_FAN_MODE:
return StatusFanMode(self._ctl_code)
elif self.func_code == FuncCode.CTL_OPERATION:
return StatusOperation(self._ctl_code)
return None
def __str__(self):
return "Header: gw_addr %s, func: %s, ctl: %s, ac_num: %s" % (
self.gw_addr, self.func_code, self.ctl_code, self.ac_num)
def is_status_update(self):
if self.func_code != FuncCode.STATUS:
return False
if self.ctl_code not in (CtlStatus.ALL, CtlStatus.ONE,
CtlStatus.MULTI):
return False
return True
@property
def length(self):
return 4
@property
def payload_length(self):
if self.func_code == FuncCode.STATUS:
if self.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI,
CtlStatus.ALL):
payload_length = STATUS_PAYLOAD_LEN * self.ac_num
elif self.ctl_code == CtlStatus.ONLINE:
payload_length = STATUS_ONLINE_PAYLOAD_LEN * self.ac_num
else:
raise Exception("unknown ctrl code: %s", self.header.export())
elif self.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_TEMPERATURE,
FuncCode.CTL_OPERATION, FuncCode.CTL_FAN_MODE):
payload_length = AC_ADDR_LEN * self.ac_num
else:
raise Exception("unknown func code: %s", self.header.export())
return payload_length
@property
def checksum_position(self):
return self.length + self.payload_length
@attr.s(slots=True, hash=True)
class AcAddr(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
def __str__(self):
return "AC %s-%s" % (self.addr_out, self.addr_in)
@attr.s(slots=True, hash=True)
class AcOnline(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
online_status = attr.ib(cmp=False)
@property
def ac_addr(self):
return AcAddr(self.addr_out, self.addr_in)
def __str__(self):
return "%s online_status: %s" % (self.ac_addr, self.online_status)
@attr.s(slots=True)
class AcStatus(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
switch_status = attr.ib(converter=StatusSwitch.new_status_switch)
target_temperature = attr.ib()
current_operation = attr.ib(converter=StatusOperation)
current_fan_mode = attr.ib(converter=StatusFanMode)
current_temperature = attr.ib()
error_code = attr.ib()
padding1 = attr.ib()
padding2 = attr.ib()
@property
def ac_addr(self):
return AcAddr(self.addr_out, self.addr_in)
def __str__(self):
return "AC %s-%s power %s, current_operation %s, speed %s, target_temp %s, room_temp %s" % (
self.addr_out, self.addr_in, self.switch_status,
self.current_operation, self.current_fan_mode,
self.target_temperature, self.current_temperature)
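# Payload layout sketch: the 10 status bytes unpack positionally into
# AcStatus, e.g. AcStatus(1, 1, 1, 25, 1, 2, 24, 0, 0, 0) describes
# AC 1-1: switched ON, operation COOL, fan HIGH, target 25, room 24, no error.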
@attr.s(slots=True)
class AcData(collections.abc.Iterable):
header = attr.ib(init=False) # type: Header
payload = attr.ib(
attr.Factory(collections.deque),
init=False) # type: List[ZhongHongDataStruct]
request = attr.ib(True)
def add(self, data):
self.payload.append(data)
def __str__(self):
return '\n'.join([str(self.header)] + [str(x) for x in self.payload])
def __iter__(self) -> Iterator[ZhongHongDataStruct]:
yield from iter(self.payload)
@property
def length(self):
header_length = self.header.length
checksum_length = 1
if self.func_code == FuncCode.STATUS:
if self.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI,
CtlStatus.ALL):
payload_length = STATUS_PAYLOAD_LEN * self.ac_num
            elif self.ctl_code == CtlStatus.ONLINE:
payload_length = STATUS_ONLINE_PAYLOAD_LEN * self.ac_num
else:
raise Exception("unknown ctrl code: %s", self.header.export())
elif self.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_TEMPERATURE,
FuncCode.CTL_OPERATION, FuncCode.CTL_FAN_MODE):
            payload_length = AC_ADDR_LEN * self.ac_num
else:
raise Exception("unknown func code: %s", self.header.export())
return header_length + checksum_length + payload_length
@property
def ac_num(self):
return self.header.ac_num
@property
def func_code(self):
return self.header.func_code
@property
def ctl_code(self):
return self.header.ctl_code
@property
def is_request(self):
'''Is this data a Request or Response.'''
return self.request
@property
def checksum(self):
return (self.header.checksum +
sum([item.checksum for item in self.payload])) % 256
@property
def bin_checksum(self):
return struct.pack('B', self.checksum)
def encode(self):
return b''.join([self.header.encode()] +
[x.encode()
for x in self.payload] + [self.bin_checksum])
def hex(self):
return bytes_debug_str(self.encode()) | zhong-hong-hvac | /zhong_hong_hvac-1.0.9.tar.gz/zhong_hong_hvac-1.0.9/zhong_hong_hvac/protocol.py | protocol.py |
import enum
import json
import logging
from typing import Callable, List
from . import hub, protocol
logger = logging.getLogger(__name__)
class HVAC:
def __init__(self, gw: hub.ZhongHongGateway, addr_out: int, addr_in: int):
self.gw = gw
self.addr_out = addr_out
self.addr_in = addr_in
self.ac_addr = protocol.AcAddr(self.addr_out, self.addr_in)
self.gw.add_status_callback(self.ac_addr, self._status_update)
self.status_callback = [] # type: List[Callable]
self.switch_status = None
self.target_temperature = None
self.current_operation = None
self.current_fan_mode = None
self.current_temperature = None
self.error_code = None
self.gw.add_device(self)
def call_status_update(self):
for func in self.status_callback:
if callable(func):
func(self)
def _status_update(self, ac_status: protocol.AcStatus) -> bool:
assert self.ac_addr == ac_status.ac_addr
dirty = False
for _attr in ("switch_status", "target_temperature",
"current_operation", "current_fan_mode",
"current_temperature", "error_code"):
value = getattr(ac_status, _attr)
if isinstance(value, enum.Enum):
value = value.name
if getattr(self, _attr) != value:
setattr(self, _attr, value)
dirty = True
if dirty:
logger.debug("[callback]hvac %s status updated: %s", self.ac_addr,
self.status())
self.call_status_update()
else:
logger.debug("[callback]hvac %s status remains the same: %s",
self.ac_addr, self.status())
def set_attr(self, func_code, value) -> bool:
if func_code == protocol.FuncCode.CTL_POWER:
self.switch_status = value.name
elif func_code == protocol.FuncCode.CTL_TEMPERATURE:
self.target_temperature = value
elif func_code == protocol.FuncCode.CTL_OPERATION:
self.current_operation = value.name
elif func_code == protocol.FuncCode.CTL_FAN_MODE:
self.current_fan_mode = value.name
self.call_status_update()
def register_update_callback(self, _callable: Callable) -> bool:
if callable(_callable):
self.status_callback.append(_callable)
return True
return False
def send(self, ac_data: protocol.AcData) -> None:
        try:
            self.gw.send(ac_data)
        except Exception:
            logger.exception("send to gateway failed")
            self.error_code = "lost gateway"
    def update(self) -> bool:
        # Ask the gateway to query fresh status for this single AC unit.
        self.gw.query_status(self.ac_addr)
        return True
def status(self):
return json.dumps({
"switch_status": self.switch_status,
"target_temperature": self.target_temperature,
"current_operation": self.current_operation,
"current_fan_mode": self.current_fan_mode,
"current_temperature": self.current_temperature,
"error_code": self.error_code
})
@property
def operation_list(self):
return [x.name for x in list(protocol.StatusOperation)]
@property
def fan_list(self):
return [x.name for x in list(protocol.StatusFanMode)]
@property
def gw_addr(self):
return self.gw.gw_addr
@property
def is_on(self):
return self.switch_status == protocol.StatusSwitch.ON.name
@property
def min_temp(self):
return 16
@property
def max_temp(self):
return 30
def _ctrl_ac(self, func_code, ctrl_code):
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, func_code, ctrl_code, protocol.CtlStatus.ONE)
request_data.add(self.ac_addr)
self.send(request_data)
def turn_on(self) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_POWER, protocol.StatusSwitch.ON)
def turn_off(self) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_POWER, protocol.StatusSwitch.OFF)
def set_temperature(self, temperature: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_TEMPERATURE, temperature)
def set_fan_mode(self, fan_mode: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_FAN_MODE,
protocol.StatusFanMode[fan_mode])
def set_operation_mode(self, operation_mode: str) -> None:
self._ctrl_ac(protocol.FuncCode.CTL_OPERATION,
protocol.StatusOperation[operation_mode]) | zhonghong-climate | /zhonghong_climate-1.0.5.tar.gz/zhonghong_climate-1.0.5/zhonghong_climate/hvac.py | hvac.py |
import copy
import logging
import struct
from .protocol import (AcData, AcOnline, AcStatus, ChecksumError, CtlStatus,
FuncCode, Header, AcAddr)
logger = logging.getLogger(__name__)
def validate(data_frame):
    """Validate the checksum of a data frame."""
    header = Header.get_header_from_frame(data_frame)
    if not header.is_valid:
        return False
    pos = header.checksum_position
    computed_checksum = sum(
        struct.unpack('B' * pos, data_frame[:pos])) % 256
    frame_checksum = struct.unpack('B', data_frame[pos:pos + 1])[0]
    return computed_checksum == frame_checksum
def get_data_frame(data):
"""find frame in raw data.
Arguments:
data {bytes} -- raw bytes read from wire
Yields:
{bytes} -- a valid frame (checksum checked)
    Returns:
        {bytes} -- remaining bytes that do not yet form a complete frame
"""
data = copy.copy(data)
while data:
try:
if len(data) <= 5:
return data
header = Header.get_header_from_frame(data)
header.check()
except ValueError:
logger.debug("header code unknown: %s", data[:4])
data = data[1:]
continue
payload_length = header.payload_length
total_length = header.length + payload_length + 1
if len(data) < total_length:
logger.error("date length not enough")
return data
date_frame = data[:total_length]
if validate(date_frame):
yield date_frame
else:
logger.error("checksum error and drop this frame: %s", date_frame)
data = data[total_length:]
def parse_data(data_frame):
if not validate(data_frame):
raise ChecksumError("checksum error")
ac_data = AcData(request=False)
ac_data.header = header = Header.get_header_from_frame(data_frame)
logger.debug(str(header))
if header.func_code == FuncCode.STATUS:
if header.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI, CtlStatus.ALL):
for idx in range(header.ac_num):
start = 4 + idx * 10
end = 4 + (idx + 1) * 10
ac_status = AcStatus(
*struct.unpack('B' * 10, data_frame[start:end]))
ac_data.add(ac_status)
elif header.ctl_code == CtlStatus.ONLINE:
for idx in range(header.ac_num):
start = 4 + idx * 3
end = 4 + (idx + 1) * 3
ac_address = AcOnline(
*struct.unpack('BBB', data_frame[start:end]))
ac_data.add(ac_address)
else:
raise TypeError("not support type: %s" % header)
elif header.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_OPERATION,
FuncCode.CTL_FAN_MODE, FuncCode.CTL_TEMPERATURE):
if header.ac_num != 1:
raise TypeError("not support ac control more than one: %s",
header.ac_num)
start = 4
end = start + 2
ac_addr = AcAddr(*struct.unpack('BB', data_frame[start:end]))
ac_data.add(ac_addr)
else:
raise TypeError("not support type: %s" % header)
return ac_data
def get_ac_data(data: bytes) -> AcData:
for data_frame in get_data_frame(data):
yield parse_data(data_frame) | zhonghong-climate | /zhonghong_climate-1.0.5.tar.gz/zhonghong_climate-1.0.5/zhonghong_climate/helper.py | helper.py |
import logging
import socket
import time
from collections import defaultdict
from sys import platform
from threading import Thread
from typing import Callable, DefaultDict, List
import attr
from . import helper, protocol
logger = logging.getLogger(__name__)
SOCKET_BUFSIZE = 1024
class ZhongHongGateway:
def __init__(self, ip_addr: str, port: int, gw_addr: int):
self.gw_addr = gw_addr
self.ip_addr = ip_addr
self.port = port
self.available = True
self.sock = None
self.ac_callbacks = defaultdict(
            list)  # type: DefaultDict[protocol.AcAddr, List[Callable]]
self.devices = {}
self._listening = False
self._threads = []
self.max_retry = 5
def __get_socket(self) -> socket.socket:
logger.debug("Opening socket to (%s, %s)", self.ip_addr, self.port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if platform in ('linux', 'linux2'):
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1) # pylint: disable=E1101
if platform in ('darwin', 'linux', 'linux2'):
s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
s.connect((self.ip_addr, self.port))
return s
def open_socket(self):
if self.sock:
self.sock.close()
self.sock = None
time.sleep(1)
self.sock = self.__get_socket()
return self.sock
def add_status_callback(self, ac_addr: protocol.AcAddr,
func: Callable) -> None:
logger.debug("%s adding status callback", ac_addr)
self.ac_callbacks[ac_addr].append(func)
def add_device(self, device) -> None:
logger.debug("device %s add to hub %s", device.ac_addr, self.gw_addr)
self.devices[attr.astuple(device.ac_addr)] = device
def get_device(self, addr: protocol.AcAddr):
return self.devices.get(attr.astuple(addr))
def query_status(self, ac_addr: protocol.AcAddr) -> bool:
message = protocol.AcData()
message.header = protocol.Header(self.gw_addr,
protocol.FuncCode.STATUS.value,
protocol.CtlStatus.ONE.value, 1)
message.add(ac_addr)
return self.send(message)
def update_gw_status(self, status: bool) -> None:
if self.available == status:
return
self.available = status
if not self.available:
for addr in self.devices:
self.devices[addr].error_code = -1
self.devices[addr].call_status_update()
def send(self, ac_data: protocol.AcData) -> None:
def _send(retry_count):
try:
if self.sock is None:
self.open_socket()
self.sock.settimeout(10.0)
logger.debug("send >> %s", ac_data.hex())
self.sock.send(ac_data.encode())
self.sock.settimeout(None)
except socket.timeout:
logger.debug("Connot connect to gateway %s:%s", self.ip_addr,
self.port)
self.update_gw_status(False)
return
except OSError as e:
if e.errno == 32: # Broken pipe
logger.debug("OSError 32 raise, Broken pipe", exc_info=e)
if retry_count < self.max_retry:
retry_count += 1
self.open_socket()
_send(retry_count)
self.update_gw_status(False)
_send(0)
def _validate_data(self, data):
if data is None:
logger.error('No data in response from hub %s', data)
return False
return True
def _get_data(self):
try:
if self.sock is None:
self.open_socket()
return self.sock.recv(SOCKET_BUFSIZE)
except ConnectionResetError:
logger.debug("Connection reset by peer")
self.open_socket()
self.update_gw_status(False)
except socket.timeout as e:
logger.debug("timeout error", exc_info=e)
self.update_gw_status(False)
except OSError as e:
if e.errno == 9: # when socket close, errorno 9 will raise
logger.debug("OSError 9 raise, socket is closed")
else:
logger.debug("unknown error when recv", exc_info=e)
self.update_gw_status(False)
except Exception as e:
logger.debug("unknown error when recv", exc_info=e)
self.update_gw_status(False)
return None
def _listen_to_msg(self):
while self._listening:
data = self._get_data()
if not data:
continue
logger.debug("recv data << %s", protocol.bytes_debug_str(data))
self.update_gw_status(True)
for ac_data in helper.get_ac_data(data):
logger.debug("get ac_data << %s", ac_data)
if ac_data.func_code == protocol.FuncCode.STATUS:
for payload in ac_data:
if not isinstance(payload, protocol.AcStatus):
continue
logger.debug("get payload << %s", payload)
for func in self.ac_callbacks[payload.ac_addr]:
func(payload)
elif ac_data.func_code in (protocol.FuncCode.CTL_POWER,
protocol.FuncCode.CTL_TEMPERATURE,
protocol.FuncCode.CTL_OPERATION,
protocol.FuncCode.CTL_FAN_MODE):
header = ac_data.header
for payload in ac_data:
                    device = self.get_device(payload)
                    if device is None:
                        continue  # ignore ACs that were never registered
                    device.set_attr(header.func_code, header.ctl_code)
def start_listen(self):
"""Start listening."""
if self._listening:
logger.info("Hub %s is listening", self.gw_addr)
return True
if self.sock is None:
self.open_socket()
self._listening = True
thread = Thread(target=self._listen_to_msg, args=())
self._threads.append(thread)
thread.daemon = True
thread.start()
logger.info("Start message listen thread %s", thread.ident)
return True
def stop_listen(self):
logger.debug("Stopping hub %s", self.gw_addr)
self._listening = False
if self.sock:
logger.info('Closing socket.')
self.sock.close()
self.sock = None
for thread in self._threads:
thread.join()
def device_in_list(self, addr_out, addr_in, device_list) -> bool:
for (item_addr_out, item_addr_in) in device_list:
if addr_out == item_addr_out and item_addr_in == addr_in:
return True
return False
def discovery_ac(self):
assert not self._listening
if self.sock is None:
self.open_socket()
ret = []
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, protocol.FuncCode.STATUS, protocol.CtlStatus.ONLINE,
protocol.CtlStatus.ALL)
request_data.add(protocol.AcAddr(0xff, 0xff))
discovered = False
count_down = 10
while not discovered and count_down >= 0:
count_down -= 1
logger.debug("send discovery request: %s", request_data.hex())
self.send(request_data)
data = self._get_data()
            if data is None:
                logger.error("No response from gateway")
                continue
for ac_data in helper.get_ac_data(data):
if ac_data.header != request_data.header:
logger.debug("header not match: %s != %s",
request_data.header, ac_data.header)
continue
for ac_online in ac_data:
assert isinstance(ac_online, protocol.AcOnline)
if not self.device_in_list(ac_online.addr_out, ac_online.addr_in, ret):
ret.append((ac_online.addr_out, ac_online.addr_in))
discovered = True
return ret
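
    # Usage sketch (illustrative comment, not original code; the `hub`
    # instance name is assumed): the (addr_out, addr_in) pairs returned
    # above can seed status polling, e.g.
    #   for out, in_ in hub.discovery_ac():
    #       hub.query_status(protocol.AcAddr(out, in_))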
def query_all_status(self) -> None:
request_data = protocol.AcData()
request_data.header = protocol.Header(
self.gw_addr, protocol.FuncCode.STATUS, protocol.CtlStatus.ALL,
protocol.CtlStatus.ALL)
request_data.add(
protocol.AcAddr(protocol.CtlStatus.ALL, protocol.CtlStatus.ALL))
        self.send(request_data)

# ==== end of file: zhonghong_climate/hub.py (package zhonghong-climate) ====
import collections
import enum
import logging
import struct
from functools import reduce
from typing import Iterator, List
import attr
logger = logging.getLogger(__name__)
def bytes_debug_str(data: bytes):
return '[%s]' % ' '.join([hex(x) for x in bytearray(data)])
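
# Illustrative sketch (comment added for clarity, not original code): each
# byte is rendered with hex() and space-joined, so
# bytes_debug_str(b'\x01\x20\xff') == '[0x1 0x20 0xff]'.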
class ChecksumError(Exception):
pass
class FuncCode(enum.Enum):
STATUS = 0x50
CTL_POWER = 0x31
CTL_TEMPERATURE = 0x32
CTL_OPERATION = 0x33
CTL_FAN_MODE = 0x34
class CtlStatus(enum.Enum):
ONE = 0x01
MULTI = 0x0f
ONLINE = 0x02
ALL = 0xff
class StatusSwitch(enum.Enum):
ON = 0x01
OFF = 0x00
@classmethod
def new_status_switch(cls, value):
return cls(value % 2)
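
    # Illustrative comment: any odd raw value maps to ON and any even value
    # to OFF, e.g. StatusSwitch.new_status_switch(3) is StatusSwitch.ON.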
class StatusOperation(enum.Enum):
COOL = 0x01
DRY = 0x02
FAN_ONLY = 0x04
HEAT = 0x08
class StatusFanMode(enum.Enum):
AUTO = 0x00
HIGH = 0x01
MID = 0x02
MID_HIGH = 0x03
LOW = 0x04
MID_LOW = 0x05
STATUS_PAYLOAD_LEN = 10
STATUS_ONLINE_PAYLOAD_LEN = 3
AC_ADDR_LEN = 2
@attr.s
class ZhongHongDataStruct:
@staticmethod
def _to_value(element):
if isinstance(element, enum.Enum):
return int(element.value)
return int(element)
def export(self):
return list(map(self._to_value, attr.astuple(self)))
@staticmethod
def _sum(init, element):
return init + ZhongHongDataStruct._to_value(element)
@property
def checksum(self):
return reduce(self._sum, self.export(), 0) % 256
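
    # Checksum sketch (comment added for clarity): export() flattens the
    # attrs fields to ints, so a Header(1, 0x50, 0x01, 1) checksums to
    # (1 + 0x50 + 0x01 + 1) % 256 == 0x53.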
def encode(self):
length = len(self.export())
return struct.pack("B" * length, *self.export())
@attr.s(slots=True, hash=True)
class Header(ZhongHongDataStruct):
gw_addr = attr.ib()
_func_code = attr.ib(converter=ZhongHongDataStruct._to_value)
_ctl_code = attr.ib(converter=ZhongHongDataStruct._to_value)
ac_num = attr.ib(cmp=False)
@property
def is_valid(self):
try:
self.check()
except ValueError:
logger.debug("header not valid: %s", self.export())
return False
return True
def check(self):
self.func_code
self.ctl_code
@classmethod
def get_header_from_frame(cls, data_frame):
if len(data_frame) < 4:
return None
return cls(*struct.unpack("BBBB", data_frame[:4]))
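
    # For example (illustrative): a frame starting with b'\x01\x50\x01\x01'
    # unpacks to a Header with gw_addr=1, func_code=FuncCode.STATUS,
    # ctl_code=CtlStatus.ONE and ac_num=1.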
@property
def func_code(self):
return FuncCode(self._func_code)
@property
def ctl_code(self):
if self.func_code == FuncCode.STATUS:
return CtlStatus(self._ctl_code)
elif self.func_code == FuncCode.CTL_POWER:
return StatusSwitch(self._ctl_code)
elif self.func_code == FuncCode.CTL_TEMPERATURE:
return self._ctl_code
elif self.func_code == FuncCode.CTL_FAN_MODE:
return StatusFanMode(self._ctl_code)
elif self.func_code == FuncCode.CTL_OPERATION:
return StatusOperation(self._ctl_code)
return None
def __str__(self):
return "Header: gw_addr %s, func: %s, ctl: %s, ac_num: %s" % (
self.gw_addr, self.func_code, self.ctl_code, self.ac_num)
def is_status_update(self):
if self.func_code != FuncCode.STATUS:
return False
if self.ctl_code not in (CtlStatus.ALL, CtlStatus.ONE,
CtlStatus.MULTI):
return False
return True
@property
def length(self):
return 4
@property
def payload_length(self):
if self.func_code == FuncCode.STATUS:
if self.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI,
CtlStatus.ALL):
payload_length = STATUS_PAYLOAD_LEN * self.ac_num
elif self.ctl_code == CtlStatus.ONLINE:
payload_length = STATUS_ONLINE_PAYLOAD_LEN * self.ac_num
else:
raise Exception("unknown ctrl code: %s", self.header.export())
elif self.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_TEMPERATURE,
FuncCode.CTL_OPERATION, FuncCode.CTL_FAN_MODE):
payload_length = AC_ADDR_LEN * self.ac_num
else:
raise Exception("unknown func code: %s", self.header.export())
return payload_length
@property
def checksum_position(self):
return self.length + self.payload_length
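
    # Sizing sketch (illustrative arithmetic from the constants above): a
    # STATUS/ALL frame reporting 2 ACs has payload_length 10 * 2 == 20, so
    # its checksum byte sits at offset 4 + 20 == 24.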
@attr.s(slots=True, hash=True)
class AcAddr(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
def __str__(self):
return "AC %s-%s" % (self.addr_out, self.addr_in)
@attr.s(slots=True, hash=True)
class AcOnline(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
online_status = attr.ib(cmp=False)
@property
def ac_addr(self):
return AcAddr(self.addr_out, self.addr_in)
def __str__(self):
return "%s online_status: %s" % (self.ac_addr, self.online_status)
@attr.s(slots=True)
class AcStatus(ZhongHongDataStruct):
addr_out = attr.ib()
addr_in = attr.ib()
switch_status = attr.ib(converter=StatusSwitch.new_status_switch)
target_temperature = attr.ib()
current_operation = attr.ib(converter=StatusOperation)
current_fan_mode = attr.ib(converter=StatusFanMode)
current_temperature = attr.ib()
error_code = attr.ib()
padding1 = attr.ib()
padding2 = attr.ib()
@property
def ac_addr(self):
return AcAddr(self.addr_out, self.addr_in)
def __str__(self):
return "AC %s-%s power %s, current_operation %s, speed %s, target_temp %s, room_temp %s" % (
self.addr_out, self.addr_in, self.switch_status,
self.current_operation, self.current_fan_mode,
self.target_temperature, self.current_temperature)
@attr.s(slots=True)
class AcData(collections.abc.Iterable):
header = attr.ib(init=False) # type: Header
payload = attr.ib(
attr.Factory(collections.deque),
init=False) # type: List[ZhongHongDataStruct]
request = attr.ib(True)
def add(self, data):
self.payload.append(data)
def __str__(self):
return '\n'.join([str(self.header)] + [str(x) for x in self.payload])
def __iter__(self) -> Iterator[ZhongHongDataStruct]:
yield from iter(self.payload)
@property
def length(self):
header_length = self.header.length
checksum_length = 1
        if self.func_code == FuncCode.STATUS:
            if self.ctl_code in (CtlStatus.ONE, CtlStatus.MULTI,
                                 CtlStatus.ALL):
                payload_length = STATUS_PAYLOAD_LEN * self.ac_num
            elif self.ctl_code == CtlStatus.ONLINE:
                payload_length = STATUS_ONLINE_PAYLOAD_LEN * self.ac_num
            else:
                raise Exception("unknown ctrl code: %s" % self.header.export())
        elif self.func_code in (FuncCode.CTL_POWER, FuncCode.CTL_TEMPERATURE,
                                FuncCode.CTL_OPERATION, FuncCode.CTL_FAN_MODE):
            payload_length = AC_ADDR_LEN * self.ac_num
        else:
            raise Exception("unknown func code: %s" % self.header.export())
return header_length + checksum_length + payload_length
@property
def ac_num(self):
return self.header.ac_num
@property
def func_code(self):
return self.header.func_code
@property
def ctl_code(self):
return self.header.ctl_code
@property
def is_request(self):
'''Is this data a Request or Response.'''
return self.request
@property
def checksum(self):
return (self.header.checksum +
sum([item.checksum for item in self.payload])) % 256
@property
def bin_checksum(self):
return struct.pack('B', self.checksum)
def encode(self):
return b''.join([self.header.encode()] +
[x.encode()
for x in self.payload] + [self.bin_checksum])
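
    # Frame layout sketch (comment added for clarity): encode() emits the
    # 4-byte header, then each payload's bytes, then one trailing checksum
    # byte equal to the byte-sum of everything before it, mod 256.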
def hex(self):
        return bytes_debug_str(self.encode())

# ==== end of file: zhonghong_climate/protocol.py (package zhonghong-climate) ====
import os
import hashlib
import logging
from pathlib import Path
from .exception import ZhongkuiParseError, ZhongkuiInvalidFile
from .utils import calculateEntropy
from .parse import exiftool
from .corpus import EXIFTOOL
log = logging.getLogger(__name__)
FILE_CHUNK_SIZE = 16 * 1024
class File(object):
"""zhongkui basic file class"""
def __init__(self, fpath, chunk_size=FILE_CHUNK_SIZE):
"""
Args:
            fpath: os.PathLike.
            chunk_size: default is 16 * 1024.
        Raises:
            ZhongkuiInvalidFile
"""
self.fpath = fpath
self.chunk_size = chunk_size
# check valid
if not self.isValidFile():
raise ZhongkuiInvalidFile(f'{fpath} is not a valid file')
# for cache property
self._file_data = None
self._md5 = None
self._sha256 = None
self._sha1 = None
self._is_probably_packed = None
# for cache info
self._basic = None
try:
self._exiftool = exiftool(fpath)
except ZhongkuiParseError:
raise ZhongkuiInvalidFile(f'{fpath} is not a valid file')
def isValidFile(self):
return (Path(self.fpath).exists() and Path(self.fpath).is_file()
and os.path.getsize(self.fpath) != 0)
def genChunk(self):
"""Read file contents in chunks (generator)."""
with open(self.fpath, "rb") as fd:
while True:
chunk = fd.read(self.chunk_size)
if not chunk:
break
yield chunk
def getChunkEntropy(self):
entropy = []
for data in self.genChunk():
entropy.append(calculateEntropy(data))
return entropy
def _calHash(self):
"""Calculate all possible hashes for this file."""
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()
for chunk in self.genChunk():
md5.update(chunk)
sha1.update(chunk)
sha256.update(chunk)
self._md5 = md5.hexdigest()
self._sha1 = sha1.hexdigest()
self._sha256 = sha256.hexdigest()
@property
def fileName(self):
return Path(self.fpath).name
@property
def fileType(self):
file_type = self._exiftool.get(EXIFTOOL.FILETYPE)
if file_type is not None:
file_type = file_type.replace(' ', '').lower()
return file_type
@property
def fileData(self):
if self._file_data is None:
with open(self.fpath, "rb") as f:
self._file_data = f.read()
return self._file_data
@property
def md5(self):
if self._md5 is None:
self._calHash()
return self._md5
@property
def sha1(self):
if self._sha1 is None:
self._calHash()
return self._sha1
@property
def sha256(self):
if self._sha256 is None:
self._calHash()
return self._sha256
@property
def isProbablyPacked(self) -> bool:
"""A file is probably packed:
1. entropy of at least 20% data > 7.4.
"""
if self._is_probably_packed is not None:
return self._is_probably_packed
total_file_data = len(self.fileData)
total_compressed_data = 0
for data in self.genChunk():
ck_entropy = calculateEntropy(data)
ck_length = len(data)
if ck_entropy > 7.4:
total_compressed_data += ck_length
if ((1.0 * total_compressed_data) / total_file_data) > 0.2:
self._is_probably_packed = True
else:
self._is_probably_packed = False
return self._is_probably_packed
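
    # Heuristic sketch (illustrative numbers): in a 100 KiB file where
    # 30 KiB worth of chunks exceed entropy 7.4, the ratio 0.3 > 0.2, so
    # the file is flagged as probably packed.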
def fileSize(self, easy_read=True):
if easy_read:
return self._exiftool.get(EXIFTOOL.FILESIZE)
else:
return os.path.getsize(self.fpath)
def basicInfo(self):
"""file basic info"""
if self._basic is None:
self._basic = {
'name': self.fileName,
'md5': self.md5,
'sha1': self.sha1,
'sha256': self.sha256,
'fileType': self.fileType,
'fileSize': self.fileSize(easy_read=False),
'isProbablyPacked': self.isProbablyPacked
}
        return self._basic

# ==== end of file: zhongkui/core/file.py (package zhongkui-core) ====
# For Programmers Who Read Chinese

## Chinese Numerals Module

Programmers who use a Chinese text-processing library naturally read Chinese,
so this library's design philosophy is to name its functions in Chinese, with each name stating its purpose concisely.
Function names written in Simplified Chinese handle Simplified Chinese, and names written in Traditional Chinese handle Traditional Chinese.
Taking the Chinese-numeral functions as an example:

    from zhongwen.number import 中文數字, 中文数字, 大寫中文數字
    中文數字(10600)
    >>> '一萬零六百'

    中文数字(10600)
    >>> '一万零六百'

    大寫中文數字(23232.00518)
    >>> '貳萬參仟貳佰參拾貳點零零伍壹捌'

## Minguo (ROC) Date Handling

Minguo dates are a date format still in use in Taiwan.
This module's 取日期 function converts a Minguo date string into a datetime value,
while 民國日期 converts a datetime back into a string in the specified format. Examples:

    from zhongwen.date import 取日期
    取日期('111.9.23')
    >>> datetime(2022,9,23,0,0)

    取日期('110/12/27')
    >>> datetime(2021,12,27,0,0)

    from zhongwen.date import 民國日期
    民國日期(datetime(2021,12,27,0,0), '%Y年%M月%d日')
    >>> '110年12月27日'

## Chinese Character Detection

Examples of the Chinese-character detection function:

    是否為中文字元('繁')
    >>> True

    是否為中文字元('简')
    >>> True

    是否為中文字元('a')
    >>> False
<!-- end of file: zhongwen-3.14219/README.md (package zhongwen) -->
.. image:: http://img.shields.io/:version-0.2.1-green.svg
Zhot
======
I had some fun teaching myself Perl with my Rock-Paper-Scissors script
over at https://bitbucket.org/AmyDeeDempster/rock-paper-scissors
This is a more extensible version in Python, done in a more mathematical
and object-oriented manner.
Usage
------
Establishing game rules
~~~~~~~~~~~~~~~~~~~~~~~
Supply a Comma-Separated Values file as a command-line argument for this
script. For example:
``zhot moves/moves-5.csv``
If not supplied, a default set of three-move rules will be used.
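
A minimal sketch of what such a rules file might contain (purely
hypothetical; the authoritative format is whatever the bundled files under
``moves/`` use)::

    Rock,blunts,Scissors
    Paper,covers,Rock
    Scissors,cut,Paper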
Gameplay and user input
~~~~~~~~~~~~~~~~~~~~~~~
In the game, type the name of the move you wish to play. This can be
abbreviated.
Other commands available include:
- ``rules`` or ``help``
- ``score``
- ``rounds``
- ``diagram``
- ``exit`` or ``quit``
You can also just hit Return to quit the game.
Rule diagram
~~~~~~~~~~~~
The ``diagram`` command generates, from the rules of the current game,
a vector diagram illustrating those rules.
Dependencies
------------
Built-in
~~~~~~~~
- csv
- random
- sys
- re
- math
Third-party (PyPI)
~~~~~~~~~~~~~~~~~~
- numpy
- svgwrite
- scour
Interpreter
-----------
Python > 3.6
(Tested on macOS and Korora Linux installations of CPython 3.6.5 and
3.7.0b4)
Licence
-------
Creative Commons
See text in LICENCE.html
Further information at
https://creativecommons.org/licences/by-sa/3.0/au/
.. end of file: zhot-0.2.1/README.rst (package zhot)
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.9.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x00\xee\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\xa8\x49\x44\x41\x54\x68\x43\xed\x95\xc1\x0d\xc3\x30\x10\xc3\x92\
\x65\xbc\xff\x3c\x5e\xa6\xfd\x15\x09\xf2\x13\x85\x18\x2e\xe8\xbf\
\x74\x67\xd2\x80\xcf\x63\xf3\x73\x6e\xbe\xff\xe1\x05\x56\x1b\xd4\
\x80\x06\x20\x01\x9f\x10\x04\x88\xe3\x1a\xc0\x08\x61\x81\x06\x20\
\x40\x1c\xd7\x00\x46\x08\x0b\x34\x00\x01\xe2\xb8\x06\x30\x42\x58\
\xa0\x01\x08\x10\xc7\x6f\x06\xc6\x18\x1f\xdc\xf8\x42\xc1\x9c\xf3\
\xb7\xb7\x17\x78\x01\xf8\x63\x84\x06\x56\x50\xbf\xce\xfc\x5f\x03\
\xab\xc9\x26\xf3\xfd\xc8\x12\x6a\xcd\x8c\x06\x9a\x34\x93\x2e\x0d\
\x24\xd4\x9a\x19\x0d\x34\x69\x26\x5d\x1a\x48\xa8\x35\x33\x1a\x68\
\xd2\x4c\xba\x34\x90\x50\x6b\x66\x34\xd0\xa4\x99\x74\x69\x20\xa1\
\xd6\xcc\x68\xa0\x49\x33\xe9\xfa\x02\x91\x35\x18\x31\x01\x84\x3f\
\x85\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\xa3\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x04\x35\x49\x44\x41\x54\x58\xc3\xe5\
\x97\xcd\x8f\x54\x45\x14\xc5\x7f\xb7\xea\xd6\x7b\xaf\xdb\x6e\xc7\
\xf9\x40\x9d\x89\x46\x4d\x34\x99\x44\x8d\x1a\x48\x98\xc4\x8c\x1f\
\x1b\xfe\x02\x4c\x5c\xf1\x07\x18\x16\x2e\x4d\x5c\x6b\x58\xc3\x8e\
\xc4\x8d\x1b\x17\xce\x82\x68\x74\x41\x5c\x18\x0d\xe2\xc4\xc6\x00\
\x3d\x60\x50\x51\x19\x60\x02\xa2\x0e\x0c\x83\xd3\xfd\x5e\xf7\x94\
\x8b\xaa\xee\xf9\x60\xe6\x0d\x84\x51\x16\x56\x52\xa9\xce\x7b\xb7\
\xeb\x9e\x3a\xf7\xd4\xa9\x7a\xea\xbd\xe7\x7e\x36\xe5\x3e\xb7\x3e\
\x80\x5d\xbb\x76\xbd\x03\xec\xfd\x8f\xf2\x4e\x35\x1a\x8d\x03\xeb\
\x19\xd8\xbb\xef\xbd\xa3\x3b\x1f\x1f\x76\x00\x9c\x3c\x3a\xcf\xcc\
\x97\x37\x58\x9c\xef\xdc\x53\xa6\xda\xa0\xf2\xdc\x6b\x03\xbc\xb8\
\x67\x10\x80\x8b\x7f\x16\x7c\xf8\xee\x1e\x80\xdb\x00\x70\xfc\xec\
\x1c\xdf\x3f\x30\x04\x78\x2e\xfd\xb8\xc0\xfe\xb7\xce\x6f\xcb\x72\
\x0f\x1d\x79\x9a\x0b\x23\x96\xd3\x9f\x1f\x64\xfc\xd5\x7d\x9b\x6b\
\x40\x45\xb0\x16\x40\x78\x70\x2c\x23\xcb\xb2\x6d\x01\x30\x30\x96\
\x61\x8d\x50\x1b\x7c\x14\x23\x25\x22\x14\x2b\xd8\x18\x91\xd5\x95\
\x73\xe7\xce\x83\x2a\xb8\x04\xd2\x14\xb2\x0c\xd2\x2c\x8c\x49\x0a\
\x49\x12\xde\x77\x3a\x90\xe7\x90\xb7\xa1\xd5\x82\x76\x2b\x8e\x6d\
\x28\x72\xb2\xfa\x38\xd6\x0a\xe3\xaf\xbc\x49\x6b\xf1\xfa\xe6\x00\
\xac\x15\xac\x15\x04\xb0\x46\xd8\xbd\x7b\xe7\x16\x6b\xeb\x86\xae\
\x80\x5a\xa8\x56\x81\xea\x6d\x51\x8d\xaf\x04\xb5\x82\xf7\xa0\xa6\
\x84\x01\x67\x05\x35\x82\x08\xa8\x0a\x95\x2c\xc3\x23\x20\x1e\x08\
\xc0\xf0\x1e\x2f\x02\xde\x23\x12\x26\x15\x7c\x88\x23\xc4\x21\x1e\
\x3c\x21\x5e\x40\x4d\x58\x18\x40\xd7\x4a\x89\x06\xac\xa0\xda\x63\
\x00\x9a\x33\xbf\x05\x8a\x53\x07\x69\x02\x95\x04\xb2\x34\xf6\x04\
\x12\x07\x4e\xa1\xe8\x40\x5e\x40\x2b\x8f\xbd\x05\x4b\x39\xb4\x73\
\xc8\x0b\x54\x87\x71\x3d\x00\x2a\xe5\x25\x70\x31\x40\xd5\x30\x39\
\xf9\xd2\xd6\x0a\xf3\x3e\xd0\xaf\x16\xaa\x1b\x8b\xf6\xd8\x27\x61\
\x61\xbd\x1c\x25\x25\x20\x00\xf0\x81\x8d\x34\x4d\xa3\x3a\xc3\xb3\
\x98\x11\x89\x6c\x07\xda\x63\x09\x56\x98\x5f\x29\x46\xfc\x61\xcd\
\x72\x7f\x61\x1d\x2d\xd1\x80\x3a\x09\x54\x49\x18\x4f\x34\x2f\xe0\
\x9d\x85\xc4\x21\x89\xc3\x67\x09\x92\x69\xd8\x11\x89\xe2\x13\x87\
\x58\x8b\xef\x76\x91\xbc\x80\xbc\x03\xed\x02\xdf\x6a\x23\xed\x02\
\xf2\x02\x9f\x77\x50\x1d\x45\xd5\x20\x78\x3a\xeb\x54\x78\x9b\x06\
\x9c\x33\x78\x0f\x03\x8f\x24\xbc\xfe\xf2\xf3\x77\x68\xe8\x36\x68\
\xa4\xbe\xf1\xeb\xc6\xfc\xdf\xb1\x04\x52\x5e\x82\x44\x4d\x5f\x84\
\x8f\x0d\xa5\x38\xe7\xb6\xc5\x88\x9e\x18\x4b\xb9\x76\xb3\x03\x08\
\x9d\x52\x11\xaa\x90\xb8\x50\xef\x5a\xc5\x30\x7d\xb1\xcb\x40\xc5\
\xb0\x0e\xf4\x26\xad\x57\xf9\x55\x2e\xe1\xe1\xc6\xd2\x32\xf5\xcc\
\x70\x7d\xc9\x84\x2d\xe9\x4a\x19\x10\x9c\x1a\xc0\x73\xe5\x66\x97\
\x2b\x37\xbb\xac\x51\x57\x3f\xd7\xaa\x64\x7e\xc5\x27\xa2\x29\xac\
\x05\x15\xc3\x9c\x0b\xb5\x77\xa6\x6c\x17\xa8\xc1\xa9\x20\xc8\x1a\
\x35\xaf\x9b\x35\x1a\x8f\x59\x31\x9e\xfe\x7b\xe9\xef\x14\x00\xf1\
\x82\xef\x9b\x58\x30\x2b\x57\x56\x02\x55\x21\xd1\x90\xfc\xe7\x53\
\xdf\xf2\xeb\x99\x13\x2c\x2d\xde\xb8\xa7\xfa\x57\x6a\x03\x3c\xf5\
\xec\x4e\x9e\x79\x61\x02\x0f\xa8\x33\x5b\x31\x10\x03\x7c\x87\xf7\
\xf7\xbf\xc1\xc2\xc2\x02\xb7\x6e\xdd\xa2\x28\x0a\x44\x04\x6b\x2d\
\xd6\x5a\x54\x15\x55\xc5\x39\x87\xaa\x62\xad\xc5\x98\xf0\xdf\xe5\
\xe5\x65\xf2\x3c\xef\xf7\x23\xcd\xf9\xb8\xf2\x2d\x18\x70\x56\x50\
\x17\x18\xdc\x31\x3a\xb6\x72\x4f\x38\x7e\x9c\xe9\xe9\x69\x8c\x31\
\x78\xef\x99\x98\x98\x60\x72\x72\xf2\x8e\x59\xd8\x31\x3a\xd6\xdf\
\x86\xae\xd4\x09\x55\x70\x36\xac\xa2\x56\xaf\xf7\x6b\x39\x33\x33\
\xc3\xd0\xd0\x10\xd6\x5a\xbc\xf7\x34\x9b\xcd\xbb\x02\x50\xab\xd7\
\x70\xd1\x88\xb4\xd4\x88\x14\x9c\x0b\x27\x5c\xa0\x2a\x00\xa8\x56\
\xab\x64\x59\xd6\xa7\xb8\x37\xde\x69\x73\x1a\xa9\x17\x41\x4b\xad\
\x38\x1e\xc7\xbd\x23\xb4\xd7\x8c\x31\x88\x44\xdf\x8f\x3a\xb8\xab\
\x9b\xaf\x35\xa8\x0d\xf3\xf6\x18\x2e\x3d\x8e\x83\x29\x6d\xe3\xd5\
\xdb\x12\xa9\xf7\xe5\x56\x6c\xad\xf4\x91\x0e\x8e\x0c\xc3\xf2\xef\
\xdb\x02\xe0\xa1\x91\x61\xd4\xc2\xb5\x2b\x97\x59\x9c\xbf\xbe\x05\
\x03\x36\xf8\xc0\x60\xad\x02\x0b\xdb\xc3\xc0\x50\xad\xc2\xec\xc5\
\x4b\x9c\xfd\xee\x1b\xce\x9f\x9c\x9e\x03\xa6\x36\x04\x60\x24\x5e\
\x4a\x05\x12\x0b\xed\x91\x27\xa9\x3d\x0c\x6f\x1f\x38\xc8\x66\xc7\
\x81\x27\x3a\xf1\x2a\xe7\x35\x1e\x32\x81\x14\x28\xba\x70\xf9\xea\
\x55\xce\x34\x8e\xd1\xfc\xfa\x8b\xb9\xd9\x1f\x4e\x1d\x02\x0e\x6f\
\x08\xe0\xb3\x8f\x3e\xe0\xa7\xd3\x27\x57\x99\xe9\xda\xa3\x86\x55\
\xe6\xbb\x1e\x04\x1b\x3c\x5f\x1d\x6f\x7c\x77\xee\x8f\xd9\x5f\x0e\
\x01\x87\x1b\x8d\xc6\x5f\x1b\x01\x98\x9a\xfe\xf4\xe3\x7f\xf5\x73\
\x6c\x7d\xf2\x35\x00\xe2\xb7\xda\x81\xff\xdd\xd7\xf1\x3f\x4d\xf0\
\x4b\xb9\xe8\x46\x89\xaf\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x00\xf4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\xae\x49\x44\x41\x54\x68\x43\xed\x99\x41\x0e\x80\x20\x0c\x04\xcb\
\x67\xf8\xff\x7b\xf8\x8c\xc6\x1b\x26\x90\xb4\xd9\xf6\x00\x19\x8f\
\x06\x2b\x9d\x5d\x6a\xc1\x66\x87\x5f\xed\xf0\xf9\x1b\x09\xac\x14\
\xec\xbd\x3f\xab\xfb\x63\x8c\x74\x60\xe9\x01\xbf\x89\x93\x40\x60\
\x61\xa2\x00\x6b\x20\x60\x97\xd5\x50\x2c\x84\x85\xb0\x90\x48\x00\
\x0b\x89\x00\xa9\x42\x58\x08\x0b\x89\x04\xb0\x90\x08\xf0\x57\x85\
\x76\x1b\x11\xf1\x1d\xe9\x8f\xcf\x3b\x3b\x12\x48\xc7\xeb\x08\x88\
\x02\x0e\x48\xa5\x43\xee\x55\x20\x0b\x1b\xc7\x2a\x01\x92\x74\xa3\
\xb4\x12\x01\xbb\x70\xac\xe2\x85\x45\x15\xf2\x92\x32\xab\xf9\xc1\
\x81\x02\x28\x10\x20\xc0\x77\x40\x84\x45\x2b\x81\x85\xb0\x90\x48\
\xe0\x4a\x0b\x15\x30\xd9\x86\x2c\xa9\x42\x24\x10\x20\xf0\x02\xa7\
\x1a\xa8\x31\xa1\x6c\x00\xdd\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x07\xe2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xaf\xc8\x37\x05\x8a\xe9\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x07\x74\x49\x44\x41\x54\x78\xda\xd5\
\x59\x69\x4c\x54\x57\x18\x35\x21\xe2\x8a\xfc\x20\x10\x11\xca\xa8\
\xd8\x61\x70\x60\x06\x4a\x91\xaa\x58\x94\x68\x54\xa4\x2e\x35\x26\
\x60\x63\xa8\x68\x30\x06\xe2\xb8\x47\x62\x0d\xee\x8a\x8e\xe2\x86\
\xbb\x8e\xbb\xb8\xa2\xe3\x2e\x28\x6e\xe3\x8a\x8e\xbb\xb8\x27\x28\
\x7f\xc4\xa4\x09\x86\x5f\xfe\x38\xbd\xe7\xbe\xe2\x65\x22\x10\x41\
\x68\x99\x97\x7c\x2c\x61\xde\x7b\xe7\x7c\xdf\xf9\xce\xfd\xee\xa5\
\x05\x80\x16\x8d\x75\xf5\x8a\x8a\xf2\x16\x61\x13\x51\x24\x22\xa5\
\x45\x13\x5f\x00\xe4\x97\xc6\x02\x6f\xee\x63\x36\xbf\x1d\xd3\xa6\
\x0d\x2c\x9e\x9e\x18\xa0\xd7\xe3\x07\x7f\x7f\x9d\x3b\x10\x20\x78\
\x0b\x01\x67\xb4\x6d\x0b\xc7\xd0\xa1\xb0\xf7\xe8\x81\xe1\xfe\xfe\
\xe8\xa6\xd3\xa5\x35\x5f\x02\x4a\x32\xf9\x04\x9b\xd9\xa1\x03\x4a\
\x4f\x9e\x04\x4e\x9f\x76\x03\x02\x4a\x32\xce\x3f\xda\xb7\x47\x6e\
\x74\x34\x2a\x5f\xbf\x06\x00\xf7\x20\xc0\x06\x8d\x37\x18\xfe\xfe\
\x53\x68\xbd\x30\x3d\x1d\xa8\xa8\x00\x3e\x7f\xd6\x42\x54\xa1\x99\
\x12\x50\x2e\xf3\x5b\x60\x20\x2c\xad\x5a\xa1\x74\xff\x7e\xe0\xd3\
\x27\xd7\x38\x76\xac\x59\x12\x20\x78\x1d\x25\x93\xe4\xed\x0d\xab\
\x68\xd8\xca\x9b\x37\x81\x8f\x1f\x81\xf2\x72\x15\x1f\x3e\x00\x87\
\x0f\x7f\x21\x10\xd1\xbd\xfb\x26\xde\xf7\x7f\x13\x20\xf8\x61\x94\
\x0c\x2d\xd2\x9e\x9c\x0c\xbc\x79\x03\x94\x95\x01\xef\xdf\x6b\xf1\
\xee\x9d\x16\xa5\xa5\x40\x5e\x9e\x24\x40\x79\xfd\xee\xe7\x87\x04\
\x9d\x0e\x24\x2e\x9e\x91\x45\x32\xff\x35\x01\x82\xcf\x21\x88\x8c\
\x76\xed\x50\xb2\x6a\x95\x06\x9e\x0d\xcb\x78\xf5\x4a\x8b\x97\x2f\
\x81\x17\x2f\xb4\x78\xfe\x1c\x28\x29\x41\x69\x7e\x3e\x1c\x8b\x16\
\xc1\x36\x68\x10\x2c\x5e\x5e\x60\xe5\x06\x06\x07\x83\xae\x25\x22\
\xae\x89\x09\x28\xc9\x8c\xf2\xf1\xc1\xfc\xa0\x20\x94\x9f\x38\x21\
\x81\xe1\xd9\x33\xe0\xe9\x53\x2d\x9e\x3c\x01\x1e\x3f\x06\x1e\x3d\
\x02\x1e\x3e\xd4\xe2\xc1\x03\xe0\xfe\x7d\xc0\xe9\x04\xee\xdd\xd3\
\xe2\xee\x5d\x38\x17\x2e\x84\xd5\x68\x64\x65\x64\x55\xd8\x4b\x7c\
\x47\x93\x10\x60\x86\xe2\xc2\xc2\xa4\x64\xf2\x12\x12\x80\x5b\xb7\
\x6a\x04\x47\x60\x28\x2e\x06\xee\xdc\x01\x6e\xdf\xd6\x3e\xc7\xde\
\xb8\x71\x03\xb8\x7e\x5d\x0b\x87\x03\xb8\x76\x0d\xb8\x7a\x15\xb8\
\x72\x45\x56\xd1\xda\xb5\x2b\x68\xbf\x94\x25\x1d\xad\x51\x09\x50\
\xab\x2c\x75\x86\x78\x81\x73\xce\x9c\xda\x40\xd6\x08\x0e\x97\x2f\
\x03\x97\x2e\x01\x45\x45\xc0\xc5\x8b\xc0\x85\x0b\x40\x61\x21\x70\
\xfe\xbc\x16\x67\xcf\x02\x67\xce\xc8\xb5\xc2\x39\x71\x22\x2c\x2d\
\x5b\x82\x8e\xc6\x6a\x7c\x1f\x01\x65\x91\x45\x6c\x3c\x4a\xa6\x74\
\xeb\xd6\x9a\x80\xd6\x0c\xb2\xa0\x40\x03\x78\xee\x9c\x02\x79\xea\
\x94\x5c\x13\x40\xe9\xd9\xed\xd2\x5e\x21\xfa\x02\x47\x8f\x02\x47\
\x8e\x00\x87\x0e\xa1\x72\xdd\x3a\x58\x85\x5b\x51\xa6\x94\x2b\x31\
\x34\x88\x00\xb5\x18\x6b\x36\x4b\xc9\xd8\xfa\xf4\x41\xe5\xf1\xe3\
\x35\x81\x65\x36\x6b\x07\x4a\x90\xbc\x8f\x40\x35\x90\xd2\x52\x71\
\xf0\x20\x70\xe0\x80\x74\x27\xec\xdb\x07\xec\xdd\x0b\xec\xde\x0d\
\xec\xda\x05\xec\xdc\x09\x6c\xde\x0c\x9b\xe8\x0d\xbe\x5b\x91\xa8\
\x3f\x01\x0b\x5d\xa2\xd0\x62\xd1\x40\xec\xd9\x43\x70\x5a\x66\x5d\
\xc1\x32\xa3\x0a\xac\x96\x51\x02\x65\x46\xab\x03\xe5\x33\x14\xd0\
\x1d\x3b\x80\xed\xdb\x81\x6d\xdb\x80\x2d\x5b\x24\x68\x6c\xdc\x08\
\x6c\xd8\x00\xac\x5f\x0f\xe4\xe4\xc0\x11\x1b\xeb\x42\xa2\xbe\x04\
\xe2\xe8\x0c\x16\xa1\xfb\x92\x4d\x9b\x98\x79\xbe\x80\x2f\xd4\xc0\
\xa9\xcc\xba\x66\x95\x2b\xb1\x06\x56\x65\xd4\x66\xd3\xee\xa3\x04\
\xab\x03\xcd\xcd\x05\xd6\xae\x05\xd6\xac\x01\x68\xc7\x2b\x57\x02\
\x2b\x56\x00\xcb\x97\x03\xd9\xd9\x80\x70\xa9\x3c\xa3\x51\x36\x77\
\x4c\x64\xe4\x29\x81\xcf\xa3\xbe\x3d\x90\xd2\xcf\x60\xa8\x90\xf3\
\xcd\xd4\xa9\xd4\xbf\x06\xc8\x6a\xd5\x5e\x46\x62\xfc\x5d\x49\x40\
\x65\x56\x03\xcb\xcf\xb8\x82\x5d\xbd\x5a\x81\xe5\x73\x96\x2d\x03\
\x96\x2e\x05\x16\x2f\x06\xc4\x1a\x81\x05\x0b\x80\x79\xf3\x80\xac\
\x2c\x80\x86\x21\xde\x6b\xeb\xd4\x09\xec\xc3\x68\xb3\x39\xbb\xde\
\x2e\x24\x96\xfe\x9e\xbd\xc3\xc3\x9f\xc8\x09\x33\x2e\x0e\x95\xd4\
\x3e\x25\x43\x00\x4b\x96\xf0\xa5\xcc\x14\x7f\x66\xf6\x08\x4e\x03\
\x29\x9a\x51\x65\x56\x81\x65\x66\xf9\x59\xde\xf3\x15\xd8\xd9\xb3\
\x81\x19\x33\x80\x49\x93\x80\x71\xe3\x00\xae\xf0\x62\x2f\x51\x19\
\x1f\x8f\xf9\xe2\xfd\x74\x42\x81\x27\xb1\xde\x36\x1a\xda\xad\x9b\
\x1f\x4b\x28\x67\x7c\x11\xa5\xcc\x38\x1b\x9a\x20\x49\x60\xfe\x7c\
\x60\xee\x5c\x05\x62\xd6\x2c\x60\xe6\x4c\x60\xda\x34\xfe\xac\x81\
\x62\x05\x27\x4f\xd6\xc0\x65\x64\x00\xc2\x32\x31\x61\x02\x30\x7e\
\x3c\x90\x9a\xaa\x81\x1d\x35\x0a\x18\x39\x12\x18\x3e\x5c\x02\xc7\
\x90\x21\xc0\xe0\xc1\xc0\xc0\x81\x28\x8d\x89\xc1\x38\x0f\x0f\xf4\
\x36\x99\xca\x04\xa4\x36\x0d\x59\x89\x3d\xa2\x4d\xa6\x4c\xee\xb2\
\xf8\x20\x07\xb3\x56\x5c\xcc\x06\xad\x9b\xc0\x94\x29\xb5\x83\x4e\
\x49\x01\xc6\x8c\x01\x46\x8f\x06\x92\x92\xea\x24\x80\xfe\xfd\x61\
\xef\xd2\x45\xda\x6b\x54\x78\x78\x4e\x83\x67\xa1\x70\x83\x21\x96\
\x59\xa0\xa4\x6c\x89\x89\x94\x14\x5d\x89\xd2\xa8\x8b\x00\x6d\x95\
\x84\x55\xd0\x04\xd8\x27\xd4\xfe\xd8\xb1\xdf\x44\xa0\x52\xd8\x79\
\x66\xeb\xd6\x60\x5f\x76\xf4\xf5\xd5\x37\x80\x80\x92\xd4\x2f\x91\
\x91\x0e\xb9\xc0\xe9\xf5\x94\x14\x47\x05\xda\x5f\xad\x04\x4a\x84\
\x8c\xec\x69\x69\x55\x41\x67\x23\x79\x45\x88\xee\xc5\xcf\xd7\x41\
\x00\xfd\xfa\xc1\x11\x12\x22\x5d\x29\xd2\x68\xcc\xa5\x2a\xbe\x67\
\x3f\xe0\x41\x57\x90\x23\x86\xb7\xb7\xab\xa4\x58\x09\x57\x02\x1c\
\xa7\x59\x7e\x39\x26\xb0\x97\xb8\xc6\x08\x77\x63\x02\xe8\x70\x8a\
\x0c\x17\x3b\xca\xac\x16\x02\x10\x46\x92\xd9\xaa\x95\xaa\xc2\xf7\
\xee\xc8\x7e\x0a\x0b\x1b\x1c\x67\x34\x56\xc8\x21\x8f\x8d\x48\x10\
\x85\x85\x94\xc6\x57\x04\x08\xdc\x64\x30\x58\xb9\x2b\xe3\x77\xd1\
\x53\x45\x04\x22\x2a\xc9\xbe\xa2\x24\x15\x11\x1a\x44\x2d\x04\x1c\
\x82\x34\x93\x61\x0e\x0d\xcd\x66\x22\xbf\x7b\x4f\x2c\x00\xf9\xc7\
\x44\x44\xdc\xe4\x43\x99\xd1\x72\xbb\x9d\x20\xb8\x0e\x28\x02\x5f\
\x6f\x29\xdb\x8b\xf0\x11\xa1\x0b\x0b\x09\xf9\xab\xa7\xd1\x58\xcc\
\xbf\xb3\x9a\xac\x08\xef\x97\x76\x3d\x62\xc4\x57\x04\x2a\x7b\xf6\
\x84\x45\x39\x92\x4f\x63\x9d\x4a\x78\xf4\x30\x9b\x37\x71\xf5\x26\
\x08\x27\x7d\x9f\x20\x38\x6a\x4c\x9f\xfe\x2d\x7b\x62\xef\xf0\x90\
\x90\xb4\x5f\xbb\x77\x2f\x73\x59\x73\x0a\x0a\xe8\x56\x2e\x04\x20\
\x9a\xd9\xa6\xed\xec\xd0\x35\x28\x68\x58\xa3\x9e\x0b\x09\x8b\x4b\
\xa6\x2c\x28\x29\x36\x2b\x49\x70\x7a\xb5\x8b\x5d\x98\x22\x50\x77\
\x35\x45\x6f\x1d\x66\xaf\xb0\x9a\x24\x21\xd7\x9c\xc4\x44\x17\x02\
\x4e\x83\x41\xf6\x91\x90\xf0\x5e\x71\x9b\x67\xa3\x9e\xcc\x09\x49\
\x84\x72\xf5\xa6\xa4\xac\x51\x51\x04\x41\x32\xb5\x11\xa8\x75\xcd\
\x11\x19\x56\x24\x28\xcb\x84\x84\x2f\x04\xd0\xab\x57\x75\x19\xf9\
\x35\xc5\xd9\xa8\x67\x8f\x88\x88\x3c\x66\x92\x03\x21\x89\x10\x90\
\x2e\x20\x20\xf9\x5b\x1f\x20\xac\x3a\xd5\x85\x04\x67\x2a\x45\x00\
\x36\x5f\x5f\xf9\xcc\x80\x8e\x1d\xe3\x1b\x99\x80\xba\x7e\x36\x99\
\x26\x52\x52\x72\x97\x15\x1e\x5e\x42\x95\xd5\xf7\x8c\x95\x95\x63\
\x4f\x48\x39\xa6\xa7\x7f\x21\xe0\x10\x16\xce\x2a\x1b\xf5\xfa\x2c\
\x56\xad\x49\x08\xf0\xe2\x89\xf4\x8f\x9d\x3b\xa7\xfe\x0b\x5e\xd7\
\x80\x53\x3f\x5b\x52\x95\x3b\xb1\x1f\x06\x0c\x90\x04\xca\x23\x22\
\xb8\x5f\xe0\x68\x61\xa7\x09\x34\x11\x01\xa5\x6b\x0e\x61\x0d\x3d\
\x30\xe6\x31\x3d\xdd\x8d\x16\xcd\xa9\x97\x04\x10\x13\xc3\x3e\xa8\
\xaa\x6c\xa0\x0b\x81\x66\x76\x55\x6d\xb0\x94\x94\x84\x2b\x91\x80\
\xd5\xcb\x8b\xff\x77\x20\x60\x7d\xf3\x25\xa0\x48\xe4\x53\x32\x9c\
\xa3\xb8\x41\x22\x01\x7b\x40\x00\xd8\x5f\x34\x07\x77\x20\xa0\x63\
\xb6\xe9\x68\xf2\xc8\xa6\x6f\x5f\x38\xc4\x88\xcd\x26\x37\x04\x07\
\x4f\xa5\xf3\x35\x63\x02\xaa\xa1\x59\x05\x4e\xc0\x9c\x7a\x4b\x0c\
\x06\x4e\xa7\xf2\xd0\x98\xe3\x89\x3b\x10\x30\xb3\x17\x38\xf8\xf1\
\x50\xa1\xdc\x64\xaa\x4e\xc0\xaf\x79\x13\x50\x24\x9c\x9c\x5e\xe5\
\xe2\x26\x46\x94\x34\x01\x59\xcc\x61\xc5\xe2\x4f\xfe\xee\x42\x20\
\x85\x0b\x18\xf7\x20\xdc\x77\x54\x23\x10\xe8\x2e\x04\xbc\xb9\x91\
\xa2\xa5\xf2\x0c\x2a\xd3\xd3\xb3\x8a\x80\xde\x0d\x08\x28\x4b\xad\
\x92\x11\xd7\x02\x8e\xe1\xee\x46\xc0\x42\x19\x71\xdf\x41\x5b\xa5\
\xbd\xba\x1b\x01\x1d\xdd\x88\xdb\x58\xb7\x24\xc0\xab\xaf\xd1\xf8\
\x96\xa3\x76\x35\x02\x61\x6e\x45\x80\x7d\xc0\x93\x0d\xee\x37\x38\
\xb2\x73\xda\x75\x37\x02\x66\xfe\xdb\x2b\x3e\x34\x54\xee\x09\xdc\
\xab\x02\xea\x6a\x43\xe0\x22\xcc\x22\x7c\x88\xfd\x1f\xf9\x92\x41\
\x48\x3f\x71\x1a\xd8\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x08\x19\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x07\xab\x49\x44\x41\x54\x58\xc3\xad\
\x57\x5b\x50\x93\x67\x1a\xf6\xca\xce\xec\xcc\xf6\x62\x2f\xbc\xd9\
\xe9\xce\xec\x6e\xbd\xda\xd9\x9b\xb5\xce\xba\x3b\x7b\xb0\xad\xcc\
\x7a\xb1\xce\xce\x3a\xb3\x76\x54\x70\x75\xdb\xe2\x81\xd6\xb6\x54\
\x04\xbb\xa5\x20\x6d\xc1\x82\x06\x08\x07\x51\x42\x80\x80\x80\x02\
\x21\x81\x10\x92\x40\x48\x10\x73\x24\x21\x67\x72\x80\x04\x42\x20\
\x9c\x09\x47\xb5\x54\x78\xf6\xfb\x7e\x13\x16\x30\x58\x8b\x7d\x67\
\x9e\xf9\x2f\x92\xfc\xcf\xfb\x3e\xcf\xfb\xbe\xdf\x97\x5d\x00\x76\
\xfd\x98\x20\xf1\x0b\x82\x14\x02\x03\xc1\x75\x82\x03\xcf\xfd\xfe\
\x8f\x48\xbc\x9b\x20\xe1\x57\xaf\xef\xb5\x2a\x8c\xd6\x65\xdb\x02\
\x60\x19\x1e\x5b\x09\x27\xf1\x33\xfa\x19\x81\x22\xfc\xdc\x3e\x76\
\x48\x7e\x8a\xa0\xb9\xb6\x59\x1c\x32\xcf\xad\x42\x39\xfe\x1d\x44\
\xf6\x51\xd8\xc7\xe6\xe8\x87\x86\x3d\x7b\xf6\x58\x53\x52\xae\x2c\
\xca\x3a\x3a\x10\x4e\xe2\xe5\x49\xc3\xc4\x31\x04\xb7\x3e\x49\xf9\
\x2c\x60\x9b\x5d\x59\x53\x4d\x03\x4d\xb6\x11\x34\xeb\xfb\x20\x31\
\x79\x60\x19\x9d\xc5\xbb\xef\xbe\x3f\xc5\xab\xbe\x83\xf1\x89\x29\
\x4c\x4f\xcf\xae\x92\xef\xd7\xbc\x74\x02\x11\x9f\x0f\xbe\x1d\xe3\
\xb2\x04\x43\x4f\xb4\x33\x40\x8b\x7b\x06\xcd\x3d\x2e\x34\xeb\xec\
\xa8\x57\xf6\x20\x87\x53\x85\x32\x5e\x35\x43\xbc\xb0\xf4\x90\x81\
\xc1\x60\x5c\x26\xbf\x4b\x7c\xe1\x04\x48\x1c\x24\x38\x41\xfd\xdd\
\xea\x73\x27\xf1\xb9\x27\x04\x48\x87\x97\xc1\xd7\xbb\x20\x22\x55\
\x37\xdc\x37\xa2\xb8\x4e\x88\x2c\x56\x3e\xcc\x56\xdb\x3a\x71\x04\
\x2c\x16\x6b\x2c\xfc\xce\xe7\x27\x10\x91\x36\x93\x95\x3f\x46\x7d\
\xa5\xfe\x12\xc4\x6f\xf4\x59\x31\xb6\x02\x7e\xef\x20\x5a\x7b\x9c\
\xe0\x3f\x30\xa1\x4c\x28\x43\x46\x0e\x1b\xb2\x0e\xf9\x26\xd2\xf9\
\xc5\x65\xcc\x2d\x2c\x21\x34\xbf\x88\xbd\x7b\xf7\x5a\xc9\x3b\x7e\
\xba\x6d\x02\x24\x7e\x43\x90\x46\x3d\x35\x13\x69\x75\xb3\x80\xd2\
\x3f\x0f\xcb\xc4\xe2\x9a\x50\xa1\x5a\xb4\x6c\xf1\x59\xa0\xb6\xa0\
\xa6\x5d\x8d\x2f\xb2\x73\x71\xb7\x9e\xff\x0c\x31\x25\x9d\x09\xcd\
\x63\x62\x6a\x06\x83\x43\x81\x27\xe4\xdd\xbc\x2d\xd3\xb0\x3b\x92\
\x03\x33\x26\xd4\x53\xb5\xd3\xfb\x58\x4f\x88\xc5\x03\x21\x88\x2c\
\x43\x50\xba\x46\xd0\xed\x09\x42\xe5\x9b\x42\x9b\x73\xfc\xa9\xcf\
\x5a\x1b\xee\x2a\x74\xc8\xbc\xc9\x45\x09\xa7\x6c\x93\xcf\x9b\x88\
\x27\xa7\x11\x18\x1d\xc3\x80\x6f\x08\xa2\xd6\xd6\x25\xc2\x51\xdb\
\x28\x12\x87\xc6\x1f\xaf\x82\x2f\x62\x94\x4d\x89\x24\x90\x22\xea\
\x52\x2d\x9a\x42\xab\xe8\x18\x79\x04\xa1\xc5\xcf\x10\x53\x74\xf6\
\x0d\xa3\xd3\xe1\x87\xd4\x3c\x80\x16\xbd\x03\x0d\x5d\x06\x14\xd5\
\x0a\x90\x91\x95\x0d\x2f\x79\xf1\xc6\xaa\xa9\xd4\xb3\x73\x0b\x4c\
\xc5\x94\xd8\xdd\xef\x85\xc9\x62\x05\xb7\xbc\x12\xa5\xe5\x95\x4b\
\x13\xf3\xcb\xab\x23\x0f\x01\x37\xd9\x11\xe6\xd9\x15\x84\x97\x15\
\x13\x06\xcb\x3c\xd0\x68\xf2\xa3\xdd\xee\x5f\x27\x96\x3b\x86\x20\
\xb3\x78\xd7\x7d\xe6\x08\xa4\xf8\x3c\x33\x1b\x2a\x8d\x36\xaa\xdc\
\x53\x33\x21\x8c\x8e\x8d\x33\x15\xd3\x26\xe4\x37\x09\xf1\xc1\xc5\
\x8f\x51\x73\xaf\x01\xbe\x65\x60\xfc\x11\xa0\x23\x13\x23\xf2\xce\
\xa1\xbe\x5d\xb9\xb8\x51\x01\x83\x81\x74\x74\x4d\xa7\x1e\x0a\x67\
\x80\xa9\xb8\xdd\xea\x83\xd8\xe8\x42\x93\xca\xcc\xf8\x7c\xe5\xcb\
\x2c\x88\xda\x24\x51\x89\xa7\x67\xe7\x18\x1b\x86\x86\x47\x60\x77\
\x38\x49\x82\x3a\x24\x7c\xf8\x21\xae\xb3\x0b\xe1\x99\x5c\x80\x6f\
\x09\xd0\x90\xde\xe1\x0f\x2c\x81\xab\x1f\xc4\x7d\xef\x04\xdd\x07\
\x1d\x61\xeb\xff\x9f\xc0\x1d\xb9\x16\x1d\xf6\x21\x48\xcc\xfd\x4f\
\x7d\xee\xd4\x22\x9d\x55\x84\xaa\x9a\xba\x4d\x3e\x47\xe4\x8e\xf8\
\x3c\x3c\x12\x84\xd3\xdd\x0f\xbd\xc1\x88\xc2\xe2\x62\x9c\x7e\x2f\
\x1e\x3d\x03\x01\xf4\x2f\x02\x83\x84\xbc\xc5\xff\x2d\xee\x3a\x43\
\x28\x51\x91\xf7\xf6\x05\xf1\x4e\xdc\xbf\x7d\x84\x33\x69\xe3\x20\
\x18\xf4\x33\xab\xe0\xc9\x54\x68\x35\x38\xd1\xd8\xdd\x0b\x9e\x58\
\x89\xac\x5c\xf6\x33\x3e\x47\xaa\x9e\x9c\x9e\x65\xe4\xee\xf7\x0e\
\xa2\xd7\x6c\x41\x43\x03\x1f\x27\x62\xe3\x20\xe9\xd6\xc0\x45\xcf\
\x01\x52\x90\x24\xb8\x86\xb2\x9e\x00\x6e\xb4\xdb\x50\xd1\x1b\x44\
\x85\xce\x8b\x4a\x7e\x0b\x6d\xbe\x9b\x5b\x27\xd1\xa0\x99\xf8\x16\
\x65\x22\x05\xee\x29\xf4\x28\x13\xc8\x90\x78\x35\x0b\x1a\xad\x3e\
\xaa\xdc\x63\x13\x93\xf0\x0d\x0d\xc3\x66\xef\x83\xb4\x5d\x8e\xc4\
\x4b\x97\x90\xc3\xca\xc3\xd4\x63\xc0\x4e\x7a\x49\x31\x4e\xfa\x89\
\x94\x7f\x5b\x3b\x84\x7c\x85\x13\x25\x6a\x1f\x4a\xd5\x03\xe8\xf2\
\x30\xa3\x28\x22\xf8\xf9\x33\x09\x74\x8f\x2e\xa1\xa8\xbe\x15\xa5\
\x7c\x09\xb2\x4a\x2a\xf0\xcf\xe3\x71\x51\xe5\xf6\x07\x46\xd1\xe7\
\xf2\x40\xab\x37\x20\xfd\x6a\x06\x92\xbf\x48\x83\xcd\x37\x02\x27\
\xa9\xda\x40\x1a\x4c\xe0\x7b\x88\x52\x9d\x1f\x45\xdd\xfd\x0c\x71\
\x41\x97\x1b\xc5\xdd\x1e\x88\x9c\x41\xfc\xf9\xcd\xb7\x5d\x84\xeb\
\x6c\xb4\x43\xd0\x28\xf7\x4e\x23\xa7\xfc\x1e\xb2\x4b\xab\xf1\x51\
\xea\x57\x48\xfe\x6f\xea\xfa\x58\x51\xb9\x47\x82\xe3\xf0\x0c\xf8\
\x60\x34\x99\x51\xc9\xab\xc2\xfb\x67\xcf\x41\xfe\x40\x03\x3f\xe9\
\x6e\xb2\x8d\x19\xb9\x6f\x69\x06\x19\xd2\x9b\x2a\x2f\x72\xe5\x0e\
\xe4\x75\xf6\xa1\xf0\xbe\x1b\x1c\x95\x1b\xf9\x9c\xca\x29\xc2\x53\
\xb8\xdd\x29\xdc\x2b\x76\x04\x90\x51\xc8\xc5\x95\x6b\x79\x38\x11\
\x9f\x80\x9b\xb7\x6e\x33\x63\x15\x91\xdb\x6a\x73\x40\x22\x6d\xc7\
\x85\x84\x0f\x50\x74\xbb\x0c\xf3\x2b\x80\x9f\x34\x58\xf7\x24\x20\
\x1c\x7c\x84\x4a\xd3\x18\x38\xfa\x61\x86\x9c\x56\xfd\x55\xb3\x1e\
\xac\x0e\x3b\xb8\x3a\x1f\xd9\x21\x1e\x7a\x2f\xe0\x13\xbc\xba\x5d\
\x02\x26\xbe\xc1\x83\x94\x6f\xd8\x38\x9f\x9c\x8a\x03\x7f\x3d\x04\
\x63\xaf\x99\xe9\x6e\x2a\xb7\x46\xd7\x83\xa4\xcb\xc9\x48\xff\x3a\
\x8b\x8c\xd5\x3c\x53\xb5\x71\xf6\xa9\xdc\x35\xf6\x69\x5c\x97\x59\
\x19\xd9\xbf\x6e\x21\xa7\xa0\xd4\x82\x74\xbe\x1a\x57\x9b\x34\x60\
\xc9\xcc\x10\xbb\x82\xf8\xe5\xaf\x5f\xa7\x67\xc0\x3b\xe1\x75\x1f\
\x35\xcc\x35\xdd\x66\x7c\x94\x96\x85\xb8\x73\x17\xf1\x97\x43\x31\
\x4c\xd5\x74\x99\xf0\xaa\xaa\x71\xfa\xf4\x19\x68\xcc\x0e\x8c\x92\
\x2d\x36\x14\x1e\xab\x5a\xc7\x0c\x78\xe6\x71\x70\x0d\x23\x4c\xa3\
\x65\x8a\x0c\x8c\xec\xb4\xfa\x9c\xb6\x5e\x94\x74\x39\xd0\x66\xf7\
\xaf\x1e\x3d\x11\x4b\x47\x2e\x6f\xc3\x79\x13\x35\x2c\x5c\x99\x1a\
\xf1\x97\x3e\xc7\xd1\xd8\x33\xf8\x38\x31\x09\x86\x5e\x13\x1a\x9b\
\x04\xf8\xdd\x1b\xfb\x51\x4f\xd4\xf1\x90\x99\xee\x9a\x00\xaa\xad\
\x93\x60\x2b\x5d\x0c\x39\xf5\xbc\xf0\xbe\x67\xbd\xea\xcc\x16\x3d\
\x4a\x55\x1e\x08\x6d\x01\x94\xd4\xf1\x43\xe1\x65\x53\x40\xf0\xca\
\xf7\x25\x60\x2b\x6e\x6a\xc7\xa9\x84\x44\xc4\x1c\x39\x8a\xdc\x7c\
\x36\x5a\x5a\xc5\x38\x14\x13\x83\x2f\x39\x35\xc8\x14\x6a\x98\xe6\
\xa2\xd5\xd2\x27\xf5\x9a\x7a\x4c\x13\xa1\x49\x64\xb7\x99\x90\xdb\
\x6e\x46\xb9\xda\x8d\x06\xa5\x76\x39\x2c\x39\x3d\xf9\x4e\x13\xec\
\xd9\x72\xd4\x47\x0d\x3b\xab\x46\x88\x63\xff\x39\x8f\xdf\xee\xfb\
\x3d\x1a\xf9\x02\x9c\xbf\x90\x80\x93\xf1\x17\x70\xa3\xad\x07\x19\
\xc4\x4f\x4a\x14\xe9\x6e\xba\x58\xa8\xef\x2c\xfa\x94\x98\x50\x28\
\xb7\x40\xe9\x0e\x3c\xf9\x57\xec\x29\x2a\x77\x2d\xc1\x67\x04\xfb\
\xb6\xb9\xe4\x44\x8d\xbe\xcc\xb2\x5a\xfc\xe3\xe4\x19\x1c\x3c\xf4\
\x37\xb0\x72\xf3\xb0\xef\xc0\x1f\x50\x20\xd1\x21\x89\x27\x65\x2a\
\xa6\x4b\x85\x3e\xbf\x21\xd5\x46\xe4\x2e\x90\x5b\x21\xb0\x0c\xae\
\xe5\xdc\xe2\xd2\x11\x13\x13\xe4\x87\x6f\x3c\xaf\x3c\xe7\x96\x15\
\x35\x9c\x69\x45\xe5\xf8\xfb\xb1\x58\x1c\x3f\x19\x87\x37\xf6\xef\
\xc7\x8d\x3a\x11\x92\xab\xa4\x0c\x21\xed\x70\xea\x35\x55\x21\x8b\
\x34\x5b\xc9\x03\x37\x2a\x34\x6e\xd4\x49\x3a\x17\xc3\x72\x73\x08\
\x8e\x6d\x95\xfb\x87\x24\xe0\x4a\x65\x73\x70\xe4\xf8\x29\x1c\x3e\
\x7c\x98\x8c\x63\x2e\x32\x05\x2a\x5c\x22\xd5\xd3\x5d\x7e\x4d\xdc\
\x0b\x36\xe9\x74\x76\xa7\x1d\x77\x8c\xe4\x88\xb6\xf9\x9e\x84\xb7\
\x1a\x95\xfb\x22\xbd\x49\xfd\x80\x0b\x6d\xf4\x04\x32\x4a\x78\x4c\
\x0f\x9c\x4b\x49\xc3\xb5\xa6\x2e\x7c\xc2\x6d\x65\x36\x59\xf1\x83\
\x01\x5c\x97\x9a\xc1\x51\x7b\x20\xf3\x04\xd7\xce\x25\x26\x05\x36\
\xc8\xfd\xc7\x9d\xc8\x1d\xd5\x82\xdc\x1a\x01\xce\x5e\x4e\x45\x81\
\x58\x85\x78\xf6\x5d\x5c\xa9\x55\x90\xaa\xfb\xc0\x96\xdb\x50\xad\
\x75\xe3\xae\x54\x41\x2f\x10\xca\x0d\x72\xbf\xba\xd3\x6a\xa3\x05\
\xb7\xa2\x51\xf8\x1d\xaf\x43\x8d\x4f\xb9\x2d\x88\xcb\xe6\xe1\x9a\
\x48\x8f\xaa\x1e\x2f\x9a\x35\xe6\xc7\x7f\x7a\xf3\x2d\x57\x78\xac\
\xa8\xdc\xaf\xbd\xac\xdc\xd1\xe2\x08\xdd\x05\x5c\x75\x1f\xde\xcb\
\xaf\x45\xb9\x76\x00\x32\x67\x60\xf5\xc2\xa7\x97\xa9\xdc\xf7\x08\
\xd2\xa9\xdc\x3b\xf8\x03\xf3\xc2\xf1\x13\x82\xca\x1c\xee\x9d\x50\
\x0b\x39\x94\xb8\x0d\xc2\xc8\x16\xa3\x17\x87\xc3\x2f\x22\xf7\x0e\
\xff\xda\x6d\x8a\xdd\x61\x99\xd5\x1b\xb6\xd8\x6b\xbb\x5e\x32\xbe\
\x2f\x89\xff\x01\x66\xb9\x5f\xfc\x11\x80\x3d\xcf\x00\x00\x00\x00\
\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\x0c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x01\
\xc6\x49\x44\x41\x54\x68\x43\xed\xd9\xbb\x4a\x04\x31\x14\xc6\xf1\
\xff\x3e\x86\xbd\x6f\xe0\x13\x58\xfa\x00\xda\x78\xc1\x0b\x58\x58\
\x68\x61\x2b\x78\x01\x5b\x0b\x2d\x16\x1b\x41\x0b\x11\x0b\xaf\x85\
\x28\x08\x6a\x21\x88\x08\x0a\x36\x82\x22\x36\x62\x21\x6a\xa9\xa0\
\x28\x47\xce\x80\x58\xcd\x4c\x72\x92\x09\xcc\xd4\xd9\xf0\xfd\x72\
\x4e\x36\x9b\x9d\x06\x89\x3f\x8d\xc4\xf3\x53\x03\x62\x57\xb0\xae\
\x40\xea\x15\x68\x01\xda\x80\x2f\x60\x2f\x06\xc6\xb5\x85\xd6\x81\
\x4e\x0d\xbe\x05\x0c\x00\x6f\x21\x21\xae\x80\xef\x7f\x61\x8f\x80\
\x7e\xe0\x21\x14\xc2\x37\x40\x72\x5f\x2a\xe2\x2a\x04\xc2\x02\x20\
\xb9\xef\xb5\x9d\x8e\xad\x11\x56\x00\xc9\xfd\xaa\x95\xd8\xb1\x44\
\x58\x02\x24\xb7\xec\x11\xd9\x13\x2b\x56\x08\x6b\x40\x96\x7b\x0c\
\x98\xb7\x40\x84\x02\x48\xf6\x49\x60\xc6\x37\x22\x24\x40\xb2\xcf\
\x01\xe3\x3e\x11\xa1\x01\x92\x7d\x09\x18\xf2\x85\x88\x01\x90\xec\
\x1b\x40\x37\xf0\xee\x0a\x89\x05\x90\xdc\x87\x40\x0f\xf0\xe4\x82\
\x88\x09\x90\xdc\x17\x5a\x89\x9b\xb2\x88\xd8\x00\xc9\x7d\xa7\x88\
\xb3\x32\x88\x2a\x00\x24\xf7\xb3\xb6\xd3\x7e\x51\x44\x55\x00\x92\
\xfb\x13\xe8\x02\x36\x8b\x20\xaa\x04\xc8\xda\xa9\x35\x65\x80\xdc\
\x27\xda\x53\x06\x4c\x01\xd3\xa9\x02\x46\x81\x85\x22\xe1\x65\x6c\
\x15\xf6\x80\xfc\xe4\x96\x03\x6d\xb5\x68\xf8\x2a\x00\x4a\x7f\x7d\
\x66\xd8\x98\x15\x90\xd3\xb7\x17\x38\x2f\xb3\xf2\xb1\x01\xa7\x40\
\x9f\x9e\xc2\x2e\xf9\xa3\xec\x81\x5d\xbd\x66\xbe\x38\x25\xd7\x0f\
\x87\x6e\xa1\x65\x0d\xef\x23\xfb\xef\x1c\x21\x01\xde\x6f\x63\x21\
\x01\x13\xc0\xac\xb7\x65\xff\x33\x51\x88\x0a\x8c\x00\x4d\x8b\xf0\
\xd6\x15\xf8\xd0\x7e\x5f\xb3\x0a\x6f\x09\x78\xd4\xbf\x16\x0f\x2c\
\xc3\x5b\x01\xae\x81\x41\xd7\x03\x2a\x2f\xdc\xf7\x1e\x38\xd1\xf0\
\x72\x4d\x0c\xf2\xb8\x02\x16\x81\x61\x4d\xba\xad\xe1\xbd\x1c\x50\
\x79\xf5\xae\x00\xb9\x3d\x75\x00\xb7\xa9\xbe\x62\xca\xbb\x50\x66\
\xe3\x5c\x2b\x60\x16\x2c\xef\xc4\x35\x20\xef\x4a\x59\x8d\xab\x2b\
\x60\xb5\xb2\x79\xe7\xfd\x01\xfc\xdd\x5e\x31\x0e\x44\x91\x8a\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x44\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\
\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\
\x0b\x12\x01\xd2\xdd\x7e\xfc\x00\x00\x00\x07\x74\x49\x4d\x45\x07\
\xd3\x0a\x17\x0a\x12\x16\x5e\xb7\xf3\xf2\x00\x00\x00\x1d\x74\x45\
\x58\x74\x43\x6f\x6d\x6d\x65\x6e\x74\x00\x43\x72\x65\x61\x74\x65\
\x64\x20\x77\x69\x74\x68\x20\x54\x68\x65\x20\x47\x49\x4d\x50\xef\
\x64\x25\x6e\x00\x00\x03\xa8\x49\x44\x41\x54\x78\xda\xad\x96\x5f\
\x68\x1c\x55\x18\xc5\x7f\x77\x76\x36\x9b\x34\xe9\xda\x6e\xda\xb2\
\x2a\x98\x27\x31\x55\xfa\x66\xdb\x6d\x82\x42\x5e\x35\x45\xf0\x29\
\x46\x10\x82\xe0\x83\xa2\x82\xb5\x11\x04\x8b\xcf\xea\x8b\x0f\xe2\
\x9f\x60\x7c\x08\x41\x6a\x71\x5b\xcd\xa3\x88\x6d\xd3\xc4\x9a\x36\
\x69\x35\x14\x2a\xad\x94\xe2\x96\x4d\x28\x49\x48\xc8\xee\xce\xec\
\xcc\xbd\x9f\x0f\x33\x3b\xfb\x67\x92\xda\xa2\x1f\x5c\x66\x19\x66\
\xce\x39\xdf\xf9\xee\x3d\xb3\xf0\x3f\xd4\xd0\xd0\x90\x75\x3f\xcf\
\xbd\x04\xc8\x03\xac\x32\xb0\x91\x4e\xa7\xa5\xb7\xb7\x57\x46\x46\
\x46\x6e\x00\xed\x80\xda\x8e\x40\x16\x17\x17\xa5\x5c\x2e\xcb\xbf\
\x95\xeb\xba\xe2\xba\x8e\xb8\xae\x23\xc5\x62\x51\x0a\x85\x82\xcc\
\xcf\xcf\x4b\x4f\x4f\x4f\x01\xd8\x03\x6c\xd9\xd1\x7d\x81\xd7\x49\
\x1c\x59\x5b\x5b\x95\xd3\xa7\xf3\xb2\xbc\xbc\x2c\x22\x22\x53\x53\
\x53\x92\xcb\xe5\x34\x90\xae\x91\xd8\x8d\x0c\x1d\x1d\x1d\x00\x4c\
\x4f\x4f\xc7\xd9\x45\x50\x4a\x21\x22\x88\x08\x5a\xfb\x38\x8e\x4b\
\xa9\x54\x22\x95\x4a\x01\x30\x30\x30\x80\xe3\x38\xd6\xf0\xf0\xf0\
\xc7\x9e\xe7\xbd\x13\xda\xd8\x88\x11\x2f\x63\x8c\x68\xad\xc5\xf7\
\xbd\xe8\xea\x79\x9e\xb8\xae\x23\x1b\x1b\x1b\x32\x3e\x3e\xbe\xdd\
\x8c\x46\x80\xa4\x6a\x21\x88\x3a\xa8\x29\x15\x31\x04\xb7\x25\x9c\
\x5f\x70\x1f\xc0\xf7\x35\x4b\x4b\x45\xb2\xfb\xf6\xf1\xdb\xa5\x39\
\x4e\x9c\xf8\x10\xaf\xd8\x4f\xe5\x4a\x86\xf4\x73\x53\x00\x9d\x31\
\x82\x56\xe0\xe0\x2a\x4d\x16\x21\x82\xb2\x2c\xaa\xae\xcb\xca\xea\
\x2a\x9e\xeb\xf0\xf8\x13\xfb\xf1\xef\x3e\x83\x37\x97\xe2\xb5\xb1\
\xa2\x4c\x9c\xb9\xa6\x80\xbd\x76\xab\xd7\xc6\x68\xce\x9f\x9f\x8e\
\x94\x06\x80\x20\x08\x0a\x85\x84\x0e\x18\x2d\x18\xa3\x31\xc6\xf0\
\xfc\xe0\x51\xfc\xe5\x7e\xbc\xb9\x76\x8e\x7f\xb7\x46\x08\xfe\x3e\
\xd0\x1e\xeb\xc0\xf3\xaa\x0d\xc0\x21\x09\x82\x18\x41\x59\x0a\x31\
\x75\x8b\x44\x84\x9d\xe9\x87\xa8\x16\xfa\xa8\x2e\xec\xe0\xcd\xc9\
\x4d\xbe\x39\x79\xb1\x86\xf7\x32\x70\x6e\x8b\x0e\x0c\x17\xa6\x2f\
\x44\x4a\x03\xc0\x78\x07\xbe\xaf\x19\x1c\x3c\x8a\x7b\xeb\x08\xee\
\xa5\x76\x5e\x9f\x58\x65\x22\xbf\xc0\xdb\x6f\xbc\xca\xa7\x9f\x7d\
\x0d\xb0\x0e\x38\x71\x02\xad\x39\xd2\x97\x43\x4c\xa8\xb2\xc1\xaa\
\x68\x9b\x1a\xc3\xae\xdd\xdd\xb8\x37\x73\x94\xaf\xed\xe1\xbd\x53\
\x77\x98\xc8\x2f\x30\xf6\xe5\xe7\xdc\xbe\x75\xbd\x06\xb5\x06\xb8\
\x5b\x77\x30\x33\x13\xf8\x1c\xda\x61\x85\xc0\x4a\xa9\xc8\x73\xe7\
\xfa\x61\x2a\xf3\x29\x46\xcf\xfc\xcd\xd8\xb7\x57\x19\x1d\x3d\x46\
\xa6\x3b\x43\x69\x3d\x53\x83\xaa\x00\xda\xde\xea\x48\xf7\xf7\xf5\
\x21\x08\xc6\x98\x26\x02\x80\x5d\xbb\xbb\x29\xff\x71\x88\xd2\xe5\
\x14\x6f\xe5\xd7\x99\xcc\x5f\x65\xec\xab\x2f\xc8\x64\x32\x3c\x9c\
\xcd\xb2\xbe\x72\x27\xd2\x1a\x3b\xc9\x00\xbe\xf6\x99\x99\x99\xad\
\xcd\x3d\x1c\x74\x30\x50\xcf\xf3\x00\xb8\x7d\xf3\x59\x3e\xfa\xf1\
\x27\x26\xf3\xbf\x73\xfc\xdd\x63\x14\x97\x96\x10\x84\x03\x07\x9e\
\x42\xb5\x44\x5d\x8c\x40\x6b\x4d\xee\xf0\xa1\x48\x71\x6d\xb7\x78\
\x5e\x95\x47\x1e\x7d\x8c\x17\x06\xf6\xb2\xff\xc5\x4f\x00\xf8\xfe\
\xd4\x49\xba\x76\x76\x91\xcd\x66\x49\xa5\xda\x50\x4a\x61\x29\xeb\
\xde\x04\x46\x6b\x66\x67\x67\x43\x60\xa2\xf3\xe0\xba\x2e\x00\x3f\
\xfc\x72\x97\x73\x67\x7f\x46\x59\x8a\xae\xce\x4e\x6c\x3b\x49\x5b\
\x5b\x92\x44\x22\x81\x52\xf1\xa4\x8e\x5b\xe4\x7b\x1c\x3c\xf8\x74\
\x93\x7a\x31\x06\x5f\xfb\xfc\x75\xe3\x4f\x94\x05\x95\x8a\x43\x32\
\x69\x07\xf3\xb1\xac\xe8\xcc\x68\xad\x31\x46\x6f\x4f\x50\xad\x56\
\xd1\x5a\x87\xf6\x04\xfb\x1e\x15\xcc\xc5\x18\x83\x36\x3e\xc6\x37\
\x24\x93\x36\x89\x44\xa2\x1e\x1b\x4a\x45\xef\x35\x38\x1b\x27\xd8\
\xdc\xdc\x44\x6b\x1d\x78\xa3\x68\x52\xa8\x94\x42\x29\x85\x6d\xdb\
\xc1\xef\xb0\xbb\xda\xdc\x1a\xb3\xab\xb1\x9a\x26\x52\x28\x14\x40\
\x0c\x22\x06\x63\xea\xab\x51\xa9\xa5\x14\xad\x32\x83\x38\x21\xcc\
\x26\xd9\xbe\x83\x8b\xbf\x9e\x65\x47\x9b\x0a\x22\x41\x24\x8a\x68\
\x23\x26\x0c\x6a\x1a\x94\xab\xc8\xef\x1a\xa8\x11\xc3\xe5\x85\x2b\
\xd4\xf7\x78\x73\xbd\xf2\x80\x1f\xfd\x7b\xad\x0f\x80\x27\x5b\xd3\
\xd4\x06\xba\xc3\x95\xfc\x8f\xff\x64\x3c\x60\x05\x58\xf9\x07\xbb\
\x78\x04\xf0\x58\x0d\x4c\x09\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x09\
\x05\xc6\xbf\x47\
\x00\x6d\
\x00\x69\x00\x6e\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x08\xc8\x58\x67\
\x00\x73\
\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x03\xc6\x59\xa7\
\x00\x70\
\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x04\xb2\x58\xc7\
\x00\x75\
\x00\x6e\x00\x64\x00\x6f\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x06\xc1\x59\x87\
\x00\x6f\
\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x09\x6a\x86\x67\
\x00\x61\
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x0b\x07\x5a\x27\
\x00\x65\
\x00\x64\x00\x69\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x03\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x05\x99\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x06\x91\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x77\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\xf2\
\x00\x00\x00\x80\x00\x00\x00\x00\x00\x01\x00\x00\x16\x94\
\x00\x00\x00\x98\x00\x00\x00\x00\x00\x01\x00\x00\x18\xa4\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x05\x99\
\x00\x00\x01\x83\x7a\x88\xbe\x06\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x06\x91\
\x00\x00\x01\x83\x70\x1e\xc2\xd5\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x83\x7a\x88\xe7\x56\
\x00\x00\x00\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x77\
\x00\x00\x01\x83\x70\x1e\xc2\xd4\
\x00\x00\x00\x28\x00\x00\x00\x00\x00\x01\x00\x00\x00\xf2\
\x00\x00\x01\x83\x70\x1e\xc2\xd5\
\x00\x00\x00\x80\x00\x00\x00\x00\x00\x01\x00\x00\x16\x94\
\x00\x00\x01\x83\x7a\x80\xa2\xac\
\x00\x00\x00\x98\x00\x00\x00\x00\x00\x01\x00\x00\x18\xa4\
\x00\x00\x01\x83\x70\x1e\xc2\xd2\
"
qt_version = QtCore.qVersion().split('.')
if qt_version < ['5', '8', '0']:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()

# ==== end of file: zhou_stattool/images_rc.py (package zhou-stattool) ====
import os
import openpyxl
import pickle
import numbers
import csv
import datetime
import json
from PyQt5.QtWidgets import QMainWindow, QToolBar, QFileDialog, QMessageBox, QListWidgetItem, QProgressDialog
from PyQt5.QtCore import Qt, QDate
from zhou_stattool.roomdata_window import Ui_MainWindow
from zhou_stattool.add_rule_window_main import AddRuleMainWindow
class RoomdataMainWindow(QMainWindow):
DATETIME_FORMAT = '%Y/%m/%d %H:%M:%S'
DATETIME_FORMAT_WITHOUT_SECOND = '%Y/%m/%d'
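
    # Illustrative samples: '2023/05/01 12:30:00' matches DATETIME_FORMAT,
    # while '2023/05/01' matches DATETIME_FORMAT_WITHOUT_SECOND.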
def __init__(self):
super(RoomdataMainWindow, self).__init__()
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.add_toolbar()
self.bind_actions()
self.init_vars()
def add_toolbar(self):
self.toolbar = QToolBar()
self.addToolBar(Qt.TopToolBarArea, self.toolbar)
self.toolbar.addAction(self.ui.actionopen_file)
self.toolbar.addAction(self.ui.actionopen_dir)
def bind_actions(self):
self.ui.actionopen_dir.triggered.connect(self.open_dir)
self.ui.actionopen_file.triggered.connect(self.open_file)
self.ui.add_rule_btn.clicked.connect(self.add_rule)
self.ui.remove_rule_btn.clicked.connect(self.remove_rule)
self.ui.edit_rule_btn.clicked.connect(self.edit_rule)
self.ui.open_rule_btn.clicked.connect(self.open_rule)
self.ui.save_rule_btn.clicked.connect(self.save_rule)
self.ui.rules_widget.itemClicked.connect(self.rule_item_clicked)
self.ui.start_process_btn.clicked.connect(self.start_process)
def init_vars(self):
self.opened_files = list()
self.table_header = list()
self.rule = list()
self.ui.start_process_btn.setEnabled(False)
        today = datetime.datetime.today()
        # today.day + 1 overflows on the last day of a month; use timedelta
        tomorrow = today + datetime.timedelta(days=1)
self.ui.start_date_edit.setDate(today)
self.ui.end_date_edit.setDate(tomorrow)
self.ui.start_date_edit.setCalendarPopup(True)
self.ui.end_date_edit.setCalendarPopup(True)
self.ui.modeComboBox.addItems(['总表模式', '分表模式'])
def open_dir(self, checked):
file_dialog = QFileDialog(self)
file_dialog.setFileMode(QFileDialog.DirectoryOnly)
dir_name = file_dialog.getExistingDirectory()
file_names = os.listdir(dir_name)
for file_name in file_names:
if not file_name.endswith('xlsx'):
QMessageBox.critical(self, '错误', '文件夹中包含非表格文件', QMessageBox.Yes)
return
file_names = [os.path.join(dir_name, item) for item in file_names]
self.opened_files = file_names
self.show_opened_files()
def open_file(self, checked):
file_dialog = QFileDialog(self)
file_dialog.setFileMode(QFileDialog.ExistingFiles)
file_names = file_dialog.getOpenFileNames(filter='表格文件(*.xlsx)')[0]
if file_names and len(file_names) > 0:
self.opened_files = file_names
self.show_opened_files()
else:
QMessageBox.critical(self, '错误', '请选择正确文件', QMessageBox.Yes)
def check_file_headers(self):
if not self.opened_files:
return True, []
wb = openpyxl.load_workbook(self.opened_files[0])
sheet = wb['Sheet1']
num_columns = sheet.max_column
header = [sheet.cell(1, i).value.strip() for i in range(1, num_columns + 1)]
for i in range(1, len(self.opened_files)):
wb = openpyxl.load_workbook(self.opened_files[i])
sheet = wb['Sheet1']
if sheet.max_column != num_columns:
return False, []
for col in range(1, num_columns + 1):
val = sheet.cell(1, col).value.strip()
if val != header[col - 1]:
return False, []
return True, header
def show_opened_files(self):
ret, header = self.check_file_headers()
if ret and header:
self.ui.opened_files_widget.clear()
for file_name in self.opened_files:
show_name = os.path.basename(file_name)
self.ui.opened_files_widget.addItem(show_name)
self.table_header = header
self.ui.excel_header_widget.clear()
for item in header:
self.ui.excel_header_widget.addItem(item)
else:
self.opened_files.clear()
self.ui.opened_files_widget.clear()
QMessageBox.critical(self, '错误', '选择文件的表格头不一致', QMessageBox.Yes)
def add_rule(self):
if self.opened_files:
self.add_rule_window = AddRuleMainWindow(self.table_header)
self.add_rule_window.setWindowModality(Qt.ApplicationModal)
ret = self.add_rule_window.exec_()
if ret:
name, indexes = self.add_rule_window.get_result()
indexes = [item.row() for item in indexes]
if not name or not indexes:
QMessageBox.critical(self, '错误', '请输入规则名并选择对应项', QMessageBox.Yes)
else:
self.rule.append([name, [(i, self.table_header[i]) for i in indexes]])
self.refresh_rule_widget()
else:
QMessageBox.critical(self, '错误', '请先打开文件', QMessageBox.Yes)
def remove_rule(self):
selected_index = self.ui.rules_widget.selectedIndexes()
if selected_index:
selected_index = selected_index[0].row()
self.ui.rule_detail_widget.clear()
self.rule.pop(selected_index)
self.refresh_rule_widget()
def edit_rule(self):
selected_index = self.ui.rules_widget.selectedIndexes()
if selected_index:
selected_index = selected_index[0].row()
rule = self.rule[selected_index]
self.add_rule_window = AddRuleMainWindow(self.table_header, rule[0], [rule[1][i][0] for i in range(len(rule[1]))])
self.add_rule_window.setWindowModality(Qt.ApplicationModal)
ret = self.add_rule_window.exec_()
if ret:
name, indexes = self.add_rule_window.get_result()
indexes = [item.row() for item in indexes]
try:
self.rule[selected_index][0] = name
self.rule[selected_index][1] = [(i, self.table_header[i]) for i in indexes]
self.refresh_rule_widget()
except Exception as e:
print(e)
def open_rule(self):
if not self.opened_files:
QMessageBox.critical(self, '错误', '请首先打开文件', QMessageBox.Yes)
else:
file_dialog = QFileDialog(self)
file_dialog.setFileMode(QFileDialog.ExistingFile)
file_name, _ = file_dialog.getOpenFileName(filter='规则(*.pkl)')
if file_name:
ret = self.open_and_check_rule_file(file_name)
if not ret:
QMessageBox.critical(self, '错误', '所选文件不是规则文件或与所打开表格头不一致', QMessageBox.Yes)
else:
self.refresh_rule_widget()
def open_and_check_rule_file(self, file_path):
with open(file_path, 'rb') as f:
data = pickle.load(f)
if not isinstance(data, list):
return False
for item in data:
if not isinstance(item, list) or len(item) != 2 or not isinstance(item[0], str) or not isinstance(item[1], list):
return False
for it in item[1]:
if not isinstance(it, tuple) or not isinstance(it[0], numbers.Number) or it[1] not in self.table_header:
return False
self.rule = data
return True
def save_rule(self):
if not self.rule:
QMessageBox.critical(self, '错误', '请首先创建规则', QMessageBox.Yes)
else:
file_path, _ = QFileDialog.getSaveFileName(self, '保存规则', './', '规则文件(*.pkl)')
if file_path:
with open(file_path, 'wb') as f:
pickle.dump(self.rule, f)
def refresh_rule_widget(self):
self.ui.rule_detail_widget.clear()
self.ui.rules_widget.clear()
for rule in self.rule:
self.ui.rules_widget.addItem(rule[0])
if self.rule:
self.ui.start_process_btn.setEnabled(True)
def rule_item_clicked(self, item: QListWidgetItem):
selected_index = self.ui.rules_widget.selectedIndexes()[0].row()
data = self.rule[selected_index][1]
self.ui.rule_detail_widget.clear()
for item in data:
self.ui.rule_detail_widget.addItem(item[1])
def start_process(self):
start_time = self.ui.start_date_edit.date()
end_time = self.ui.end_date_edit.date()
if start_time >= end_time:
QMessageBox.critical(self, '错误', '结束日期要在开始日期之后', QMessageBox.Yes)
else:
csv_result = self.generate_csv()
if csv_result is not None:
self.generate_score(csv_result, self.ui.modeComboBox.currentIndex())
def calc_score(self, score_dict):
try:
score = 100 * (score_dict['满意'] + score_dict['较满意'] * 0.9 + score_dict['一般'] * 0.7 +
score_dict['不满意'] * 0.4) / (
score_dict['满意'] + score_dict['较满意'] + score_dict['一般'] + score_dict[
'不满意'] + score_dict['很不满意'])
except ZeroDivisionError as e:
score = 0
return score
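
    # Worked example (sketch): with counts 满意=8, 较满意=1, 一般=1 and the
    # rest 0, score = 100 * (8 + 0.9 + 0.7) / 10 = 96.0; note that '未接触'
    # responses never enter the formula.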
def generate_score(self, csv_result, mode):
if mode == 0:
total_num = [len(c) * len(self.rule) for c in csv_result]
total_num = sum(total_num)
count = 0
progress = QProgressDialog(self)
progress.setWindowTitle("正在处理")
progress.setLabelText("正在生成结果")
progress.setMinimumDuration(1)
progress.setWindowModality(Qt.WindowModal)
progress.setRange(0, total_num)
dst_path = os.path.dirname(self.opened_files[0])
for index, file_name in enumerate(self.opened_files):
data = csv_result[index]
result = {}
for item in data:
address = item['address']
result[address] = {}
for key, value in self.rule:
score_dict = {'满意': 0, '较满意': 0, '一般': 0, '不满意': 0, '很不满意': 0, '未接触': 0}
for _, name in value:
try:
d = item[name]
                        except KeyError as e:
                            print(file_name, name)
                            continue  # skip missing columns rather than reuse stale data
d = d.replace("'", "\"")
d = json.loads(d)
result[address]['样本量'] = sum(d.values())
for k, v in d.items():
if k in score_dict:
score_dict[k] += v
score = self.calc_score(score_dict)
result[address][key] = score
count += 1
progress.setValue(count)
header = ['address'] + [item[0] for item in self.rule] + ['样本量']
result_work_book = openpyxl.Workbook()
result_sheet = result_work_book.active
for col in range(len(header)):
result_sheet.cell(1, col + 1).value = header[col]
for row_index, (address, v) in enumerate(result.items()):
result_sheet.cell(row_index + 2, 1).value = address
for col in range(1, len(header)):
result_sheet.cell(row_index + 2, col + 1).value = v[header[col]]
base_file_name = os.path.basename(self.opened_files[index])
result_work_book.save(os.path.join(dst_path, f"{base_file_name.split('.')[0]}_result.xlsx"))
elif mode == 1:
total_num = [len(c) * len(self.rule) for c in csv_result]
total_num = sum(total_num)
count = 0
progress = QProgressDialog(self)
progress.setWindowTitle('正在处理')
progress.setLabelText('正在生成结果')
progress.setMinimumDuration(1)
progress.setWindowModality(Qt.WindowModal)
progress.setRange(0, total_num)
dst_path = os.path.dirname(self.opened_files[0])
result_workbook = openpyxl.Workbook()
for index, filename in enumerate(self.opened_files):
data = csv_result[index]
for item in data:
address = item['address']
result_sheet = result_workbook.create_sheet(address, index=index)
result_sheet.cell(1, 1).value = address
result_sheet.cell(1, 3).value = '满意程度'
degree_name = ['满意', '较满意', '一般', '不满意', '很不满意', '未接触']
for i in range(len(degree_name)):
result_sheet.cell(2, i + 3).value = degree_name[i]
result_sheet.cell(1, 9).value = '得分'
degree_row_count = [0, ]
for rule_index, (key, value) in enumerate(self.rule):
result_sheet.cell(3 + sum(degree_row_count), 1).value = key
degree_row_count.append(len(value))
for value_index, (_, name) in enumerate(value):
                            try:
                                d = item[name]
                            except KeyError:
                                # Column missing from this record: skip it instead of reusing a stale d
                                print(filename, name)
                                continue
                            d = d.replace("'", "\"")
                            d = json.loads(d)
result_sheet.cell(3 + sum(degree_row_count[:-1]) + value_index, 2).value = name
for col in range(len(degree_name)):
result_sheet.cell(3 + sum(degree_row_count[:-1]) + value_index, 3 + col).value = d[degree_name[col]]
result_sheet.cell(3 + sum(degree_row_count[:-1]) + value_index, 9).value = self.calc_score(d)
count += 1
progress.setValue(count)
base_file_name = os.path.basename(self.opened_files[0])
result_workbook.save(os.path.join(dst_path, f"{base_file_name.split('.')[0]}_result.xlsx"))
def generate_csv(self):
dst_path = os.path.dirname(self.opened_files[0])
sheets = [openpyxl.load_workbook(name)['Sheet1'] for name in self.opened_files]
total_num = 0
for sheet in sheets:
total_num += (sheet.max_row - 1) * (len(self.table_header) - 9)
progress = QProgressDialog(self)
progress.setWindowTitle("正在处理")
progress.setLabelText("正在生成csv")
progress.setMinimumDuration(1)
progress.setWindowModality(Qt.WindowModal)
progress.setRange(0, total_num)
count = 0
final_result = []
for index, sheet in enumerate(sheets):
num_row = sheet.max_row
num_column = sheet.max_column
column_names = ['address']
for col in range(10, num_column + 1):
column_names.append(sheet.cell(1, col).value.strip())
result = {}
for row in range(2, num_row + 1):
submit_date = sheet.cell(row, 2).value.strip()
submit_date = datetime.datetime.strptime(submit_date, self.DATETIME_FORMAT)
submit_date = QDate(submit_date.year, submit_date.month, submit_date.day)
start_date = self.ui.start_date_edit.date()
end_date = self.ui.end_date_edit.date()
if submit_date < start_date or submit_date > end_date:
count += (len(self.table_header) - 9)
progress.setValue(count)
continue
address = sheet.cell(row, 9).value.strip()
if address not in result:
result[address] = {}
for col in range(10, num_column + 1):
value = sheet.cell(row, col).value.strip()
if col not in result[address]:
result[address][col] = {'满意': 0, '较满意': 0, '一般': 0, '不满意': 0, '很不满意': 0,
'未接触': 0}
if value in result[address][col].keys():
result[address][col][value] += 1
count += 1
progress.setValue(count)
if not result:
QMessageBox.critical(self, '错误', '选择时间范围内没有符合条件数据', QMessageBox.Yes)
return None
csv_result = []
file_name = os.path.basename(self.opened_files[index])
with open(os.path.join(dst_path, file_name.split('.')[0] + '.csv'), 'w', encoding='utf8') as f:
writer = csv.DictWriter(f, column_names, lineterminator='\n')
writer.writeheader()
for address, data in result.items():
write_data = {}
write_data['address'] = address
for col in range(10, num_column + 1):
write_data[column_names[col - 9]] = str(result[address][col])
writer.writerow(write_data)
csv_result.append(write_data)
final_result.append(csv_result)
return final_result | zhou-stattool | /zhou_stattool-0.0.7.tar.gz/zhou_stattool-0.0.7/zhou_stattool/roomdata_window_main.py | roomdata_window_main.py |
# ZhousfLib
A Python utility library: scripts for building and converting coco, labelme, segmentation and classification datasets.
### Dataset tooling
* [x] datasets/classification
* [x] datasets/coco
* [x] datasets/labelme
* [x] datasets/segmentation
### Database
* [x] db/lmdb
### Decorators
* [x] decorator (exception capture, AOP)
### Fonts
* [x] font (SimSun, special symbols)
### Concurrent load testing
* [x] locust (demo)
### Tabular file tools
* [x] pandas (excel, csv, large-file reading)
### PDF tools
* [x] pdf (export pdf pages as images, extract pdf text and tables)
### Web helpers
* [x] web (flask logging helper, response body, config)
### General utilities
* [x] util
    * [util/cv_util]: read/write images on paths containing Chinese characters with OpenCV
    * [util/encrypt_util]: AES encryption
    * [util/iou_util]: IoU computation
    * [util/json_util]: JSON read/write
    * [util/poly_util]: tile/crop an image into a grid by width and height
    * [util/re_util]: regex extraction of digits, letters and Chinese characters
    * [util/singleton]: singleton
    * [util/string_util]: non-empty, contains, padding helpers
    * [util/time_util]: dates, millisecond/microsecond timestamps, date comparison
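### Usage example
A minimal sketch of the pandas helpers (file paths below are placeholders):

```python
from pathlib import Path
from zhousflib.pandas.pandas_util import read_excel_merge_cell, write_excel

# Read a sheet, splitting merged cells and filling them with the merged value
df = read_excel_merge_cell(file_path=Path("report.xlsx"),
                           tmp_excel=Path("report_tmp.xlsx"),
                           delete_duplicates_rate=0.85)
# Write the rows out to a new workbook
write_excel(data=df.values.tolist(), columns=df.columns.tolist(),
            save_file=Path("output.xlsx"))
```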
| zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/README.md | README.md |
import os
import shutil
from pathlib import Path
from distutils.core import setup
from Cython.Build import cythonize
# Project root directory
project_dir = Path(__file__).parent.parent
# Exclusion list: files under these directories/files are not compiled to .so
exclude_dirs_or_files = [
"{}/venv".format(project_dir),
"{}/.idea".format(project_dir),
"{}/.svn".format(project_dir),
"{}/download".format(project_dir),
"{}/log".format(project_dir),
"{}/pid.txt".format(project_dir),
"{}/app.py".format(project_dir),
"{}/config.py".format(project_dir),
"{}/entry.py".format(project_dir),
"{}/multi_app.py".format(project_dir),
]
def copy_file(project_name, file_dir, root, current_file):
_, child_dir = root.split(project_name)
if len(child_dir) > 0:
target_dir = file_dir + "/" + project_name + child_dir
else:
target_dir = file_dir + "/" + project_name
if not os.path.exists(target_dir):
os.makedirs(target_dir)
shutil.copy(current_file, target_dir)
def distill_dirs_or_files(root):
for exclude in exclude_dirs_or_files:
if root.find(exclude) >= 0:
return True
return False
def main():
project_name = os.path.basename(project_dir)
file_dir = os.getcwd()
build_dir = file_dir + "/build"
# noinspection PyBroadException
try:
for root, dirs, files in os.walk(project_dir):
for file in files:
current_file = os.path.join(root, file)
                # Skip .pyc / .c build artifacts
if file.endswith(".pyc"):
continue
if file.endswith(".c"):
continue
                # Skip this script itself
if current_file == __file__:
continue
                # Skip the build directory
if root.find(build_dir) >= 0:
continue
                # Skip the output directory of the generated .so files
if root.find(file_dir) >= 0:
continue
                # Skip excluded directories
if distill_dirs_or_files(root):
continue
                # Skip excluded files
if current_file in exclude_dirs_or_files:
continue
                # Copy non-python files as-is
if not file.endswith(".py"):
copy_file(project_name, file_dir, root, current_file)
continue
setup(ext_modules=cythonize([current_file]))
                name = file.rsplit(".", 1)[0]
                # Remove the generated .c file so the .so is rebuilt on every run
c_file = os.path.join(root, name + ".c")
if os.path.exists(c_file):
os.remove(c_file)
if os.path.exists(build_dir):
shutil.rmtree(build_dir)
print("done! Generating SO files completed.")
print("SO dir: " + file_dir)
except Exception:
if os.path.exists(build_dir):
shutil.rmtree(build_dir)
print("工程的所有py文件的当前目录以及所有上级目录中都要有__init__.py文件,若没有请新建!")
main() | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/so/project_to_so.py | project_to_so.py |
import os
import time
import grequests
from requests import Response
class DownloadBatch(object):
def __init__(self, save_dir, concurrent=True):
"""
批量下载文件工具类
:param save_dir: 保存文件目录
:param concurrent: True并行 False串行
"""
self.save_dir = save_dir
self.req_list = []
if not os.path.exists(save_dir):
os.makedirs(save_dir)
self.consume_time = 0
self.file_names = []
self.task_num = 0
self.concurrent = concurrent
@staticmethod
def exception_handler(request, exception):
r = Response()
r.status_code = 408
r.reason = "download failed"
return r
def add(self, name, url, timeout=20.0):
"""
:param name:
:param url:
:param timeout:
:return:
"""
if url is None or name is None:
return False
if name in self.file_names:
return False
self.file_names.append(name)
self.req_list.append(grequests.get(url, timeout=timeout))
return True
def add_all(self, files):
"""
:param files: [{"name": "1.jpg", "url": ""}]
:return:
"""
for file in files:
name = file.get("name")
url = file.get("url")
if url is None or name is None:
continue
            timeout = file.get("timeout", 20.0)  # dict.get takes the default positionally
self.req_list.append(grequests.get(url, timeout=timeout))
self.file_names.append(name)
def run(self):
"""
:return:
"""
result = {}
start = time.time()
self.task_num = len(self.req_list)
if self.task_num == 0:
return result
size = self.task_num if self.concurrent else 1
responses = grequests.map(requests=self.req_list, size=size, exception_handler=self.exception_handler)
for i in range(0, len(responses)):
response = responses[i]
try:
save_file = "{0}/{1}".format(self.save_dir, self.file_names[i])
target_type = self.file_names[i].split(".")[-1]
if 'Content-Type' in response.headers:
content_type = response.headers['Content-Type']
if target_type in ["jpg", "JPG", "JPEG", "jpeg", "png", "PNG", "gif", "GIF"]:
target_type = "image"
if content_type.find(target_type) < 0:
result[self.file_names[i]] = (False,
"The file type is {0}, but {1} is expected. {2}".format(
content_type,
target_type,
response.text),
self.req_list[i].url)
continue
if response.status_code != 200:
result[self.file_names[i]] = (
False, "{0}({1})".format(response.text, response.status_code), self.req_list[i].url)
continue
                # Payloads under 100 characters are treated as error messages, not files
if len(response.text) <= 100:
result[self.file_names[i]] = (
False, "{0}({1})".format(response.text, response.status_code), self.req_list[i].url)
continue
with open(save_file, "wb") as f:
f.write(response.content)
result[self.file_names[i]] = (True, save_file, self.req_list[i].url)
except Exception as ex:
result[self.file_names[i]] = (
False, "{0}({1})".format(response.text, response.status_code), self.req_list[i].url)
continue
end = time.time()
self.consume_time = end - start
return result
if __name__ == "__main__":
url = ""
downloader = DownloadBatch(save_dir="/home/ubuntu/Downloads")
downloader.add(name="1.jpg", url=url, timeout=6)
downloader.add(name="2.jpg", url=url, timeout=0.005)
results = downloader.run()
for result in results:
success, save_file, url = results.get(result)
print(result, success, save_file, url)
print("cost time: {0}s | 共{1}项".format(downloader.consume_time, downloader.task_num)) | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/download/download_util.py | download_util.py |
import os
import csv
import pandas as pd
from pathlib import Path
import mars.dataframe as md
from zhousflib.pandas import openpyxl_util
def read_csv(file_path: str, header="infer", title=None, encoding=None, nrows=None, dtype=None, sep="\t"):
    """
    Read a csv file.
    Typical access patterns:
        csv_data['column_name']
        csv_data['column_name'].values
        columns = df_data.columns.values.tolist()
        data = df_data.values.tolist()
    :param file_path:
    :param header: pass None for files without a header row
    :param title: columns to read (passed to usecols)
    :param encoding: gbk/utf-8
    :param nrows: number of rows to read
    :param dtype: explicit column dtypes
    :param sep: field separator
    :return:
    """
    if not os.path.exists(file_path):
        raise Exception("file not exists: {}".format(file_path))
    # Pass header through unchanged: truth-testing it would treat header=0 like header=None
    return pd.read_csv(file_path, header=header, usecols=title, encoding=encoding, nrows=nrows, dtype=dtype, sep=sep)
def read_csv_mars(csv_file):
"""
大文件读取,采用mars
Mars 是基于张量的,用于进行大规模数据计算的统一计算框架
:param csv_file:
:return:
"""
if not os.path.exists(csv_file):
raise Exception("file not exists: {}".format(csv_file))
return md.read_csv(csv_file, low_memory=False).execute().fetch()
def write_csv(file_path: Path, data, columns=None, seq=None):
"""
按列写入csv
:param file_path: '/home/data.csv'
:param data:
当 columns=None时
data = {"周一": ['语文', '英语', '物理', '数学', '化学'],
"周二": ['音乐', '英语', '数学', '地理', '语文']}
当 columns=["周一", "周二"]
data = [['语文', '英语', '物理', '数学', '化学'], ['音乐', '英语', '数学', '地理', '语文']]
:param columns:
:param seq:
:return:
"""
data_frame = pd.DataFrame(data)
header = True if columns else False
data_frame.to_csv(file_path, header=header, columns=columns, index=False, sep=seq)
def read_excel(file_path, sheet_name=None, header=None):
"""
读取excel文件
:param file_path:
:param sheet_name: None第一张表
:param header:
:return:
"""
if sheet_name is None:
exc = pd.ExcelFile(file_path)
sheets = exc.sheet_names
if len(sheets) > 0:
sheet_name = sheets[0]
if header:
data_ = pd.read_excel(file_path, sheet_name=sheet_name, dtype=object, header=header)
else:
data_ = pd.read_excel(file_path, sheet_name=sheet_name, dtype=object)
    # Replace NaN with empty strings
data_ = data_.fillna("")
return data_
def read_excel_merge_cell(file_path: Path, sheet_name=None, delete_duplicates_rate: float = 1.0,
tmp_excel: Path = None, header=None):
"""
读取excel文件,并处理合并单元格
:param file_path: excel文件
:param sheet_name: None第一张表
:param delete_duplicates_rate: 对拆分合并单元格的结果去重的比例,默认为1.0(全相同时去重),0则不去重
:param tmp_excel: 临时文件,若为空则会更新源文件,合并单元格选项
:param header:
:return:
"""
excel_file = openpyxl_util.unmerge_and_fill_cells(excel_file=file_path, target_sheet_name=sheet_name,
delete_duplicates_rate=delete_duplicates_rate, tmp_excel=tmp_excel)
return read_excel(str(excel_file), sheet_name=sheet_name, header=header)
def write_excel(data, columns=None, save_file: Path = Path('output.xlsx'), sheet='Sheet1'):
"""
写入excel表格
:param data: [[1, 1], [2, 2]]
:param columns: ['col1', 'col2']
:param save_file:
:param sheet:
:return:
"""
writer = pd.ExcelWriter(save_file)
df1 = pd.DataFrame(data=data, columns=columns)
df1.to_excel(writer, sheet, index=False)
writer.close()
def print_shape(file_path):
"""
打印数据行列数量
:param file_path:
:return: (1377615, 330)
"""
if file_path.endswith("csv"):
data = pd.read_csv(file_path, sep='\t')
else:
data = pd.read_excel(file_path)
print(data.shape)
def fetch_row_csv(csv_file, save_csv, rows, reverse=False):
"""
读取数据并保存到save_csv中
:param csv_file:
:param save_csv:
:param rows:
:param reverse: 是否倒序
:return:
"""
reader = pd.read_csv(csv_file, sep='\t', iterator=True)
try:
df = reader.get_chunk(rows)
if reverse:
df = df.iloc[::-1]
df.to_csv(save_csv, index=False)
except StopIteration:
print("Iteration is stopped.")
# reader = pd.read_csv(csv_file, error_bad_lines=False, nrows=100)
# save_data = reader.iloc[1: 90]
# save_data.to_csv(save_file, index=False)
def write_row_csv(csv_path, data):
"""
按行写入csv文件
:param csv_path: csv文件
:param data: 二维数组 [[1, 2], [1, 2, 3]]
:return:
"""
with open(csv_path, "w") as file:
writer = csv.writer(file)
writer.writerows(data)
if __name__ == "__main__":
# read_excel_merge_cell(file_path=Path(r"C:\Users\zhousf-a\Desktop\4_ocr_data.xlsx"),
# tmp_excel=Path(r"C:\Users\zhousf-a\Desktop\4_ocr_data-tmp.xlsx"),
# delete_duplicates_rate=0.85)
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/pandas/pandas_util.py | pandas_util.py |
import openpyxl
from pathlib import Path
from zhousflib.util import re_util
def unmerge_and_fill_cells(excel_file: Path, delete_duplicates_rate: float = 1.0, tmp_excel: Path = None,
target_sheet_name=None) -> Path:
"""
拆分合并单元格并填充有效值
:param excel_file:
:param delete_duplicates_rate: 对拆分合并单元格的结果去重的比例,默认为1.0(全相同时去重),0则不去重
:param tmp_excel: 临时文件,若为空则会更新源文件
:param target_sheet_name: None第一张表
:return:
"""
wb = openpyxl.load_workbook(str(excel_file))
contain_merge_cells = False
for sheet_name in wb.sheetnames:
if target_sheet_name:
if target_sheet_name != sheet_name:
continue
worksheet = wb[sheet_name]
all_merged_cell_ranges = list(worksheet.merged_cells.ranges)
rows_deal = {}
"""
拆分合并单元格
"""
for merged_cell_range in all_merged_cell_ranges:
merged_cell = merged_cell_range.start_cell
worksheet.unmerge_cells(range_string=merged_cell_range.coord)
start, end = merged_cell_range.coord.split(":")
start = int(re_util.get_digit_char(start))
end = int(re_util.get_digit_char(end))
if (start, end) not in rows_deal:
rows_deal[(start, end)] = 1
else:
rows_deal[(start, end)] += 1
for row_index, col_index in merged_cell_range.cells:
cell = worksheet.cell(row=row_index, column=col_index)
cell.value = merged_cell.value
"""
找到符合拆分合并单元格条件的单元格rows
"""
need_fill = []
for i in rows_deal:
need_fill.append(i)
contain_merge_cells = True
if len(need_fill) > 0:
need_fill.sort(key=lambda x: x[0], reverse=False)
"""
拆分合并单元格后,对空单元格赋予有效值,仅对两个的合并单元格
"""
for cells in worksheet.iter_rows():
for cell in cells:
row = cell.row
column = cell.column
for fill in need_fill:
count = rows_deal.get(fill)
if not count:
continue
if row == fill[0] and abs(fill[1] - fill[0]) == 1:
next_cell = worksheet.cell(row=fill[1], column=column)
if not cell.value and next_cell.value:
cell.value = next_cell.value
rows_deal.pop(fill)
"""
拆分合并单元格后会有重复的两条,这里去重一下
"""
if delete_duplicates_rate > 0:
            # Offset tracking how many rows have been deleted
offset = 0
for fill in need_fill:
for i in range(fill[0], fill[1]+1):
current_data = []
next_data = []
for row_cells in worksheet.iter_rows(min_row=i-offset, max_row=i-offset):
current_data = [cell.value for cell in row_cells]
if i < fill[1]:
for row_cells in worksheet.iter_rows(min_row=i+1-offset, max_row=i+1-offset):
next_data = [cell.value for cell in row_cells]
if len(next_data) > 0 and len(current_data) > 0:
                        # Delete the current row when the next row fully contains it or the match ratio is high enough
can_overwrite = True
same_cell_count = 0
available_cell_count = 0
for cell in current_data:
if not cell:
continue
available_cell_count += 1
if cell not in next_data:
can_overwrite = False
else:
same_cell_count += 1
if can_overwrite or same_cell_count >= delete_duplicates_rate * available_cell_count:
worksheet.delete_rows(idx=i-offset)
offset += 1
if tmp_excel:
wb.save(str(tmp_excel))
wb.close()
return tmp_excel
else:
if contain_merge_cells:
wb.save(str(excel_file))
wb.close()
return excel_file | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/pandas/openpyxl_util.py | openpyxl_util.py |
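if __name__ == "__main__":
    # Minimal usage sketch (paths below are hypothetical placeholders): split merged cells into a
    # temporary copy, de-duplicating rows that are at least 85% identical after the split.
    # unmerge_and_fill_cells(excel_file=Path("merged.xlsx"),
    #                        tmp_excel=Path("merged_tmp.xlsx"),
    #                        delete_duplicates_rate=0.85)
    pass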
import shutil
import json
from pathlib import Path
import numpy as np
from prettytable import PrettyTable
def train_test_split(data_dirs: list, dst_dir: Path, val_size=0.2, test_size=0.2, shuffle=True):
"""
训练集、验证集、测试集划分
:param data_dirs: 数据集目录
:param dst_dir: 生成训练集、测试集的目录
:param val_size: 验证集占比
:param test_size: 测试集占比
:param shuffle: 打乱数据集顺序
:return:
"""
def split_data(dir_save: Path, dataset: list, annotations: dict):
image_dir = dir_save.joinpath("images")
if not image_dir.exists():
image_dir.mkdir(parents=True)
annotations_save = []
for img_d in dataset:
if img_d["id"] not in image_files:
continue
shutil.copy(image_files.get(img_d["id"]), image_dir)
ann = annotations.get(img_d["id"])
if ann:
annotations_save.extend(ann)
json_file = dir_save.joinpath("result.json")
data_json = {
"images": dataset,
"categories": categories,
"annotations": annotations_save,
"info": info,
}
with json_file.open("w", encoding="utf-8") as f:
json.dump(data_json, f, ensure_ascii=False, indent=4)
images = []
categories = []
annotations_dict = {}
info = {}
image_files = {}
img_id = 0
for data_dir in data_dirs:
if not data_dir.is_dir():
continue
for label_json in data_dir.rglob("*.json"):
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
            # images
            _images = result_json["images"]
            # label categories
            _categories = result_json["categories"]
            # annotations
            _annotations = result_json["annotations"]
            # info
            _info = result_json["info"]
tmp_anno = {}
            # iterate annotations
for anno in _annotations:
image_id = anno["image_id"]
if image_id not in tmp_anno:
tmp_anno[image_id] = [anno]
else:
tmp_anno[image_id].append(anno)
assert anno["category_id"] is not None
            # iterate images
for img in _images:
if img["width"] is None or img["height"] is None:
continue
file_name = img["file_name"]
print(file_name)
id_tmp = img["id"]
file_name = str(file_name).rsplit("/")[-1]
img["file_name"] = file_name
img["id"] = img_id
img_file = data_dir.joinpath("images").joinpath(file_name)
if not img_file.exists():
continue
image_files[img_id] = img_file
images.append(img)
if id_tmp in tmp_anno:
ann_ = []
for anno in tmp_anno.get(id_tmp):
anno["image_id"] = img_id
ann_.append(anno)
annotations_dict[img_id] = ann_
img_id += 1
if len(info) == 0:
info = _info
if len(categories) == 0:
categories = _categories
    # shuffle the dataset order
if shuffle:
state = np.random.get_state()
np.random.shuffle(images)
np.random.set_state(state)
    # perform the split
dataset_val = []
dataset_test = []
split_index = 0
if 1 > val_size > 0:
split_index = int(len(images) * val_size)
dataset_val = images[:split_index]
if 1 > test_size > 0:
start = split_index
split_index += int(len(images) * test_size)
dataset_test = images[start:split_index]
dataset_train = images[split_index:]
    # train set
if len(dataset_train) > 0:
split_data(dir_save=dst_dir.joinpath("train"), dataset=dataset_train, annotations=annotations_dict)
    # val set
if len(dataset_val) > 0:
split_data(dir_save=dst_dir.joinpath("val"), dataset=dataset_val, annotations=annotations_dict)
    # test set
if len(dataset_test) > 0:
split_data(dir_save=dst_dir.joinpath("test"), dataset=dataset_test, annotations=annotations_dict)
txt = "train: {0}, val: {1}, test: {2}, total: {3}".format(len(dataset_train), len(dataset_val),
len(dataset_test), len(images))
print(txt)
readme_txt = dst_dir.joinpath("readme.txt")
with readme_txt.open("w") as f:
f.write(txt)
def data_statistics(result_jsons: list):
"""
数据分布统计
:param result_jsons: 数据集目录
:return:
"""
image_total = 0
label_list = {}
for label_json in result_jsons:
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
            # label categories
label_list = result_json["categories"]
break
if len(label_list) == 0:
print("标签类别无效")
return
label_names = {k.get("id"): k.get("name") for k in label_list}
statistics_total = {k.get("id"): 0 for k in label_list}
for label_json in result_jsons:
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
            # images
            image_total += len(result_json["images"])
            # annotations
for label in result_json["annotations"]:
label_id = label.get("category_id")
statistics_total[label_id] += 1
statistics_total = {label_names.get(k): statistics_total.get(k) for k in statistics_total}
print(statistics_total)
table = PrettyTable(field_names=["label", "count"])
for key in statistics_total:
table.add_row([key, statistics_total.get(key)])
print(table)
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/coco/coco_dataset_split.py | coco_dataset_split.py |
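    # Minimal usage sketch (directories below are hypothetical placeholders):
    # train_test_split(data_dirs=[Path("batch1/dataset_coco"), Path("batch2/dataset_coco")],
    #                  dst_dir=Path("union/dataset_coco"), val_size=0.2, test_size=0.1)
    # data_statistics(result_jsons=[Path("union/dataset_coco/train/result.json")])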
import shutil
import json
from pathlib import Path
def merge_dataset_coco(dataset_dirs: list, dist_dir: Path, img_index=0):
"""
coco数据集合并
:param dataset_dirs: ["batch1/dataset_coco/test", "batch2/dataset_coco/test"]
:param dist_dir: union/dataset_coco/test
:param img_index:
:return:
"""
anno_new = []
img_new = []
categories_new = []
info_new = {}
anno_id = 0
for data_dir in dataset_dirs:
annotations = {}
label_json = data_dir.joinpath("result.json")
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
        # images
        _images = result_json["images"]
        # label categories
        _categories = result_json["categories"]
        categories_new = _categories
        # annotations
        _annotations = result_json["annotations"]
        # info
        _info = result_json["info"]
        info_new = _info
        # iterate annotations
for anno in _annotations:
image_id = anno["image_id"]
if image_id not in annotations:
annotations[image_id] = [anno]
else:
annotations[image_id].append(anno)
img_dir = dist_dir.joinpath("images")
if not img_dir.exists():
img_dir.mkdir(parents=True)
        # iterate images
for img in _images:
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
img_file = data_dir.joinpath("images").joinpath(file_name)
anno_list = annotations.get(img["id"])
if not anno_list:
continue
            # remap the annotation image ids
for anno in anno_list:
anno["image_id"] = img_index
assert anno["category_id"] is not None
anno["id"] = anno_id
anno_id += 1
img_new.append({"width": img.get("width"), "height": img.get("height"), "id": img_index,
"file_name": "{0}{1}".format(img_index, img_file.suffix)})
anno_new.extend(annotations.get(img["id"]))
copy_file_ = img_dir.joinpath("{0}{1}".format(img_index, img_file.suffix))
shutil.copy(img_file, copy_file_)
img_index += 1
print(img_index, data_dir)
if not dist_dir.exists():
dist_dir.mkdir(parents=True)
result_file = dist_dir.joinpath("result.json")
with open(result_file, 'w') as f:
json.dump({
"images": img_new,
"categories": categories_new,
"annotations": anno_new,
"info": info_new
}, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/coco/coco_dataset_merge.py | coco_dataset_merge.py |
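    # Minimal usage sketch (paths below are hypothetical placeholders):
    # merge_dataset_coco(dataset_dirs=[Path("batch1/dataset_coco/test"), Path("batch2/dataset_coco/test")],
    #                    dist_dir=Path("union/dataset_coco/test"))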
import json
from pathlib import Path
from PIL import Image, ImageDraw
def compute_contain(box1, box2):
"""
计算两个box是否为包含关系
:param box1: (x_min, y_min, x_max, y_max)
:param box2: (x_min, y_min, x_max, y_max)
:return: 返回两个box重叠面积占较小box的面积比,一般大于0.8则为包含关系
box1=(317,280,553,395)
box2=(374,295,485,322)
"""
px_min, py_min, px_max, py_max = box1
gx_min, gy_min, gx_max, gy_max = box2
    p_area = (px_max - px_min) * (py_max - py_min)  # area of box1
    g_area = (gx_max - gx_min) * (gy_max - gy_min)  # area of box2
    # corners of the intersection rectangle (x_min, y_min, x_max, y_max)
    _x_min = max(px_min, gx_min)  # left edge
    _y_min = max(py_min, gy_min)  # top edge
    _x_max = min(px_max, gx_max)  # right edge
    _y_max = min(py_max, gy_max)  # bottom edge
    # area of the intersection rectangle
    w = _x_max - _x_min
    h = _y_max - _y_min
    if w <= 0 or h <= 0:
        return 0
    area = w * h  # area of box1 ∩ box2
if p_area >= g_area:
return area / g_area
else:
return area / p_area
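# Worked check with the docstring boxes (values computed by hand):
# box1=(317, 280, 553, 395), box2=(374, 295, 485, 322)
# intersection = (485-374) * (322-295) = 2997 = area of box2 -> ratio 1.0, box2 lies inside box1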
def coco_convert_bbox(box_coco: list):
_x, _y, _width, _height = tuple(box_coco)
_bbox = (_x, _y, _x + _width, _y + _height)
return _bbox
def bbox_convert_coco(bbox: tuple):
x_min_, y_min_, x_max_, y_max_ = bbox
x = x_min_
y = y_min_
width = x_max_ - x_min_
height = y_max_ - y_min_
return [x, y, width, height]
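# Round-trip sanity check: bbox_convert_coco(coco_convert_bbox([10, 20, 30, 40])) == [10, 20, 30, 40]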
def box_expand(box, offset=50):
x_min, y_min, x_max, y_max = box
offset = min(x_min, y_min) if min(x_min, y_min) < offset else offset
_x_min = x_min - offset
_x_max = x_max + offset
_y_min = y_min - offset
_y_max = y_max + offset
return _x_min, _y_min, _x_max, _y_max
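# Example: box_expand((100, 60, 200, 160), offset=50) -> (50, 10, 250, 210); the offset is clamped
# to min(x_min, y_min) so the expanded box never crosses the top-left image border.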
def generate_image_by_label(data_dirs: list, dist_dir: Path, label: str, contain_child: bool = False):
"""
根据标签生成标注数据,进行图片裁剪
:param data_dirs:
:param dist_dir:
:param label: 提取标签名称
:param contain_child: 提取时包括区域内的所有标签
:return:
"""
dist_dir = dist_dir.joinpath("images")
if not dist_dir.exists():
dist_dir.mkdir(parents=True)
anno_new = []
img_new = []
categories_new = []
info_new = {}
img_index = 0
anno_id = 0
for data_dir in data_dirs:
if not data_dir.is_dir():
continue
for label_json in data_dir.rglob("*.json"):
images = {}
annotations = {}
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
            # images
            _images = result_json["images"]
            # label categories
_categories = result_json["categories"]
if not contain_child:
for cate in _categories:
if cate.get("name") == label:
categories_new.append(cate)
else:
categories_new = _categories
if len(categories_new) == 0:
print("label not exist: {0}".format(label))
return
label_dict = {item.get("id"): item.get("name") for item in _categories}
            # annotations
_annotations = result_json["annotations"]
# info
_info = result_json["info"]
info_new = _info
            # iterate images
for img in _images:
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
images[img["id"]] = data_dir.joinpath("images").joinpath(file_name)
            # iterate annotations
for anno in _annotations:
image_id = anno["image_id"]
anno["id"] = anno_id
anno_id += 1
if image_id not in annotations:
annotations[image_id] = [anno]
else:
annotations[image_id].append(anno)
assert anno["category_id"] is not None
for img_id in annotations:
anno_list = annotations.get(img_id)
steel_graph_anno_list = []
others_anno = []
for anno in anno_list:
category_id = anno["category_id"]
category_name = label_dict.get(category_id)
if category_name == label:
steel_graph_anno_list.append(anno)
else:
others_anno.append(anno)
img = Image.open(images.get(img_id))
if img.mode != "RGB":
img = img.convert('RGB')
if len(steel_graph_anno_list) > 0 and len(others_anno) > 0:
for steel_graph_anno in steel_graph_anno_list:
contain_anno = []
bbox = coco_convert_bbox(steel_graph_anno["bbox"])
for i in range(len(others_anno) - 1, -1, -1):
_anno = others_anno[i].copy()
_bbox = coco_convert_bbox(_anno["bbox"])
iou = compute_contain(bbox, _bbox)
if iou > 0.5:
contain_anno.append(_anno)
if not steel_graph_anno:
continue
bbox = coco_convert_bbox(steel_graph_anno["bbox"])
bbox_big = box_expand(box=bbox, offset=10)
                        # crop the image
cropped = img.crop(bbox_big)
x_min = bbox[0] - bbox_big[0]
y_min = bbox[1] - bbox_big[1]
x_max = bbox[2] - bbox_big[0]
y_max = bbox[3] - bbox_big[1]
steel_graph_anno["bbox"] = bbox_convert_coco((x_min, y_min, x_max, y_max))
steel_graph_anno["image_id"] = img_index
anno_new.append(steel_graph_anno)
# draw = ImageDraw.ImageDraw(cropped)
# draw.rectangle(xy=(x_min, y_min, x_max, y_max), fill=None, outline="red", width=1)
for anno in contain_anno:
_bbox = coco_convert_bbox(anno["bbox"])
                            # translate coordinates into the cropped image frame
x_min_ = _bbox[0] - bbox_big[0]
y_min_ = _bbox[1] - bbox_big[1]
x_max_ = _bbox[2] - bbox_big[0]
y_max_ = _bbox[3] - bbox_big[1]
anno["bbox"] = bbox_convert_coco((x_min_, y_min_, x_max_, y_max_))
anno["image_id"] = img_index
# draw.rectangle(xy=(x_min_, y_min_, x_max_, y_max_), fill=None, outline="red", width=1)
# cropped.show()
if contain_child:
anno_new.extend(contain_anno)
save_img_file = dist_dir.joinpath("{0}_{1}{2}".format(images.get(img_id).stem,img_index, images.get(img_id).suffix))
cropped.save(save_img_file)
img_new.append({"width": cropped.width, "height": cropped.height, "id": img_index,
"file_name": save_img_file.name})
img_index += 1
print(img_index)
result_file = dist_dir.parent.joinpath("result.json")
with open(result_file, 'w', encoding="utf-8") as f:
json.dump({
"images": img_new,
"categories": categories_new,
"annotations": anno_new,
"info": info_new
}, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/coco/coco_bbox_extract.py | coco_bbox_extract.py |
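    # Minimal usage sketch (paths and label below are hypothetical placeholders):
    # generate_image_by_label(data_dirs=[Path("batch1/dataset_coco")], dist_dir=Path("clips"),
    #                         label="等级", contain_child=True)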
import json
import random
import colorsys
from pathlib import Path
from PIL import Image, ImageDraw, ImageFont
from zhousflib.font import Font_SimSun
FONT = ImageFont.truetype(font=str(Font_SimSun), size=15)
def coco_convert_bbox(box_coco: list):
_x, _y, _width, _height = tuple(box_coco)
_bbox = (_x, _y, _x + _width, _y + _height)
return _bbox
def vis_box_coco(coco_dir: Path, dst_dir: Path):
"""
可视化coco bbox
:param coco_dir:
:param dst_dir:
:return:
"""
if not dst_dir.exists():
dst_dir.mkdir()
label_json = coco_dir.joinpath("result.json")
with label_json.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
    # images
    _images = result_json["images"]
    # label categories
    _categories = result_json["categories"]
    classes_dict = {}
    for item in _categories:
        classes_dict[item.get("id")] = item.get("name")
    # annotations
    _annotations = result_json["annotations"]
    # info
    _info = result_json["info"]
    # iterate annotations
annotations = {}
for anno in _annotations:
image_id = anno["image_id"]
if image_id not in annotations:
annotations[image_id] = [anno]
else:
annotations[image_id].append(anno)
assert anno["category_id"] is not None
img_dir = coco_dir.joinpath("images")
for img in _images:
img_file = img_dir.joinpath(img["file_name"])
bboxes = []
print(img["id"])
ann_list = annotations.get(img["id"])
if not ann_list:
continue
for anno in ann_list:
box = anno.get("bbox")
class_id = anno.get("category_id")
bbox_coco = coco_convert_bbox(box)
# class_id, score, x_min, y_min, x_max, y_max, class_id
bboxes.append([class_id, "-", bbox_coco[0], bbox_coco[1], bbox_coco[2], bbox_coco[3]])
image = draw_bbox_label(img_file=img_file, bboxes=bboxes, classes_dict=classes_dict, show=False)
image.save(dst_dir.joinpath(img_file.name))
# break
def draw_bbox_label(img_file: Path, bboxes: list, classes_dict, show=False):
"""
绘制bbox,适用于标注数据,不支持预测可视化
:param img_file:
:param bboxes: [[class_id, score, x_min, y_min, x_max, y_max]]
:param classes_dict: {id1:name1, id2:name2} or [name1, name2]
:param show:
:return:
"""
classes_index = [cls for cls in classes_dict]
hsv_tuples = [(1.0 * x / len(classes_index), 1., 1.) for x in range(len(classes_index))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))
random.seed(0)
random.shuffle(colors)
image = Image.open(img_file)
print(img_file)
if image.mode != "RGB":
image = image.convert('RGB')
image_w, image_h = image.size
draw = ImageDraw.ImageDraw(image)
for box in bboxes:
offset = 0
class_id = box[-6]
score = box[-5]
x_min = int(box[-4])
y_min = int(box[-3])
x_max = int(box[-2])
y_max = int(box[-1])
if str(class_id).isdigit():
class_name = classes_dict[class_id]
else:
class_name = str(class_id)
if score != "-":
class_name += ":{0:.3f}".format(score)
else:
class_name += ":{0}".format(score)
bbox_color = colors[classes_index.index(class_id)]
width = abs(x_max - x_min)
height = abs(y_max - y_min)
draw.rectangle(xy=(x_min, y_min, x_min + width + offset, y_min + height + offset),
fill=None, outline=bbox_color, width=1)
fw, fh = FONT.getsize(class_name)
if y_min < fh:
y_min = y_min + fh
if (x_min + fw) > image_w:
x_min = x_max - fw
# draw.rectangle([x_min, y_min, x_min + fw, y_min], fill=(128, 128, 128, 128))
draw.text(xy=(x_min, y_min - fh), text=class_name, fill=bbox_color, font=FONT)
if show:
image.show()
return image
def draw_bbox_predict(img_file, bboxes: list, classes_dict, score_threshold=0.5, show=False):
"""
绘制bbox,适用于预测可视化
:param img_file:
:param bboxes: [[class_id, score, x_min, y_min, x_max, y_max]]
:param classes_dict: {id1:name1, id2:name2} or [name1, name2]
:param score_threshold:
:param show:
:return:
"""
classes_index = [i for i, cls in enumerate(classes_dict)]
hsv_tuples = [(1.0 * x / len(classes_index), 1., 1.) for x in range(len(classes_index))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))
random.seed(0)
random.shuffle(colors)
if isinstance(img_file, Path):
image = Image.open(img_file)
else:
image = Image.fromarray(img_file)
if image.mode != "RGB":
image = image.convert('RGB')
    image_w, image_h = image.size  # PIL size is (width, height)
draw = ImageDraw.ImageDraw(image)
for box in bboxes:
offset = 0
class_id = box[-6]
score = box[-5]
if score < score_threshold:
continue
x_min = int(box[-4])
y_min = int(box[-3])
x_max = int(box[-2])
y_max = int(box[-1])
if str(class_id).isdigit():
class_name = classes_dict[class_id]
else:
class_name = class_id
if class_id in classes_dict and isinstance(classes_dict, list):
class_id = classes_dict.index(class_id)
class_name += ":{0:.3f}".format(score)
bbox_color = colors[classes_index.index(class_id)]
width = abs(x_max - x_min)
height = abs(y_max - y_min)
draw.rectangle(xy=(x_min, y_min, x_min + width + offset, y_min + height + offset),
fill=None, outline=bbox_color, width=1)
fw, fh = FONT.getsize(class_name)
if y_min < fh:
y_min = y_min + fh
if (x_min + fw) > image_w:
x_min = x_max - fw
# draw.rectangle([x_min, y_min, x_min + fw, y_min], fill=(128, 128, 128, 128))
draw.text(xy=(x_min, y_min - fh), text=class_name, fill=bbox_color, font=FONT)
if show:
image.show()
return image
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/coco/coco_bbox_vis.py | coco_bbox_vis.py |
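    # Minimal usage sketch (paths below are hypothetical placeholders):
    # vis_box_coco(coco_dir=Path("dataset_coco/val"), dst_dir=Path("dataset_coco/val_vis"))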
import json
from pathlib import Path
def merge_label(coco_dir: Path, labels: dict):
"""
合并标签
:param coco_dir: COCO目录
:param labels: {"等级": ['等级_1', '等级_2', '等级_2E']}
:return:
"""
need_modify_labels = {}
for k in labels:
items = labels.get(k)
for item in items:
need_modify_labels[item] = k
result_json_file = coco_dir.joinpath("result.json")
with result_json_file.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
    # images
    images = result_json["images"]
    # label categories
    categories = result_json["categories"]
    # annotations
    annotations = result_json["annotations"]
    # info
    info = result_json["info"]
    # iterate images
for img in images:
if img["width"] is None or img["height"] is None:
continue
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
img["file_name"] = file_name
label_dict = {}
for item in categories:
label_name = item.get("name")
label_id = item.get("id")
if label_name in need_modify_labels:
label_name = need_modify_labels.get(label_name)
label_dict[label_id] = label_name
index_mapping = {}
label_names = {}
for index in label_dict.keys():
name = label_dict.get(index)
if name not in label_names:
index_mapping[index] = index
label_names[name] = index
else:
index_mapping[index] = label_names.get(name)
print(index_mapping)
print(label_dict)
    # build the new category list
categories_new = {}
for index in index_mapping:
if index_mapping.get(index) not in categories_new:
categories_new[index_mapping.get(index)] = label_dict.get(index)
print(categories_new)
categories_list = []
for category in categories_new:
categories_list.append({"id": category, "name": categories_new.get(category)})
for ann in annotations:
        # update the category id
print(ann.get("category_id"))
ann["category_id"] = index_mapping.get(ann.get("category_id"))
assert ann["category_id"] is not None
json_file = coco_dir.joinpath("result.json")
data_json = {
"images": images,
"categories": categories_list,
"annotations": annotations,
"info": info,
}
with json_file.open("w", encoding="utf-8") as f:
json.dump(data_json, f, ensure_ascii=False, indent=4)
def delete_label(coco_dir: Path, labels: list):
"""
删除标签
:param coco_dir: COCO目录
:param labels: ['等级_1', '等级_2', '等级_2E']
:return:
"""
result_json_file = coco_dir.joinpath("result.json")
with result_json_file.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
    # images
    images = result_json["images"]
    # label categories
    categories = result_json["categories"]
    # annotations
    annotations = result_json["annotations"]
    # info
    info = result_json["info"]
    # iterate images
for i in range(len(images) - 1, -1, -1):
img = images[i]
if img["width"] is None or img["height"] is None:
images.pop(i)
continue
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
img["file_name"] = file_name
print(file_name)
delete_indexes = [item.get("id") for item in categories if item.get("name") in labels]
for i in range(len(annotations) - 1, -1, -1):
ann = annotations[i]
        # drop annotations whose label is being deleted
if ann.get("category_id") in delete_indexes:
annotations.remove(ann)
assert ann["category_id"] is not None
categories_list = []
for cate in categories:
index = cate.get("id")
if index not in delete_indexes:
categories_list.append(cate)
json_file = coco_dir.joinpath("result.json")
data_json = {
"images": images,
"categories": categories_list,
"annotations": annotations,
"info": info,
}
with json_file.open("w", encoding="utf-8") as f:
json.dump(data_json, f, ensure_ascii=False, indent=4)
def update_coco(coco_dir: Path):
result_json_file = coco_dir.joinpath("result.json")
with result_json_file.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
    # images
    images = result_json["images"]
    # label categories
    categories = result_json["categories"]
    # annotations
    annotations = result_json["annotations"]
    # info
    info = result_json["info"]
    # iterate images
for img in images:
if img["width"] is None or img["height"] is None:
continue
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
img["file_name"] = file_name
json_file = coco_dir.joinpath("result.json")
data_json = {
"images": images,
"categories": categories,
"annotations": annotations,
"info": info,
}
with json_file.open("w", encoding="utf-8") as f:
json.dump(data_json, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/coco/coco_bbox_update.py | coco_bbox_update.py |
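    # Minimal usage sketch (paths and label names below are hypothetical placeholders):
    # merge_label(coco_dir=Path("dataset_coco/train"), labels={"等级": ['等级_1', '等级_2', '等级_2E']})
    # delete_label(coco_dir=Path("dataset_coco/train"), labels=['等级_2E'])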
import imghdr
import os.path
import warnings
import numpy as np
from pathlib import Path
def train_test_split(dataset_root: Path, val_size=0.2, test_size=0, separator=" "):
"""
数据集划分
:param dataset_root:
:param val_size:
:param test_size:
:param separator: 分割符
:return:
"""
split = [1 - val_size - test_size, val_size, test_size]
image_dir = dataset_root.joinpath("images")
label_dir = dataset_root.joinpath("labels")
image_files = []
for f in image_dir.glob("*.*"):
if not imghdr.what(f):
continue
image_files.append(str(f))
label_files = []
for f in label_dir.glob("*.png"):
label_files.append(str(f))
if not image_files:
warnings.warn("No files in {}".format(image_dir))
if not label_files:
warnings.warn("No files in {}".format(label_dir))
num_images = len(image_files)
num_label = len(label_files)
if num_images != num_label:
raise Exception(
"Number of images = {}, number of labels = {}."
"The number of images is not equal to number of labels, "
"Please check your dataset!".format(num_images, num_label))
image_files = np.array(image_files)
label_files = np.array(label_files)
state = np.random.get_state()
np.random.shuffle(image_files)
np.random.set_state(state)
np.random.shuffle(label_files)
start = 0
num_split = len(split)
dataset_name = ['train', 'val', 'test']
for i in range(num_split):
if split[i] == 0:
continue
dataset_split = dataset_name[i]
print("Creating {}.txt...".format(dataset_split))
if split[i] > 1.0 or split[i] < 0:
raise ValueError("{} dataset percentage should be 0~1.".format(
dataset_split))
file_list = os.path.join(str(dataset_root), dataset_split + '_list.txt')
with open(file_list, "w") as f:
num = round(split[i] * num_images)
end = start + num
if i == num_split - 1:
end = num_images
for item in range(start, end):
left = image_files[item].replace(str(dataset_root), '')
left = left.replace("\\", '/')[1:]
if left[0] == os.path.sep:
left = left.lstrip(os.path.sep)
try:
right = label_files[item].replace(str(dataset_root), '')
right = right.replace("\\", '/')[1:]
if right[0] == os.path.sep:
right = right.lstrip(os.path.sep)
line = left + separator + right + '\n'
                except IndexError:
line = left + '\n'
f.write(line)
print(line)
start = end
if __name__ == '__main__':
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/segmentation/seg_dataset_split.py | seg_dataset_split.py |
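    # Minimal usage sketch (directory below is a hypothetical placeholder); expects
    # dataset_root/images and dataset_root/labels with matching file counts.
    # train_test_split(dataset_root=Path("dataset_seg"), val_size=0.2, test_size=0.1)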
import json
import numpy as np
from pathlib import Path
from PIL import Image, ImageDraw, ImageFont
from zhousflib.util.img_util import get_file_base64
from zhousflib.font import Font_SimSun
FONT = ImageFont.truetype(font=str(Font_SimSun), size=15)
def clip_img(labelme_dir: Path, dst_dir: Path, clip_labels: list, show=False):
"""
裁剪
:param labelme_dir:
:param dst_dir:
:param clip_labels: 裁剪标签
:param show:
:return:
"""
if not dst_dir.exists():
dst_dir.mkdir()
for json_file in labelme_dir.glob("*.json"):
print(json_file)
with json_file.open("r", encoding="utf-8") as f:
data = json.load(f)
imagePath = data["imagePath"]
image_file = labelme_dir.joinpath(imagePath)
image = Image.open(image_file)
if image.mode != "RGB":
image = image.convert('RGB')
image_w, image_h = image.size
draw = ImageDraw.ImageDraw(image)
for i in range(0, len(data["shapes"])):
shape = data["shapes"][i]
label = shape.get("label", None)
points = shape.get("points", None)
if not label or not points:
continue
if label not in clip_labels:
continue
p_arr = np.asarray(points)
scale_up_pixel = 5
x_min = np.min(p_arr[:, 0]) - scale_up_pixel
x_max = np.max(p_arr[:, 0]) + scale_up_pixel
y_min = np.min(p_arr[:, 1]) - scale_up_pixel
y_max = np.max(p_arr[:, 1]) + scale_up_pixel
y_min = y_min if y_min > 0 else 1
x_min = x_min if x_min > 0 else 1
x_max = x_max if x_max < image_w else image_w - 1
y_max = y_max if y_max < image_h else image_h - 1
            # translate polygon coordinates to the cropped image's origin (x_min, y_min)
p_arr_1 = p_arr[:, 0] - x_min
p_arr_2 = p_arr[:, 1] - y_min
p_arr_clip = np.stack([p_arr_1, p_arr_2], 1).tolist()
            # crop
cropped = image.crop((x_min, y_min, x_max, y_max))
save_img_file = dst_dir.joinpath("{0}_{1}{2}".format(image_file.stem, i, image_file.suffix))
cropped.save(save_img_file, quality=100)
image_w_clip, image_h_clip = cropped.size
            # write the labelme json file
save_file = dst_dir.joinpath("{0}_{1}.json".format(image_file.stem, i))
data_clip = {"imagePath": str(save_img_file.name),
"imageData": get_file_base64(save_img_file),
"imageHeight": image_h_clip,
"imageWidth": image_w_clip}
shape_clip = shape.copy()
shape_clip["points"] = p_arr_clip
data_clip["shapes"] = [shape_clip]
data_clip["version"] = data["version"]
data_clip["flags"] = data["flags"]
with save_file.open("w", encoding="utf-8") as f:
json.dump(data_clip, f, ensure_ascii=False, indent=4)
if show:
width = abs(x_max - x_min)
height = abs(y_max - y_min)
draw.rectangle(xy=(x_min, y_min, x_min + width, y_min + height), fill=None, outline="red", width=1)
fw, fh = FONT.getsize(label)
if y_min < fh:
y_min = y_min + fh
if (x_min + fw) > image_w:
x_min = x_max - fw
draw.text(xy=(x_min, y_min - fh), text=label, fill="red", font=FONT)
if show:
image.show()
break
if __name__ == "__main__":
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/labelme/labelme_clip.py | labelme_clip.py |
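    # Minimal usage sketch (paths and labels below are hypothetical placeholders):
    # clip_img(labelme_dir=Path("labelme_data"), dst_dir=Path("labelme_clips"),
    #          clip_labels=["等级"], show=False)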
from __future__ import print_function
import json
import os
import io
import os.path as osp
import numpy as np
import PIL.Image
import PIL.ImageDraw
from PIL import Image
import matplotlib.pyplot as plt
import glob
import traceback
from pathlib import Path
def label_colormap(N=256):
def bitget(byteval, idx):
return ((byteval & (1 << idx)) != 0)
cmap = np.zeros((N, 3))
for i in range(0, N):
id = i
r, g, b = 0, 0, 0
for j in range(0, 8):
r = np.bitwise_or(r, (bitget(id, 0) << 7 - j))
g = np.bitwise_or(g, (bitget(id, 1) << 7 - j))
b = np.bitwise_or(b, (bitget(id, 2) << 7 - j))
id = (id >> 3)
cmap[i, 0] = r
cmap[i, 1] = g
cmap[i, 2] = b
cmap = cmap.astype(np.float32) / 255
return cmap
def polygons_to_mask(img_shape, polygons):
mask = np.zeros(img_shape[:2], dtype=np.uint8)
mask = PIL.Image.fromarray(mask)
xy = list(map(tuple, polygons))
PIL.ImageDraw.Draw(mask).polygon(xy=xy, outline=1, fill=1)
mask = np.array(mask, dtype=bool)
return mask
# similar function as skimage.color.label2rgb
def label2rgb(lbl, img=None, n_labels=None, alpha=0.5, thresh_suppress=0):
if n_labels is None:
n_labels = len(np.unique(lbl))
cmap = label_colormap(n_labels)
cmap = (cmap * 255).astype(np.uint8)
lbl_viz = cmap[lbl]
lbl_viz[lbl == -1] = (0, 0, 0) # unlabeled
if img is not None:
img_gray = PIL.Image.fromarray(img).convert('LA')
img_gray = np.asarray(img_gray.convert('RGB'))
# img_gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
# img_gray = cv2.cvtColor(img_gray, cv2.COLOR_GRAY2RGB)
lbl_viz = alpha * lbl_viz + (1 - alpha) * img_gray
lbl_viz = lbl_viz.astype(np.uint8)
return lbl_viz
def draw_label(label, img=None, label_names=None, colormap=None):
backend_org = plt.rcParams['backend']
plt.switch_backend('agg')
plt.rcParams['font.family'] = ['SimHei']
plt.subplots_adjust(left=0, right=1, top=1, bottom=0,
wspace=0, hspace=0)
plt.margins(0, 0)
plt.gca().xaxis.set_major_locator(plt.NullLocator())
plt.gca().yaxis.set_major_locator(plt.NullLocator())
if label_names is None:
label_names = [str(l) for l in range(label.max() + 1)]
if colormap is None:
colormap = label_colormap(len(label_names))
label_viz = label2rgb(label, img, n_labels=len(label_names))
plt.imshow(label_viz)
plt.axis('off')
plt_handlers = []
plt_titles = []
for label_value, label_name in enumerate(label_names):
if label_value not in label:
continue
if label_name.startswith('_'):
continue
fc = colormap[label_value]
p = plt.Rectangle((0, 0), 1, 1, fc=fc)
plt_handlers.append(p)
plt_titles.append('{value}: {name}'
.format(value=label_value, name=label_name))
plt.legend(plt_handlers, plt_titles, loc='lower right', framealpha=.5)
f = io.BytesIO()
plt.savefig(f, bbox_inches='tight', pad_inches=0)
plt.cla()
plt.close()
plt.switch_backend(backend_org)
out_size = (label_viz.shape[1], label_viz.shape[0])
out = PIL.Image.open(f).resize(out_size, PIL.Image.BILINEAR).convert('RGB')
out = np.asarray(out)
return out
def shapes_to_label_sorted(img_shape, shapes, label_name_to_value, type='class'):
assert type in ['class', 'instance']
instance_names = []
ins = None
cls_name = None
ins_id = 0
mask = None
cls = np.zeros(img_shape[:2], dtype=np.int32)
if type == 'instance':
ins = np.zeros(img_shape[:2], dtype=np.int32)
instance_names = ['_background_']
tmp = []
for i in range(0, len(shapes)):
shape = shapes[i]
polygons = shape['points']
label = shape['label']
if type == 'class':
cls_name = label
elif type == 'instance':
cls_name = label.split('-')[0]
if label not in instance_names:
instance_names.append(label)
ins_id = len(instance_names) - 1
if cls_name not in label_name_to_value:
continue
cls_id = label_name_to_value[cls_name]
mask = polygons_to_mask(img_shape[:2], polygons)
num = np.sum(mask.reshape(-1) == True)
tmp.append((num, mask, cls_id))
if type == 'instance':
ins[mask] = ins_id
    # sort by mask area in descending order
sort_tmp = sorted(tmp, key=lambda v: v[0], reverse=True)
for i in range(0, len(sort_tmp)):
info = sort_tmp[i]
cls[info[1]] = info[2]
if type == 'instance':
ins[mask] = ins_id
if type == 'instance':
return cls, ins
return cls
def labelme_convert_seg(labelme_dir: Path, dist_dir: Path, fetch_labels: list = None):
"""
labelme转segmentation
:param labelme_dir:
:param dist_dir:
:param fetch_labels: ["汽车", "_background_"]
:return:
"""
if not dist_dir.exists():
dist_dir.mkdir()
save_img_dir = dist_dir.joinpath("images")
save_img_vis_dir = dist_dir.joinpath("images_vis")
save_labels_dir = dist_dir.joinpath("labels")
if not save_img_dir.exists():
save_img_dir.mkdir()
if not save_labels_dir.exists():
save_labels_dir.mkdir()
if not save_img_vis_dir.exists():
save_img_vis_dir.mkdir()
# get the all class names for the given dataset
class_names = ['_background_']
for label_file in glob.glob(osp.join(labelme_dir, '*.json')):
with open(label_file, encoding="utf-8") as f:
data = json.load(f)
for shape in data['shapes']:
label = shape['label']
cls_name = label
if cls_name not in class_names:
class_names.append(cls_name)
class_name_to_id = {}
class_names_list = []
class_index = 0
for i, class_name in enumerate(class_names):
if fetch_labels:
if class_name not in fetch_labels:
continue
class_id = class_index # starts with 0
class_name_to_id[class_name] = class_id
if class_id == 0:
assert class_name == '_background_'
class_index += 1
class_names_list.append(class_name)
class_names = tuple(class_names_list)
print('class_names:', class_names)
out_class_names_file = osp.join(str(dist_dir), 'labels.txt')
with open(out_class_names_file, 'w') as f:
f.writelines('\n'.join(class_names))
print('Saved class_names:', out_class_names_file)
colormap = label_colormap(255)
for root, dirs, files in os.walk(labelme_dir):
for label_file in files:
if not label_file.endswith('.json'):
continue
print('Generating dataset from:', label_file)
label_file = os.path.join(root, label_file)
base = osp.splitext(osp.basename(label_file))[0]
out_img_file = save_img_dir.joinpath(base + '.jpg')
out_lbl_file = save_labels_dir.joinpath(base + '.png')
out_viz_file = save_img_vis_dir.joinpath(base + '.png')
if out_lbl_file.exists():
continue
try:
with open(label_file, encoding="utf-8") as f:
data = json.load(f)
img_file = osp.join(osp.dirname(label_file), data['imagePath'])
img = PIL.Image.open(img_file)
if img.mode != "RGB":
img = img.convert('RGB')
img = np.asarray(img)
PIL.Image.fromarray(img).save(str(out_img_file))
                # Sort labels by area (large to small) so annotation order cannot make big masks cover small ones
lbl = shapes_to_label_sorted(
img_shape=img.shape,
shapes=data['shapes'],
label_name_to_value=class_name_to_id,
)
lbl_pil = PIL.Image.fromarray(lbl)
# Only works with uint8 label
# lbl_pil = PIL.Image.fromarray(lbl, mode='P')
# lbl_pil.putpalette((colormap * 255).flatten())
lbl_pil.save(str(out_lbl_file))
                # Generate a visualization image; not needed for training and can be disabled
# label_names = ['%d: %s' % (class_name_to_id.get(cls_name), cls_name) for cls_name in class_name_to_id]
label_names = ['%s' % cls_name for cls_name in class_name_to_id]
viz = draw_label(lbl, img, label_names, colormap=colormap)
PIL.Image.fromarray(viz).save(out_viz_file)
except Exception as ex:
print(traceback.print_exc())
            print('Aborted: class %s is not in the class list' % str(ex))
def check_gray_image(gray_label_file: Path):
"""
检测是否是灰度标注图片,在图像中存在3个类别时,输出应该是[0, 1, 2],若上述条件不满足,则说明标注图存在问题,不能直接用于模型训练
:param gray_label_file:
:return:
"""
print(np.unique(np.asarray(Image.open(str(gray_label_file)))))
if __name__ == '__main__':
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/labelme/labelme_convert_seg.py | labelme_convert_seg.py |
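    # Minimal usage sketch (paths below are hypothetical placeholders):
    # labelme_convert_seg(labelme_dir=Path("labelme_data"), dist_dir=Path("dataset_seg"))
    # check_gray_image(gray_label_file=Path("dataset_seg/labels/0.png"))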
import json
import numpy
import shutil
import colorsys
from pathlib import Path
from zhousflib.util.img_util import get_file_base64
def coco_convert_bbox(box_coco: list):
_x, _y, _width, _height = tuple(box_coco)
_bbox = (_x, _y, _x + _width, _y + _height)
return _bbox
def bbox_convert_coco(bbox: tuple):
x_min_, y_min_, x_max_, y_max_ = bbox
x = x_min_
y = y_min_
width = x_max_ - x_min_
height = y_max_ - y_min_
return [x, y, width, height]
def coco_convert_labelme(coco_dir: Path, dist_dir: Path, is_rectangle=False):
"""
coco转labelme,支持rectangle和polygon
:param coco_dir:
:param dist_dir:
:param is_rectangle:
:return:
"""
if not dist_dir.exists():
dist_dir.mkdir()
images_dir = coco_dir.joinpath("images")
coco_result_file = coco_dir.joinpath("result.json")
with coco_result_file.open("r", encoding="utf-8") as f:
result_json = json.loads(f.read())
    # images
    images = result_json["images"]
    # label categories
    categories = result_json["categories"]
    # annotations
    annotations = result_json["annotations"]
    # info
    info = result_json["info"]
hsv_tuples = [(1.0 * x / len(categories), 1., 1.) for x in range(len(categories))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))
    # iterate categories
category_info_data = {}
category_index = []
for category in categories:
category_id = category["id"]
category_name = category["name"]
if category_id not in category_info_data:
category_info_data[category_id] = category_name
if category_id not in category_index:
category_index.append(category_id)
    # iterate images
image_info_data = {}
for i in range(len(images) - 1, -1, -1):
img = images[i]
if img["width"] is None or img["height"] is None:
images.pop(i)
continue
file_name = img["file_name"]
file_name = str(file_name).rsplit("/")[-1]
img["file_name"] = file_name
image_id = img["id"]
if image_id not in image_info_data:
image_info_data[image_id] = {"image_id": image_id, "file_name": file_name,
"width": img["width"], "height": img["height"]}
    # iterate annotations
image_label_data = {}
for annotation in annotations:
image_id = annotation["image_id"]
if image_id in image_label_data:
image_label_data[image_id].append(annotation)
else:
image_label_data[image_id] = [annotation]
for image_id in image_label_data.keys():
shapes = []
image_name = image_info_data.get(image_id).get("file_name")
for annotation in image_label_data.get(image_id):
category_id = annotation["category_id"]
shape = {}
if category_id not in category_info_data:
continue
points = []
shape["label"] = category_info_data.get(category_id)
segmentation = annotation["segmentation"]
bbox = annotation["bbox"]
if len(segmentation) > 0:
# todo
continue
if len(bbox) > 0:
if is_rectangle:
_x_min, _y_min, _width, _height = tuple(bbox)
_x_max = _x_min + _width
_y_max = _y_min + _height
points.append([_x_min, _y_min])
points.append([_x_max, _y_max])
else:
bbox = coco_convert_bbox(bbox)
x_min = int(bbox[0])
y_min = int(bbox[1])
x_max = int(bbox[2])
y_max = int(bbox[3])
points.append([x_min, y_min])
points.append([x_max, y_min])
points.append([x_max, y_max])
points.append([x_min, y_max])
shape["points"] = points
shape["line_color"] = colors[category_index.index(category_id)]
shape["fill_color"] = ""
shape["shape_type"] = "rectangle" if is_rectangle else "polygon"
shape["group_id"] = ""
shape["description"] = ""
shape["flags"] = {}
shapes.append(shape)
data = {
"flags": {},
"shapes": shapes,
# "lineColor": [0, 255, 0, 128],
# "fillColor": [255, 0, 0, 128],
"imagePath": image_name,
"imageWidth": image_info_data.get(image_id).get("width"),
"imageHeight": image_info_data.get(image_id).get("height"),
"imageData": get_file_base64(images_dir.joinpath(image_name)),
}
shutil.copy(images_dir.joinpath(image_name), dist_dir)
save_file = dist_dir.joinpath("{0}.json".format(image_name.split(".")[0]))
with save_file.open("w", encoding="utf-8") as f:
json.dump(data, f, ensure_ascii=False, indent=4)
print(save_file)
def labelme_convert_coco(labelme_dirs: list, dist_dir: Path):
"""
labelme转coco, shape_type支持rectangle和polygon
:param labelme_dirs:
:param dist_dir:
:return:
"""
images_dir = dist_dir.joinpath("images")
if not images_dir.exists():
images_dir.mkdir(parents=True)
images = []
categories = []
annotations = []
categories_list = []
image_id = 0
ann_id = 0
for labelme_dir in labelme_dirs:
for json_file in labelme_dir.rglob("*.json"):
print(json_file)
with json_file.open("r", encoding="utf-8") as f:
data = json.load(f)
images.append({"width": data["imageWidth"],
"height": data["imageHeight"],
"file_name": data["imagePath"],
"id": image_id})
image_file = json_file.parent.joinpath(data["imagePath"])
if not image_file.exists():
image_file.mkdir()
continue
for shape in data["shapes"]:
label = shape["label"]
if label not in categories_list:
categories_list.append(label)
points = shape["points"]
                # shape_type supports rectangle and polygon
arr = numpy.asarray(points)
x_min = numpy.min(arr[:, 0])
x_max = numpy.max(arr[:, 0])
y_min = numpy.min(arr[:, 1])
y_max = numpy.max(arr[:, 1])
b_width = abs(x_max-x_min)
b_height = abs(y_max-y_min)
annotation = {"id": ann_id, "image_id": image_id, "category_id": categories_list.index(label),
"bbox": [x_min, y_min, b_width, b_height], "segmentation": [], "ignore": 0,
"iscrowd": 0, "area": b_width*b_height}
annotations.append(annotation)
ann_id += 1
shutil.copy(image_file, images_dir)
image_id += 1
for i, name in enumerate(categories_list):
categories.append({"id": i, "name": name})
json_file = dist_dir.joinpath("result.json")
data_json = {
"images": images,
"categories": categories,
"annotations": annotations,
"info": {}
}
with json_file.open("w", encoding="utf-8") as f:
json.dump(data_json, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
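    # Hedged usage sketch; the paths below are hypothetical placeholders, not files in this repo:
    # labelme_convert_coco(labelme_dirs=[Path(r"D:\data\labelme")],
    #                      dist_dir=Path(r"D:\data\coco"))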
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/labelme/labelme_convert_coco.py | labelme_convert_coco.py |
import json
import numpy as np
from pathlib import Path
from PIL import Image, ImageDraw, ImageFont
from zhousflib.util.img_util import get_file_base64
from zhousflib.font import Font_SimSun
FONT = ImageFont.truetype(font=str(Font_SimSun), size=15)
def clip_img(labelme_dir: Path, dst_dir: Path, clip_labels: list, show=False):
"""
    Crop labeled regions out of labelme-annotated images
    :param labelme_dir:
    :param dst_dir:
    :param clip_labels: labels to crop
    :param show:
:return:
"""
if not dst_dir.exists():
dst_dir.mkdir()
for json_file in labelme_dir.glob("*.json"):
print(json_file)
with json_file.open("r", encoding="utf-8") as f:
data = json.load(f)
imagePath = data["imagePath"]
image_file = labelme_dir.joinpath(imagePath)
image = Image.open(image_file)
if image.mode != "RGB":
image = image.convert('RGB')
image_w, image_h = image.size
draw = ImageDraw.ImageDraw(image)
for i in range(0, len(data["shapes"])):
shape = data["shapes"][i]
label = shape.get("label", None)
points = shape.get("points", None)
if not label or not points:
continue
if label not in clip_labels:
continue
p_arr = np.asarray(points)
scale_up_pixel = 5
x_min = np.min(p_arr[:, 0]) - scale_up_pixel
x_max = np.max(p_arr[:, 0]) + scale_up_pixel
y_min = np.min(p_arr[:, 1]) - scale_up_pixel
y_max = np.max(p_arr[:, 1]) + scale_up_pixel
y_min = y_min if y_min > 0 else 1
x_min = x_min if x_min > 0 else 1
x_max = x_max if x_max < image_w else image_w - 1
y_max = y_max if y_max < image_h else image_h - 1
            # convert the points to the cropped image's coordinate origin
origin_point = (x_min, y_min)
p_arr_1 = p_arr[:, 0] - x_min
p_arr_2 = p_arr[:, 1] - y_min
p_arr_clip = np.stack([p_arr_1, p_arr_2], 1).tolist()
            # crop
cropped = image.crop((x_min, y_min, x_max, y_max))
save_img_file = dst_dir.joinpath("{0}_{1}{2}".format(image_file.stem, i, image_file.suffix))
cropped.save(save_img_file, quality=100)
image_w_clip, image_h_clip = cropped.size
            # write the json file
save_file = dst_dir.joinpath("{0}_{1}.json".format(image_file.stem, i))
data_clip = {"imagePath": str(save_img_file.name),
"imageData": get_file_base64(save_img_file),
"imageHeight": image_h_clip,
"imageWidth": image_w_clip}
shape_clip = shape.copy()
shape_clip["points"] = p_arr_clip
data_clip["shapes"] = [shape_clip]
data_clip["version"] = data["version"]
data_clip["flags"] = data["flags"]
with save_file.open("w", encoding="utf-8") as f:
json.dump(data_clip, f, ensure_ascii=False, indent=4)
if show:
width = abs(x_max - x_min)
height = abs(y_max - y_min)
draw.rectangle(xy=(x_min, y_min, x_min + width, y_min + height), fill=None, outline="red", width=1)
fw, fh = FONT.getsize(label)
if y_min < fh:
y_min = y_min + fh
if (x_min + fw) > image_w:
x_min = x_max - fw
draw.text(xy=(x_min, y_min - fh), text=label, fill="red", font=FONT)
if show:
image.show()
break
if __name__ == "__main__":
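    # Hedged usage sketch; paths and labels are hypothetical placeholders:
    # clip_img(labelme_dir=Path(r"D:\data\labelme"),
    #          dst_dir=Path(r"D:\data\labelme_clip"),
    #          clip_labels=["table"], show=False)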
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/labelme/labelme_dataset_clip.py | labelme_dataset_clip.py |
import os
import json
import numpy
import base64
import requests
import numpy as np
from io import BytesIO
from pathlib import Path
from PIL import Image, ImageOps
from zhousflib.util import pil_util
def show_normalize_box(image_file: Path, bbox: list, normalize_size: list, fill_transparent=128):
"""
    Show boxes after normalization
    :param image_file:
    :param bbox:
    :param normalize_size: normalized size
    :param fill_transparent: fill transparency in [0, 255]; -1 means no fill
:return:
boxes = [[557, 102, 693, 367],
[557, 102, 693, 367],
[557, 102, 693, 367],
[557, 102, 693, 367],
[575, 404, 666, 698],
[575, 404, 666, 698],
[575, 404, 666, 698],
[903, 367, 990, 794],
[346, 529, 431, 794],
[346, 529, 431, 794],
[346, 529, 431, 794]]
show_normalize_box(bbox=boxes, image_file=None, normalize_size=[1000, 1000])
"""
if normalize_size is None:
normalize_size = [1000, 1000]
n_box = []
image_size = pil_util.get_w_h(image_file=image_file)
for box in bbox:
n_box.append(_denormalize_box(box=box, image_size=image_size, normalize_size=normalize_size))
pil_util.draw_rectangle(bbox=n_box, image_file=image_file, fill_transparent=fill_transparent)
def _normalize_box(box, image_size, normalize_size, offset_x=0, offset_y=0):
"""
    Normalize a box
    :param box: (x_min, y_min, x_max, y_max)
    :param image_size: [img_w, img_h] image width and height
    :param normalize_size: [1000, 1000]
:param offset_x:
:param offset_y:
:return:
"""
if normalize_size is None:
normalize_size = [1000, 1000]
return [
int((box[0] + offset_x) * normalize_size[0] / image_size[0]),
int((box[1] + offset_y) * normalize_size[1] / image_size[1]),
int((box[2] + offset_x) * normalize_size[0] / image_size[0]),
int((box[3] + offset_y) * normalize_size[1] / image_size[1]),
]
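# Worked example for _normalize_box (illustrative values, not from this repo):
# box=(100, 200, 300, 400) in a 2000x1000 image with normalize_size=[1000, 1000]
#   x: 100*1000/2000 = 50, 300*1000/2000 = 150; y: 200*1000/1000 = 200, 400*1000/1000 = 400
#   -> [50, 200, 150, 400]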
def _denormalize_box(box, image_size, normalize_size, offset_x=0, offset_y=0):
"""
    Denormalize a box
    :param box: (x_min, y_min, x_max, y_max)
    :param image_size: [img_w, img_h] image width and height
    :param normalize_size: [1000, 1000]
:param offset_x:
:param offset_y:
:return:
"""
    return [
        int((box[0] - offset_x) * image_size[0] / normalize_size[0]),
        int((box[1] - offset_y) * image_size[1] / normalize_size[1]),
        int((box[2] - offset_x) * image_size[0] / normalize_size[0]),
        int((box[3] - offset_y) * image_size[1] / normalize_size[1])
    ]
def _np2base64(image_np):
img = Image.fromarray(image_np)
base64_str = _pil2base64(img)
return base64_str
def _pil2base64(image, image_type=None, size=False):
if not image_type:
image_type = "JPEG"
img_buffer = BytesIO()
image.save(img_buffer, format=image_type)
byte_data = img_buffer.getvalue()
base64_str = base64.b64encode(byte_data)
base64_string = base64_str.decode("utf-8")
if size:
return base64_string, image.size
else:
return base64_string
def _get_buffer(data, file_like=False):
buff = None
if len(data) < 1024:
if os.path.exists(data):
buff = open(data, "rb").read()
elif data.startswith("http://") or data.startswith("https://"):
resp = requests.get(data, stream=True)
if not resp.ok:
raise RuntimeError("Failed to download the file from {}".format(data))
buff = resp.raw.read()
else:
raise FileNotFoundError("Image file {} not found!".format(data))
if buff is None:
buff = base64.b64decode(data)
if buff and file_like:
return BytesIO(buff)
return buff
def _read_image(image):
"""
read image to np.ndarray
"""
image_buff = _get_buffer(image)
# Use exif_transpose to correct orientation
_image = np.array(ImageOps.exif_transpose(Image.open(BytesIO(image_buff)).convert("RGB")))
return _image
def _save_data(save_file: Path, dataset: list):
with save_file.open("w", encoding="utf-8") as f:
for item in dataset:
f.write(json.dumps(item, ensure_ascii=False) + "\n")
def _create_prompt(dataset: list, label_split: str, label_type: dict):
label_data = []
for json_file in dataset:
prompt_list = []
bbox_list = []
content = ""
with json_file.open("r", encoding="utf-8") as f:
print(json_file)
data = json.load(f)
image_file = json_file.parent.joinpath(data["imagePath"])
if not image_file.exists():
continue
image = _read_image(str(image_file))
image_base64 = _np2base64(image)
image_width = data["imageWidth"]
image_height = data["imageHeight"]
for shape in data["shapes"]:
label = shape["label"]
points = shape["points"]
                # shape_type supports rectangle and polygon
arr = numpy.asarray(points)
x_min = numpy.min(arr[:, 0])
x_max = numpy.max(arr[:, 0])
y_min = numpy.min(arr[:, 1])
y_max = numpy.max(arr[:, 1])
# b_width = abs(x_max - x_min)
# b_height = abs(y_max - y_min)
bbox = _normalize_box(box=(x_min, y_min, x_max, y_max), image_size=[image_width, image_height], normalize_size=[1000, 1000])
if str(label).find(label_split) > -1:
prompt = str(label).split(label_split)[0]
word = str(label).split(label_split)[-1]
start = len(content)
content += word
end = start + len(word) - 1
prompt = label_type.get(prompt, None)
if not prompt:
continue
prompt_list.append((prompt, word, start, end, bbox))
for i in range(0, len(word)):
bbox_list.append(bbox)
else:
for i in range(0, len(label)):
bbox_list.append(bbox)
if len(prompt_list) > 0:
for item in prompt_list:
prompt, word, start, end, bbox = item
prompt_item = {
"content": content,
"result_list": [
{
"text": str(word),
"start": start,
"end": end,
}
],
"prompt": prompt,
"bbox": bbox_list,
"image": image_base64,
}
label_data.append(prompt_item)
return label_data
def train_test_split(data_dirs: list, dst_dir: Path, label_split: str, label_type: dict, val_size=0.2, test_size=0.2, shuffle=True):
"""
    Build the dataset
    :param data_dirs: labelme directories
    :param dst_dir: output directory
    :param label_split: separator used in prompt labels, e.g. "|"
    :param label_type: prompt label mapping
    :param val_size: validation split ratio
    :param test_size: test split ratio
    :param shuffle: whether to shuffle
:return:
"""
if not dst_dir.exists():
dst_dir.mkdir(parents=True)
label_file_list = []
for labelme_dir in data_dirs:
for json_file in labelme_dir.rglob("*.json"):
with json_file.open("r", encoding="utf-8") as f:
data = json.load(f)
image_file = json_file.parent.joinpath(data["imagePath"])
if not image_file.exists():
continue
label_file_list.append(json_file)
    # shuffle
if shuffle:
state = np.random.get_state()
np.random.shuffle(label_file_list)
np.random.set_state(state)
    # split the dataset
dataset_val = []
dataset_test = []
split_index = 0
if 1 > val_size > 0:
split_index = int(len(label_file_list) * val_size)
dataset_val = label_file_list[:split_index]
if 1 > test_size > 0:
start = split_index
split_index += int(len(label_file_list) * test_size)
dataset_test = label_file_list[start:split_index]
dataset_train = label_file_list[split_index:]
    # training set
if len(dataset_train) > 0:
label_data = _create_prompt(dataset=dataset_train, label_split=label_split, label_type=label_type)
_save_data(save_file=dst_dir.joinpath("train.txt"), dataset=label_data)
    # validation set
if len(dataset_val) > 0:
label_data = _create_prompt(dataset=dataset_val, label_split=label_split, label_type=label_type)
_save_data(save_file=dst_dir.joinpath("dev.txt"), dataset=label_data)
    # test set
if len(dataset_test) > 0:
label_data = _create_prompt(dataset=dataset_test, label_split=label_split, label_type=label_type)
_save_data(save_file=dst_dir.joinpath("test.txt"), dataset=label_data)
txt = "train: {0}, val: {1}, test: {2}, total: {3}".format(len(dataset_train), len(dataset_val),
len(dataset_test), len(label_file_list))
print(txt)
readme_txt = dst_dir.joinpath("readme.txt")
with readme_txt.open("w") as f:
f.write(txt)
if __name__ == "__main__":
train_test_split(data_dirs=[Path(r"C:\Users\zhousf-a\Desktop\数据 JSON")],
dst_dir=Path(r"C:\Users\zhousf-a\Desktop\uiex"),
label_split="|",
label_type={"bh": "编号", "gs": "根数", "dj": "等级", "zj": "直径", "dgc": "单根长", "bc": "边长"},
val_size=0.2,
test_size=0.2,
shuffle=True) | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/labelme/labelme_convert_uiex.py | labelme_convert_uiex.py |
import shutil
import imghdr
import numpy as np
from pathlib import Path
from zhousflib.util import list_util
"""
Generally, with a pretrained model loaded, 10-20 images per class are enough for basic classification;
without a pretrained model, each class needs at least 100-200 images for a basic result.
The train/validation/test sets must each cover all classes, otherwise training will be troublesome.
"""
def fetch_available_cls_folder(img_dir: Path):
"""
    Delete empty class folders
:param img_dir:
:return:
"""
for folder in img_dir.iterdir():
cls = [i for i in folder.rglob("*.*")]
if len(cls) == 0:
print(folder)
shutil.rmtree(folder)
def train_test_split(image_dir: Path, val_size=0.2, test_size=0.2, shuffle=True, every_cls_count_limit=20):
"""
    Split into train/validation/test sets
    :param image_dir: image directory
    :param val_size: validation split ratio
    :param test_size: test split ratio
    :param shuffle: whether to shuffle the dataset
    :param every_cls_count_limit: minimum image count per class; classes below it are padded by random duplication
:return:
"""
train_txt_file = image_dir.parent.joinpath("train_list.txt")
val_txt_file = image_dir.parent.joinpath("val_list.txt")
test_txt_file = image_dir.parent.joinpath("test_list.txt")
label_list_file = image_dir.parent.joinpath("label_list.txt")
images = []
label_list = []
    # label file
for folder in image_dir.rglob("*.*"):
if folder.parent.name not in label_list:
label_list.append(folder.parent.name)
if not label_list_file.exists():
with label_list_file.open("w", encoding="utf-8") as f:
for i, d in enumerate(label_list):
f.write("{0} {1}\n".format(i, d))
label_files = {}
    # iterate over all image files
for folder in image_dir.rglob("*.*"):
if not folder.is_file():
continue
if not imghdr.what(folder):
continue
file = "{0}/{1}/{2} {3}\n".format(folder.parent.parent.name, folder.parent.name, folder.name, label_list.index(folder.parent.name))
if folder.parent.name not in label_files:
label_files[folder.parent.name] = [file]
else:
label_files[folder.parent.name].append(file)
print(file)
images.append(file)
    # randomly duplicate samples for classes below every_cls_count_limit
for label in label_files:
label_count = len(label_files.get(label)) if label_files.get(label) else 0
if label_count < every_cls_count_limit:
for item in list_util.random_choices(label_files.get(label), choose_k=abs(every_cls_count_limit-label_count)):
images.append(item)
    # shuffle
if shuffle:
state = np.random.get_state()
np.random.shuffle(images)
np.random.set_state(state)
dataset_val = []
dataset_test = []
split_index = 0
if 1 > val_size > 0:
split_index = int(len(images) * val_size)
dataset_val = images[:split_index]
if 1 > test_size > 0:
start = split_index
split_index += int(len(images) * test_size)
dataset_test = images[start:split_index]
dataset_train = images[split_index:]
    # training set
if len(dataset_train) > 0:
with train_txt_file.open("w", encoding="utf-8") as f:
for d in dataset_train:
f.write(d)
    # validation set
if len(dataset_val) > 0:
with val_txt_file.open("w", encoding="utf-8") as f:
for d in dataset_val:
f.write(d)
    # test set
if len(dataset_test) > 0:
with test_txt_file.open("w", encoding="utf-8") as f:
for d in dataset_test:
f.write(d)
def data_statistics(image_dir: Path):
"""
    Class distribution statistics
    :param image_dir: dataset directory
:return:
"""
label_files = {}
    # iterate over all image files
for folder in image_dir.rglob("*.*"):
if not folder.is_file():
continue
if not imghdr.what(folder):
continue
if folder.parent.name not in label_files:
label_files[folder.parent.name] = [folder]
else:
label_files[folder.parent.name].append(folder)
for label in label_files:
print("{0}: {1}".format(label, len(label_files.get(label))))
if __name__ == "__main__":
# train_test_split(image_dir=Path(r"C:\Users\zhousf-a\Desktop\steel_id\images"), val_size=0.2, test_size=0,
# shuffle=False)
data_statistics(image_dir=Path(r"C:\Users\zhousf-a\Desktop\steel_id\images"))
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/datasets/classification/classification_dataset_split.py | classification_dataset_split.py |
import json
def class_to_json(class_obj):
"""
    Convert an object to a JSON string
    :param class_obj: object
    :return: JSON string
"""
return json.dumps(class_obj, default=lambda obj: obj.__dict__, sort_keys=True, indent=4)
def dict_to_json(dic):
"""
    Convert a dict to a JSON string
    :param dic: dict
:return:
"""
return json.dumps(dic, sort_keys=True, indent=4)
def json_str_to_class(json_str, object_hook):
"""
    Convert a JSON string to an object
    :param json_str: JSON string
    :param object_hook: callback
:return:
"""
return json.loads(json_str, object_hook=object_hook)
def write_dict_into_json_file(a_dict, json_file):
"""
    Write a dict into a JSON file
    :param a_dict: dict
    :param json_file: JSON file
:return:
"""
    with open(json_file, 'w', encoding='utf-8') as f:
json.dump(a_dict, f, ensure_ascii=False, sort_keys=True, indent=4, separators=(',', ': '))
def write_obj_into_json_file(obj, json_file):
"""
    Write an object into a JSON file
    :param obj: object
    :param json_file: JSON file
:return:
"""
a_dict = class_to_json(obj)
if not isinstance(a_dict, dict):
a_dict = eval(a_dict)
write_dict_into_json_file(a_dict, json_file)
def load_obj_from_json_file(obj, json_file):
"""
    Load an object from a JSON file
:param obj:
:param json_file:
:return:
"""
    with open(json_file, 'r', encoding='utf-8') as f:
content = f.read()
if content is not None and content.strip() != '':
obj.__dict__ = json.loads(s=content)
return obj
return None
def load_dict_from_json_file(json_file):
"""
    Load a dict from a JSON file
:param json_file:
:return: dict
"""
    with open(json_file, 'r', encoding='utf-8') as f:
content = f.read()
if content is not None and content.strip() != '':
return json.loads(s=content)
return None
def sort(json_or_dict):
"""
    Sort: sort by key
:param json_or_dict: {'后门壳(左)': ['刮擦'], '前门壳(左)': ['撕裂', '刮擦']}
:return: {'前门壳(左)': ['刮擦', '撕裂'], '后门壳(左)': ['刮擦']}
"""
json_or_dict = json.loads(json.dumps(json_or_dict, sort_keys=True, ensure_ascii=False))
for k in json_or_dict:
if isinstance(json_or_dict[k], list):
            # dicts themselves are unordered, so sort their items (by guanning)
if json_or_dict[k] and isinstance(json_or_dict[k][0], dict):
for index in range(0, len(json_or_dict[k])):
json_or_dict[k][index] = sorted(json_or_dict[k][index].items(), key=lambda x: x[0], reverse=True)
                # sort the list by the first key of the sorted dicts (which are now tuples)
json_or_dict[k] = sorted(json_or_dict[k], key=lambda x: x[0])
else:
json_or_dict[k] = sorted(json_or_dict[k])
return json_or_dict
if __name__ == '__main__':
d = {'far': '', 'middle': '/media/ubuntu/b8f80802-d95a-41c3-b157-6f4e34967425/workspace/AI_TEST/damage/2018071802/middle.jpg', 'near': '', 'code': '0000', 'message': 'success', 'result': ''}
write_dict_into_json_file(d, 't.json') | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/json_util.py | json_util.py |
import numpy
import colorsys
from pathlib import Path
from PIL import Image, ImageDraw
def four_point_convert_bbox(four_points: list):
"""
    Convert four points to a bbox
:param four_points: [[252, 140], [300, 140], [300, 189], [252, 189]]
:return:
"""
arr = numpy.asarray(four_points)
x_min = min(arr[:, 0])
y_min = min(arr[:, 1])
x_max = max(arr[:, 0])
y_max = max(arr[:, 1])
return x_min, y_min, x_max, y_max
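# e.g. four_point_convert_bbox([[252, 140], [300, 140], [300, 189], [252, 189]])
# -> (252, 140, 300, 189)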
def draw_rectangle(bbox: list, image_file: Path = None, image_size: list = None, fill_transparent=255, show=True):
"""
    Draw rectangles
    :param bbox: [(x_min, y_min, x_max, y_max)]
    :param image_file: draws on a blank canvas when None
    :param image_size:
    :param fill_transparent: fill transparency in [0, 255]; -1 means no fill
:param show:
:return:
"""
draw_p = []
for box in bbox:
x_min, y_min, x_max, y_max = box
draw_p.append([(x_min, y_min), (x_max, y_min), (x_max, y_max), (x_min, y_max)])
return draw_polygon(polygon=draw_p, image_file=image_file, image_size=image_size, fill_transparent=fill_transparent, show=show)
def get_w_h(image_file: Path = None):
"""
    Get image width and height
:param image_file:
:return:
"""
image = Image.open(image_file)
return [image.width, image.height]
def draw_polygon(polygon: list, image_file: Path = None, image_size: list = None, fill_transparent=255, show=True):
"""
    Draw polygons
    :param polygon: [[[255, 376], [291, 409], [255, 443], [218, 409]], [[252, 140], [300, 140], [300, 189], [252, 189]]]
    :param image_file: draws on a blank canvas when None
    :param image_size:
    :param fill_transparent: fill transparency in [0, 255]; -1 means no fill
:param show:
:return:
"""
hsv_tuples = [(1.0 * x / len(polygon), 1., 1.) for x in range(len(polygon))]
colors = list(map(lambda x: colorsys.hsv_to_rgb(*x), hsv_tuples))
colors = list(map(lambda x: (int(x[0] * 255), int(x[1] * 255), int(x[2] * 255)), colors))
image_white = None
if image_size is None:
image_size = [500, 500]
if image_file is None:
image = Image.new('RGBA', (image_size[0], image_size[1]), (255, 255, 255))
draw = ImageDraw.ImageDraw(image)
else:
image = Image.open(image_file)
if image.mode != "RGBA":
image = image.convert('RGBA')
image_white = Image.new('RGBA', (image.width, image.height), (255, 255, 255, 0))
draw = ImageDraw.ImageDraw(image_white)
for index, point in enumerate(polygon):
draw_p = [(p[0], p[1]) for p in point]
        # outline color
        polygon_color = colors[index]
        # fill color with transparency
        fill_color = (polygon_color[0], polygon_color[1], polygon_color[2], fill_transparent) if fill_transparent > -1 else None
        draw.polygon(draw_p, outline=polygon_color, fill=fill_color)
draw.text(xy=(draw_p[0][0]+1, draw_p[0][1]+1), text=str(index))
if image_white is not None:
image.paste(Image.alpha_composite(image, image_white))
if show:
image.show()
return image
if __name__ == "__main__":
# draw_rectangle([(218, 376, 291, 443)])
draw_polygon([[[255, 376], [291, 409], [255, 443], [218, 409]], [[252, 140], [300, 140], [300, 189], [252, 189]]])
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/pil_util.py | pil_util.py |
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
def show_rect(boxes: list):
"""
    Show boxes
:param boxes: [(x_min, y_min, x_max, y_max)]
:return:
[(317,280,553,395), (374,295,485,322)]
"""
colors = list(mcolors.TABLEAU_COLORS.keys())
plt.xlabel("x", fontweight='bold', size=14)
plt.ylabel("y", fontweight='bold', size=14)
    ax = plt.gca()  # axes
x_max = 0
y_max = 0
for index, box in enumerate(boxes):
x_max = box[2] if box[2] > x_max else x_max
y_max = box[3] if box[3] > y_max else y_max
ax.add_patch(
plt.Rectangle(xy=(box[0], box[1]), width=(box[2] - box[0]), height=(box[3] - box[1]),
alpha=1,
fill=False,
color=colors[index],
facecolor=colors[index],
linewidth=1))
plt.xlim(0, int(2 * x_max))
plt.ylim(0, int(2 * y_max))
    # convert to screen coordinates (origin at the top-left)
    ax.xaxis.set_ticks_position('top')  # move the X axis to the top
    ax.invert_yaxis()  # invert the Y axis
plt.show()
def compute_iou(predicted_box, ground_truth_box):
"""
    Compute the intersection-over-union (IoU)
    :param predicted_box: predicted box = (x_min, y_min, x_max, y_max)
    :param ground_truth_box: ground-truth box = (x_min, y_min, x_max, y_max)
:return:
"""
px_min, py_min, px_max, py_max = predicted_box
gx_min, gy_min, gx_max, gy_max = ground_truth_box
    p_area = (px_max - px_min) * (py_max - py_min)  # area of P
    g_area = (gx_max - gx_min) * (gy_max - gy_min)  # area of G
    # corners of the intersection rectangle (x_min, y_min, x_max, y_max)
    _x_min = max(px_min, gx_min)  # x of the top-left corner
    _y_min = max(py_min, gy_min)  # y of the top-left corner
    _x_max = min(px_max, gx_max)  # x of the bottom-right corner
    _y_max = min(py_max, gy_max)  # y of the bottom-right corner
    # area of the intersection rectangle
w = _x_max - _x_min
h = _y_max - _y_min
if w <= 0 or h <= 0:
return 0
    area = w * h  # area of G∩P
return area / (p_area + g_area - area)
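# Worked example: predicted=(0, 0, 2, 2), ground_truth=(1, 1, 3, 3)
# intersection = 1*1 = 1, union = 4 + 4 - 1 = 7, IoU = 1/7 ≈ 0.1429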
def compute_contain(box1, box2):
"""
    Check whether two boxes have a containment relation
    :param box1: (x_min, y_min, x_max, y_max)
    :param box2: (x_min, y_min, x_max, y_max)
    :return: the overlap area divided by the smaller box's area; generally > 0.8 means containment
box1=(317,280,553,395)
box2=(374,295,485,322)
"""
px_min = min(box1[0], box1[2])
py_min = min(box1[1], box1[3])
px_max = max(box1[0], box1[2])
py_max = max(box1[1], box1[3])
# px_min, py_min, px_max, py_max = box1
# gx_min, gy_min, gx_max, gy_max = box2
gx_min = min(box2[0], box2[2])
gy_min = min(box2[1], box2[3])
gx_max = max(box2[0], box2[2])
gy_max = max(box2[1], box2[3])
    p_area = (px_max - px_min) * (py_max - py_min)  # area of P
    g_area = (gx_max - gx_min) * (gy_max - gy_min)  # area of G
    # corners of the intersection rectangle (x_min, y_min, x_max, y_max)
    _x_min = max(px_min, gx_min)  # x of the top-left corner
    _y_min = max(py_min, gy_min)  # y of the top-left corner
    _x_max = min(px_max, gx_max)  # x of the bottom-right corner
    _y_max = min(py_max, gy_max)  # y of the bottom-right corner
    # area of the intersection rectangle
w = _x_max - _x_min
h = _y_max - _y_min
if w <= 0 or h <= 0:
return 0
    area = w * h  # area of G∩P
if p_area >= g_area:
return area / g_area
else:
return area / p_area
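# Worked example with the docstring boxes: box2 lies fully inside box1,
# so the overlap equals box2's area and the returned ratio is 1.0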
def group_by_box_overlap(od_result: list, return_area=False, area_rate=0.8):
"""
    Group boxes by overlap area - a generic algorithm for object detection, drawing, etc.
    :param od_result: [(?, ?, x_min, y_min, x_max, y_max)], the box sits at the end of each tuple
    :param return_area: whether to return the area
    :param area_rate: overlap ratio threshold; above it the two boxes count as a containment relation
:return:
[[(index, [area, (?, ?, x_min, y_min, x_max, y_max)])]]
or
[[(?, ?, x_min, y_min, x_max, y_max)]]
"""
boxes = {}
    # sort by area in descending order; the box sits at the end of each tuple
for index, item in enumerate(od_result):
(x_min, y_min, x_max, y_max) = item[-4:]
area = (x_max - x_min) * (y_max - y_min)
boxes[index] = [area, item]
    boxes = sorted(boxes.items(), key=lambda d: d[1][0], reverse=True)  # sort by area only, to avoid comparing items
box_group = []
has_add_index = []
for item1 in boxes:
(index1, [area1, box1]) = item1
(x_min1, y_min1, x_max1, y_max1) = box1[-4:]
items = [item1] if return_area else [box1]
if index1 in has_add_index:
continue
has_add_index.append(index1)
for i, item2 in enumerate(boxes):
(index2, [area2, box2]) = item2
(x_min2, y_min2, x_max2, y_max2) = box2[-4:]
if compute_contain((x_min1, y_min1, x_max1, y_max1),
(x_min2, y_min2, x_max2, y_max2)) > area_rate:
if item1 == item2:
continue
if index2 in has_add_index:
continue
has_add_index.append(index2)
if return_area:
items.append(item2)
else:
items.append(box2)
box_group.append(items)
return box_group
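# Illustrative grouping (hypothetical boxes): with
#   [(0, 0, 10, 10), (1, 1, 9, 9), (20, 20, 30, 30)] and area_rate=0.8,
# the second box is contained in the first, so the groups are
#   [[(0, 0, 10, 10), (1, 1, 9, 9)], [(20, 20, 30, 30)]]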
def search_right_box(boxes: list):
"""
搜索最右侧box
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
boxes.sort(key=lambda x: x[-2], reverse=True)
return boxes[0]
def search_top_box(boxes: list):
"""
    Find the top-most box
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
boxes.sort(key=lambda x: x[-3], reverse=False)
return boxes[0]
def search_bottom_box(boxes: list):
"""
    Find the bottom-most box
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
boxes.sort(key=lambda x: x[-1], reverse=True)
return boxes[0]
def search_nearby_bottom_box(target_box, boxes: list):
"""
    Find the box immediately below target_box
:param target_box: (?, ?, x_min, y_min, x_max, y_max)
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
t_x = (target_box[-2] + target_box[-4]) / 2
t_y = (target_box[-3] + target_box[-1]) / 2
t_width = abs(target_box[-2] - target_box[-4])
t_height = abs(target_box[-3] - target_box[-1])
for box in boxes:
c_x = (box[-2] + box[-4]) / 2
c_y = (box[-3] + box[-1]) / 2
c_width = abs(box[-2] - box[-4])
c_height = abs(box[-3] - box[-1])
        # the X distance between the two centers is within half the sum of the two boxes' widths,
        # i.e. the boxes lie on the same vertical line
if abs(c_x-t_x) < (t_width + c_width) / 2:
if t_y < c_y:
return box
return None
def search_nearby_right_box(target_box, boxes: list):
"""
    Find the box immediately to the right of target_box
:param target_box: (?, ?, x_min, y_min, x_max, y_max)
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
t_x = (target_box[-2] + target_box[-4]) / 2
t_y = (target_box[-3] + target_box[-1]) / 2
t_width = abs(target_box[-2] - target_box[-4])
t_height = abs(target_box[-3] - target_box[-1])
for box in boxes:
c_x = (box[-2] + box[-4]) / 2
c_y = (box[-3] + box[-1]) / 2
c_width = abs(box[-2] - box[-4])
c_height = abs(box[-3] - box[-1])
        # the Y distance between the two centers is within half the sum of the two boxes' heights,
        # i.e. the boxes lie on the same horizontal line
if abs(c_y-t_y) < (t_height + c_height) / 2:
if t_x < c_x:
return box
return None
def search_nearby_left_box(target_box, boxes: list):
"""
    Find the box immediately to the left of target_box
:param target_box: (?, ?, x_min, y_min, x_max, y_max)
:param boxes: [(?, ?, x_min, y_min, x_max, y_max)]
:return: box
"""
if len(boxes) == 0:
return None
t_x = (target_box[-2] + target_box[-4]) / 2
t_y = (target_box[-3] + target_box[-1]) / 2
t_width = abs(target_box[-2] - target_box[-4])
t_height = abs(target_box[-3] - target_box[-1])
for box in boxes:
c_x = (box[-2] + box[-4]) / 2
c_y = (box[-3] + box[-1]) / 2
c_width = abs(box[-2] - box[-4])
c_height = abs(box[-3] - box[-1])
        # the Y distance between the two centers is within half the sum of the two boxes' heights,
        # i.e. the boxes lie on the same horizontal line
if abs(c_y-t_y) < (t_height + c_height) / 2:
if t_x > c_x:
return box
return None
def box_scale_up(box, offset=50):
"""
    Enlarge a box
:param box:
:param offset:
:return:
"""
x_min, y_min, x_max, y_max = box
_x_min = x_min - offset
_x_min = 0 if _x_min < 0 else _x_min
_x_max = x_max + offset
_y_min = y_min - offset
_y_min = 0 if _y_min < 0 else _y_min
_y_max = y_max + offset
return _x_min, _y_min, _x_max, _y_max
def box_scale_up_horizontal(box, offset=50):
"""
    Enlarge a box, horizontal direction only
:param box:
:param offset:
:return:
"""
x_min, y_min, x_max, y_max = box
_x_min = x_min - offset
_x_min = 0 if _x_min < 0 else _x_min
_x_max = x_max + offset
return _x_min, y_min, _x_max, y_max
def box_scale_up_vertical(box, offset=50):
"""
    Enlarge a box, vertical direction only
:param box:
:param offset:
:return:
"""
x_min, y_min, x_max, y_max = box
_y_min = y_min - offset
_y_min = 0 if _y_min < 0 else _y_min
_y_max = y_max + offset
return x_min, _y_min, x_max, _y_max
def box_scale_down(box, offset=50):
"""
    Shrink a box
:param box:
:param offset:
:return:
"""
x_min, y_min, x_max, y_max = box
offset = min(x_min, y_min) if min(x_min, y_min) < offset else offset
_x_min = x_min + offset
_x_max = x_max - offset
_y_min = y_min + offset
_y_max = y_max - offset
return _x_min, _y_min, _x_max, _y_max
def location_y_axis(target_box, box, height_threshold=0.25):
"""
    Positional relation on the Y axis: whether box is above or below target_box, based on the center y
    :param target_box:
    :param box:
    :param height_threshold: ratio threshold; positions differ when the gap exceeds this fraction of target_box's height
    :return: 1: above  0: below  -1: same level
"""
c_y_t = (target_box[-3] + target_box[-1]) / 2
height_t = abs(target_box[-3] - target_box[-1])
c_y = (box[-3] + box[-1]) / 2
    # when the Y distance between the two centers exceeds target_box height * height_threshold,
    # box counts as above or below target_box
if abs(c_y_t - c_y) >= (height_t * height_threshold):
if c_y_t > c_y:
return 1
else:
return 0
else:
return -1
if __name__ == "__main__":
# print(box_scale_down((10, 10, 20, 20), offset=2))
# print(box_scale_up((-166.68197631835938, -0.008893102407455444, 1810.6822509765625, 143.40452575683594), offset=2))
# a =(168.9995880126953, 40.77224349975586, 186.8643341064453, 62.222076416015625)
# b =(151.0, 34.0, 234.0, 77.0)
# print(compute_iou(a, b))
# print(compute_contain(a, b))
# show_rect([a, b])
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/iou_util.py | iou_util.py |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpathes
from shapely import geometry
def in_poly(poly, point):
"""
    Check whether a point lies inside the polygon (boundary excluded)
    :param poly: polygon vertex coordinates: [(0, 0), (1, 0), (1, 1), (0, 1)]
    :param point: point coordinates: (0.1, 0.5)
:return:
"""
line = geometry.LineString(poly)
point = geometry.Point(point)
polygon = geometry.Polygon(line)
return polygon.contains(point)
def make_mesh(box_min, min_box_w, min_box_h, show=False):
"""
    Split a box into a grid of cells of size w x h
make_mesh([0, 0, 1280, 1280], 128, 128)
:param box_min:
:param min_box_w:
:param min_box_h:
:param show:
:return:
    all sub-boxes, ordered by column [(column_index, row_index, x_min, y_min, x_max, y_max)]
    column_index: column index
    row_index: row index
"""
[x_min, y_min, x_max, y_max] = box_min
list_x = np.arange(x_min, x_max, min_box_w)
list_y = np.arange(y_min, y_max, min_box_h)
    # the cell for list_x[i] and list_y[j] is [list_x[i], list_y[j], list_x[i+1], list_y[j+1]]
if show:
fig, ax = plt.subplots()
color = ['red', 'black', 'yellow', 'blue', 'green', 'purple']
box_min = [] # x_min, y_min, x_max, y_max
for i in range(len(list_x)):
for j in range(len(list_y)):
x_left = list_x[i]
y_down = list_y[j]
if i == len(list_x) - 1:
x_right = x_max
else:
x_right = list_x[i + 1]
            if j == len(list_y) - 1:
                y_up = y_max
            else:
                y_up = list_y[j + 1]
            box_min.append((i, j, x_left, y_down, x_right, y_up))
if show:
rect = mpathes.Rectangle((x_left, y_down), min_box_w, min_box_h, linewidth=1, edgecolor='r', facecolor='none')
# rect = mpathes.Rectangle((x_left, y_down), min_box_w, min_box_h, color=color[(i + j % 5) % 5])
ax.add_patch(rect)
if show:
ax.set_xbound(x_min, x_max)
ax.set_ybound(y_min, y_max)
plt.show()
return box_min
def query_box_position(boxes, point):
"""
    Find which box the point falls in
:param boxes:
:param point:
:return: (column_index, row_index, x_min, y_min, x_max, y_max)
    column_index: column index
    row_index: row index
"""
for box in boxes:
column_index, row_index, x_min, y_min, x_max, y_max = box
        # expand the box by 1 pixel so the poly check includes the boundary
x_min -= 1
y_min -= 1
x_max += 1
y_max += 1
        # build the poly coordinates
poly = [(x_min, y_min), (x_max, y_min), (x_max, y_max), (x_min, y_max)]
        # check whether the point is inside the poly (boundary excluded)
if in_poly(poly=poly, point=point):
return box
return None
if __name__ == "__main__":
    # check whether a point is inside a poly
    # poly = [(0, 0), (1, 0), (1, 1), (0, 1)]  # polygon coordinates
    # pt2 = (0.1, 0.5)  # point coordinates
    # print(in_poly(poly, pt2))
    # split a box into a grid by w, h
boxes_ = make_mesh([0, 0, 4000, 5000], 1280, 1280, show=True)
for box in boxes_:
print(box)
# [(column_index, row_index, x_min, y_min, x_max, y_max)]
    # find which box a point falls in
# query_box_position(boxes=boxes_, point=(1, 1))
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/poly_util.py | poly_util.py |
import re
def is_number(string: str):
"""
    Whether the string is numeric (int, float, double)
:param string:
:return:
"""
try:
float(string)
return True
except ValueError:
return False
def is_greater_than_number(string1: str, string2: str):
"""
    Whether string1's numeric value is greater than string2's
    :param string1: "12"
    :param string2: "12.0"
    :return: note that identical strings also return True
"""
if is_number(string1) and is_number(string2):
if float(string1) > float(string2):
return True
if string1 == string2:
return True
return False
def is_less_than_number(string1: str, string2: str):
"""
    Whether string1's numeric value is less than string2's
    :param string1: "12"
    :param string2: "12.0"
    :return: note that identical strings also return True
"""
if is_number(string1) and is_number(string2):
if float(string1) < float(string2):
return True
if string1 == string2:
return True
return False
def is_equal_number(string1: str, string2: str):
"""
    Whether the numeric values are equal
:param string1: "12"
:param string2: "12.0"
:return:
"""
if is_number(string1) and is_number(string2):
if float(string1) == float(string2):
return True
if string1 == string2:
return True
return False
def contains(string: str, what: list):
for s in what:
if string.find(s) > -1:
return True
return False
def is_empty(obj):
str_obj = str(obj)
if str_obj is None or str_obj == 'None':
return True
if str_obj.strip() == '':
return True
return False
def is_not_empty(obj):
str_obj = str(obj)
if str_obj is None or str_obj == 'None':
return False
if str_obj.strip() == '':
return False
return True
def contain(obj, what):
str_obj = str(obj)
str_what = str(what)
if is_not_empty(str_obj) and is_not_empty(str_what):
if str_obj.find(str_what) >= 0:
return True
return False
def not_contain(obj, what):
str_obj = str(obj)
str_what = str(what)
if is_not_empty(str_obj) and is_not_empty(str_what):
if str_obj.find(str_what) >= 0:
return False
return True
def right_just(obj, length, fill_char=' '):
"""
    Pad on the left (right-justify)
:param obj: abc
:param length: 5
:param fill_char:
:return: ' abc'
"""
if not isinstance(obj, str):
obj = str(obj)
return obj.rjust(length, fill_char)
def left_just(obj, length, fill_char=' '):
"""
    Pad on the right (left-justify)
:param obj: abc
:param length: 5
:param fill_char:
:return: 'abc '
"""
if not isinstance(obj, str):
obj = str(obj)
return obj.ljust(length, fill_char)
def center_just(obj, length):
"""
    Pad on both sides (center-justify)
:param obj: abc
:param length: 5
:return: ' abc '
"""
if not isinstance(obj, str):
obj = str(obj)
return obj.center(length)
def only_digit_letter_chinese(string):
"""
    Contains Chinese characters and (digits or letters) at the same time
:param string:
:return:
"""
    # extract digits
match_digit = re.sub(u"([^\u0030-\u0039])", "", string)
    # extract upper/lower-case letters
match_letter = re.sub(u"([^\u0041-\u005a\u0061-\u007a])", "", string)
    # extract Chinese characters
match_chinese = re.sub(u"([^\u4e00-\u9fa5])", "", string)
if len(match_chinese) == 0:
return False
if len(match_digit) + len(match_letter) == 0:
return False
return (len(match_chinese) + len(match_digit) + len(match_letter)) == len(string)
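# e.g. only_digit_letter_chinese("中文abc") -> True; only_digit_letter_chinese("abc") -> False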
def only_digit_letter(string):
"""
    Contains both digits and letters
:param string:
:return:
"""
    # extract digits
match_digit = re.sub(u"([^\u0030-\u0039])", "", string)
    # extract upper/lower-case letters
match_letter = re.sub(u"([^\u0041-\u005a\u0061-\u007a])", "", string)
if len(match_digit) == 0 or len(match_letter) == 0:
return False
return (len(match_digit) + len(match_letter)) == len(string)
def digit_or_letter(string):
"""
    Contains only digits or letters
:param string:
:return:
"""
    # extract digits
match_digit = re.sub(u"([^\u0030-\u0039])", "", string)
    # extract upper/lower-case letters
match_letter = re.sub(u"([^\u0041-\u005a\u0061-\u007a])", "", string)
return (len(match_digit) + len(match_letter)) == len(string)
def only_digit(string):
"""
    Contains digits only
:param string:
:return:
"""
return string.isdigit()
def only_letter(string):
"""
    Contains letters only
:param string:
:return:
"""
    # extract upper/lower-case letters
match_string = re.sub(u"([^\u0041-\u005a\u0061-\u007a])", "", string)
return len(match_string) == len(string) | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/string_util.py | string_util.py |
def is_empty(dic):
if dic is None or dic == '':
return True
if isinstance(dic, str):
dic = eval(dic)
if isinstance(dic, dict):
if len(dic) != 0:
return False
if isinstance(dic, list):
if len(dic) != 0:
return False
if isinstance(dic, set):
if len(dic) != 0:
return False
return True
def pop(dic, key):
if dic is not None and key is not None:
if key in dic:
dic.pop(key)
return True
return False
def merge(a, b):
"""
    Merge two dicts; on duplicate keys, b's value overrides a's
:param a:
:param b:
:return:
example:
a = {'a':1,'b':{'v':1,'e':2},'g':9}
b = {'b':{'v':2,'d':2},'c':3,'g':10}
output:
{'a': 1, 'c': 3, 'b': {'e': 2, 'd': 2, 'v': 2}, 'g': 10}
"""
re = {}
a_dict = a if isinstance(a, dict) else eval(a)
b_dict = b if isinstance(b, dict) else eval(b)
key_unit = list(a_dict.keys())
key_unit.extend(list(b_dict.keys()))
for key in key_unit:
if key in a_dict and key in b_dict:
            # merge when the value is a dict
if isinstance(a_dict[key], dict):
                # merge the dicts
c = dict(a_dict[key], **b_dict[key])
re[key] = c
elif isinstance(a_dict[key], list):
a_key = {list(k.keys())[0]: list(k.values())[0] for k in a_dict[key]}
b_key = {list(k.keys())[0]: list(k.values())[0] for k in b_dict[key]}
for _k in list(b_key.keys()):
a_key[_k] = b_key.get(_k)
r = []
for _k in list(a_key.keys()):
r.append({_k: a_key.get(_k)})
re[key] = r
else:
                # override non-dict values
re[key] = b_dict[key]
elif key in a_dict:
re[key] = a_dict[key]
elif key in b_dict:
re[key] = b_dict[key]
return re
def merge_list(ml):
"""
    Merge a list of dicts
:param ml:
:return:
example:
a = {'HouYeZiBan-Z': {'aoxian': 0.0118, 'guaca': 0.1205}}
b = {'HouMen-Z': {'guaca': 0.0505}, 'HouYeZiBan-Z': {'guaca': 0.0003, 'aoxian': 0.0047}}
c = [a,b]
merge_list(c)
output:
{'HouYeZiBan-Z': {'aoxian': 0.0047, 'guaca': 0.0003}, 'HouMen-Z': {'guaca': 0.0505}}
"""
if len(ml) < 1:
return None
first = ml[0]
if len(ml) > 1:
for i in range(1, len(ml)):
            first = merge(first, ml[i])
return first | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/dict_util.py | dict_util.py |
import threading
import time
class MultiThread(threading.Thread):
"""
    Multithreading helper
"""
def __init__(self, name, func, func_arg=None, thread_lock=None):
"""
        :param name: thread name
        :param func: function to run
        :param thread_lock: threading.Lock(); async (no lock) by default
"""
threading.Thread.__init__(self)
self.name = name
self.func = func
self.thread_lock = thread_lock
self.func_arg = func_arg
self.result = None
def run(self):
if self.thread_lock:
            # when the optional timeout argument is omitted, block until the lock is acquired
            # returns True once the lock is acquired, False on timeout
self.thread_lock.acquire()
self.result = self.func(self.name, self.func_arg)
if self.thread_lock:
self.thread_lock.release()
def get_result(self):
try:
return self.result
except Exception as e:
print(e)
return None
def my_func(thread_name, func_arg):
counter = 3
while counter:
time.sleep(1)
print("{0}: {1}".format(thread_name, func_arg))
counter -= 1
return thread_name
def test_async():
"""
    Async: no lock
"""
thread_1 = MultiThread(name="thread_1", func=my_func, func_arg="dddd1", thread_lock=None)
thread_2 = MultiThread(name="thread_2", func=my_func, func_arg="32333", thread_lock=None)
thread_1.start()
thread_2.start()
    # wait for all threads to finish
threads = [thread_1, thread_2]
for t in threads:
t.join()
print(t.get_result())
print("Exiting Main Thread")
def test_sync():
"""
    Sync: with a shared lock
"""
thread_lock = threading.Lock()
thread_1 = MultiThread(name="thread_1", func=my_func, thread_lock=thread_lock)
thread_2 = MultiThread(name="thread_2", func=my_func, thread_lock=thread_lock)
thread_1.start()
thread_2.start()
    # wait for all threads to finish
threads = [thread_1, thread_2]
for t in threads:
t.join()
print("Exiting Main Thread")
if __name__ == "__main__":
test_async()
# test_sync()
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/thread_util.py | thread_util.py |
import time
import datetime
import cn2an
import re
CN_NUM = {'〇': '0', '一': '1', '二': '2', '三': '3', '四': '4', '五': '5', '六': '6', '七': '7', '八': '8', '九': '9',
'零': '0', '壹': '1', '贰': '2', '叁': '3', '肆': '4', '伍': '5', '陆': '6', '柒': '7', '捌': '8', '玖': '9',
'两': '2'}
def get_date_format(format_str):
"""
    Get a formatted date string
:param format_str: "%Y-%m-%d %H:%M:%S"
:return:
"""
return time.strftime(format_str, time.localtime())
def get_date():
"""
    Get the date with second precision
:return: 2020-02-26 10:31:02
"""
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
def get_date_ms():
"""
    Get the date with millisecond precision
:return: 2021-08-20 14:46:43.805594
"""
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
def get_y_m_d():
"""
    Get the date
:return: 2020_02_26
"""
return time.strftime("%Y_%m_%d", time.localtime())
def get_timestamp_second():
"""
    Second-level timestamp
:return: 1582684262.161325
"""
return time.time()
def get_timestamp_ms():
"""
    Millisecond-level timestamp
:return: 1582684262161
"""
return int(round(time.time() * 1000))
def get_timestamp_ws():
"""
    Microsecond-level timestamp
:return: 1582684262161369
"""
return int(round(time.time() * 1000000))
def compare_time(time1: str, time2: str):
"""
    Compare two date strings; formats like %Y年%m月%d日 are supported
    :param time1: the first date
    :param time2: the second date
    :return: int; <0 time1 earlier than time2; >0 time1 later than time2; note the implementation returns 1 when the dates are equal
"""
def trans_cn2d(cn_str):
return ''.join([CN_NUM[i] if i in CN_NUM else i for i in cn_str])
if re.match(r'(\d{4}-\d{1,2}-\d{1,2})',time1):
format_str = '%Y-%m-%d'
elif re.match(r'(\d{4}年\d{1,2}月\d{1,2}日)',time1):
format_str = '%Y年%m月%d日'
elif re.match(r'(\d{4}/\d{1,2}/\d{1,2})',time1):
format_str = '%Y/%m/%d'
elif re.match(r'(.{4}年.{1,2}月.{1,3}日)',time1):
format_str = 'CN年CN月CN日'
else:
format_str = '%Y-%m-%d'
if format_str == 'CN年CN月CN日':
time1_cn = trans_cn2d(time1[:time1.index('年')]) \
+ time1[time1.index('年'):time1.index('年') + 1] + \
str(cn2an.cn2an(time1[time1.index('年') + 1:time1.index('月')], 'normal')) \
+ time1[time1.index('月'):time1.index('月') + 1] + \
str(cn2an.cn2an(time1[time1.index('月') + 1:time1.index('日')], 'normal')) \
+ time1[time1.index('日'):time1.index('日') + 1]
time2_cn = trans_cn2d((time2[:time2.index('年')])) \
+ time2[time2.index('年'):time2.index('年') + 1] + \
str(cn2an.cn2an(time2[time2.index('年') + 1:time2.index('月')], 'normal')) \
+ time2[time2.index('月'):time2.index('月') + 1] + \
str(cn2an.cn2an(time2[time2.index('月') + 1:time2.index('日')], 'normal')) \
+ time2[time2.index('日'):time2.index('日') + 1]
# print(time1_cn,time2_cn)
s_time = time.mktime(time.strptime(time1_cn, '%Y年%m月%d日'))
e_time = time.mktime(time.strptime(time2_cn, '%Y年%m月%d日'))
if int(s_time) == int(e_time):
return 1
return int(s_time) - int(e_time)
s_time = time.mktime(time.strptime(time1, format_str))
e_time = time.mktime(time.strptime(time2, format_str))
# print('s_time is:', s_time)
# print('e_time is:', e_time)
if int(s_time) == int(e_time):
return 1
return int(s_time) - int(e_time)
if __name__ == '__main__':
print(compare_time('2019年02月03日', '2019年02月04日'))
print(compare_time('二0一6年一月十日', '2〇1七年七月十七日'))
print(compare_time('二〇一六年一月十日', '二〇一七年七月十七日'))
print(compare_time('2021年12月31日', '2022年12月31日'))
print(compare_time('2021-12-31', '2021-12-19'))
print(compare_time('2021/12/31', '2021/12/19'))
print(get_date_format("%Y%m%d%H%M%S"))
print(get_date())
print(get_date_ms())
print(get_y_m_d())
print(get_timestamp_second())
print(get_timestamp_second())
print(get_timestamp_ms())
print(get_timestamp_ms())
print(get_timestamp_ws())
print(get_timestamp_ws())
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/time_util.py | time_util.py |
import re
def cal(calc_formula: str) -> str:
"""
    Evaluate a formula: compute the numeric terms while keeping non-numeric terms as-is
:param calc_formula:
:return:
"64.7*2+(70.6+332.9)/2*2+13*2+L" -> "558.9+L"
"2*L+64.7-8" -> "2*L+56.7"
"(63~64)*2+10*2" -> "(63~64)*2+20"
"""
calc_formula = calc_formula.replace("(", "(").replace(")", ")").replace(" ", "")
target_list = re.split(r"(?=[-+])", calc_formula)
number_list = []
not_number_list = []
number_is_first = False
    # filter tokens
for i in range(len(target_list) - 1, -1, -1):
target = target_list[i]
        # drop empty strings
if not str(target).strip():
target_list.pop(i)
        # drop invalid tokens
if target in ["+", "-", "*", "/"]:
target_list.pop(i)
    # split into numeric and non-numeric term lists
for i in range(0, len(target_list)):
target = target_list[i]
if len(str(target).split("|")) > 1:
not_number_list.append(target)
continue
try:
eval(target)
number_list.append(target)
if i == 0:
number_is_first = True
except Exception as e:
not_number_list.append(target)
number_str = ["{0}".format(number) for number in number_list]
number_str = "".join(number_str)
union_symbol = ""
if len(number_str) > 0:
if number_str[0] in ["*", "/"]:
union_symbol = number_str[0]
number_str = number_str[1:]
try:
num = eval("".join(number_str))
except Exception as e:
num = number_str
if isinstance(num, int) or isinstance(num, float):
number_str_cal_result = round(num, 8) if len(number_str) > 0 else ""
else:
number_str_cal_result = num if len(number_str) > 0 else ""
not_number_str = ["{0}".format(not_number) for not_number in not_number_list]
if number_is_first:
result_union = str(number_str_cal_result) + union_symbol + "".join(not_number_str)
else:
        # join with "+" when the computed value is positive, so e.g. "2*L+64.7-8" -> "2*L+56.7"
        if not union_symbol and isinstance(number_str_cal_result, (int, float)) and len(number_str) > 0:
            if number_str_cal_result > 0:
                union_symbol = "+"
result_union = "".join(not_number_str) + union_symbol + str(number_str_cal_result)
return result_union
if __name__ == "__main__":
# print(cal("64.7*2+(70.6+332.9)/2*2+13*2+L"))
# print(cal("2*L+64.7-8"))
# print(cal("(63~64)*2+10*2"))
# print(cal("64.7*2+339.7*2+13*2"))
# print(cal("2+/2+1000"))
# print(cal(".+250+320+200+200+200"))
# print(cal("*250+200"))
# print(cal("+752+2+"))
# print(cal("+05+120"))
# print(cal("100*2/4"))
# print(cal("60.3+733.9"))
# print(cal("1811-(137+350)"))
# print(cal("10+20+均353.5"))
# print(round(eval("均353.5"), 8))
print(cal("811-846.6"))
print(cal("846.6-811"))
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/calculater_util.py | calculater_util.py |
import zipfile
from pathlib import Path
import os
import shutil
import random
def unzip_file(zip_file, dst_dir):
"""
    Unzip a file
:param zip_file:
:param dst_dir:
:return:
"""
if not zipfile.is_zipfile(zip_file):
return False, "It is not a zip file."
try:
with zipfile.ZipFile(zip_file, 'r') as f:
for fn in f.namelist():
                # fix garbled Chinese file names (cp437 round-trip)
name = fn.encode('cp437').decode('utf-8', "ignore")
if name.startswith("__MACOSX"):
continue
extracted_path = Path(f.extract(fn, dst_dir))
extracted_path.rename(os.path.join(dst_dir, name))
            # remove leftover empty directories
for b in os.listdir(dst_dir):
current_dir = os.path.join(dst_dir, b)
if not os.path.isdir(current_dir):
continue
if len(os.listdir(current_dir)) == 0:
os.removedirs(current_dir)
except Exception as ex:
return False, ex
return True, "Unzip file successful."
def unzip_flat_file(zip_file, dst_dir):
"""
解压文件,平铺到dst_dir目录下
:param zip_file:
:param dst_dir:
:return:
"""
tmp_dir = dst_dir + "/tmp"
unzip_success, tip = unzip_file(zip_file, tmp_dir)
if not unzip_success:
return unzip_success, tip
try:
for root, dirs, files in os.walk(tmp_dir):
for f in files:
current_file = os.path.join(root, f)
target_file = os.path.join(dst_dir, f)
os.rename(current_file, target_file)
except Exception as ex:
print(ex)
if os.path.exists(tmp_dir):
shutil.rmtree(tmp_dir, ignore_errors=True)
return True, "Unzip file successful."
def unzip_flat_file_rename(zip_file, dst_dir):
"""
    Unzip a file, rename files (to avoid name clashes) and flatten them into dst_dir
:param zip_file:
:param dst_dir:
:return:
"""
tmp_dir = dst_dir + "/tmp"
unzip_success, tip = unzip_file(zip_file, tmp_dir)
if not unzip_success:
return unzip_success, tip
try:
for root, dirs, files in os.walk(tmp_dir):
for f in files:
current_file = os.path.join(root, f)
target_file = os.path.join(dst_dir, f)
names = f.split(".")
if len(names) == 2:
name = "{0}_{1}.{2}".format(names[0], random.randint(1000, 9999), names[1])
target_file = os.path.join(dst_dir, name)
os.rename(current_file, target_file)
except Exception as ex:
print(ex)
if os.path.exists(tmp_dir):
shutil.rmtree(tmp_dir, ignore_errors=True)
return True, "Unzip file successful."
if __name__ == "__main__":
zip_file = "/Users/zhousf/Desktop/case/银行卡.zip"
# zip_file = "/Users/zhousf/Desktop/case/归档.zip"
dst_dir = "/Users/zhousf/Desktop/case/img"
print(unzip_flat_file_rename(zip_file, dst_dir)) | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/zip_util.py | zip_util.py |
import re
def get_digit_char(string: str):
    # extract digits
return re.sub(u"([^\u0030-\u0039])", "", string)
def only_contain_letter_char(string: str):
    """
    Contains letters (upper/lower case) only
    """
    return len(get_letter_char(string)) == len(string)
def get_letter_char(string: str):
    # extract upper/lower-case letters
return re.sub(u"([^\u0041-\u005a\u0061-\u007a])", "", string)
def get_digit_letter_char(string: str):
    # extract letters and digits
return re.sub(u"([^\u0041-\u005a\u0061-\u007a\u0030-\u0039])", "", string)
def only_chinese(string: str):
"""
    Whether the string is entirely Chinese
    :param string:
    :return: True if all characters are Chinese, otherwise False
"""
match_chinese = re.sub(u"([^\u4e00-\u9fa5])", "", string)
return len(match_chinese) == len(string)
def normalize_cos_sign(string):
"""
    Normalize cos notation: convert cos15° to cos(radians(15))
cos15° -> cos(radians(15))
cos5 -> cos(radians(5))
cosa° -> cos(radians(a))
cosθ -> cos(radians(θ))
cos(a) -> cos(radians(a))
cos(15°) -> cos(radians(15))
cos(5) -> cos(radians(5))
:param string:
:return:
"""
# ori = string
items = re.split("[+-/*]", string)
if len(items) > 0:
for item in items:
res = re.findall("[Cc][Oo0][Ss]", item)
if len(res) > 0:
value = item.replace(res[0], "").replace("°", "").replace("。", "").replace("(", "").replace(")", "")
value = "(radians({0}))".format(value)
string = string.replace(item, "cos" + value)
    # print(ori, "->", string)
return string
def normalize_multiple_sign(string, normalize_char="*"):
"""
    Normalize multiplication signs: convert x, X, × to *
    161.9+x2-nXSxd+nxS+2x+2x(a+b) -> 161.9+x2-n*S*d+n*S+2x+2*(a+b)
    :param string:
    :param normalize_char: the character to normalize to
:return:
"""
items = re.split("[+-/*]", string)
if len(items) > 0:
for item in items:
res = re.findall("^[^xX×].*[xX×]{1}.*[^xX×]$", item)
if len(res) > 0:
item = str(res[0]).replace("x", normalize_char).replace("X", normalize_char).replace("×", normalize_char)
string = string.replace(res[0], item)
return string | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/util/re_util.py | re_util.py |
import time
from pathlib import Path
from prettytable import PrettyTable
class Logger(object):
def __init__(self, log_dir: Path = None, g=None):
"""
        Log chain
        :param log_dir: log directory, None by default
        :param g: flask.g, None by default
"""
self.log_dir = log_dir
day_time = time.strftime('%Y/%m/%d %H:%M:%S', time.localtime(time.time())) + '\n'
if g is not None:
if hasattr(g, "log"):
day_time = g.log
g.logger = self
        # log text - full detail
self.log_txt = day_time
self.log(log_dir)
        # log text - titles only
self.__log_txt_level = []
        # elapsed time
self.elapsed_time_dict = {}
def elapsed_time(self, k: str, start: float, end: float):
if k in self.elapsed_time_dict:
self.elapsed_time_dict[k] += abs(end - start)
else:
self.elapsed_time_dict[k] = abs(end - start)
def print_log(self):
print(self.log_txt)
if len(self.elapsed_time_dict) > 0:
table = PrettyTable()
table.field_names = self.elapsed_time_dict.keys()
table.add_row([self.elapsed_time_dict.get(i) for i in self.elapsed_time_dict.keys()])
print(table)
@property
def level_log_first(self):
txt = [title for (level, title) in self.__log_txt_level if level <= 1]
return "\n".join(txt)
@property
def level_log_second(self):
txt = [title for (level, title) in self.__log_txt_level if level <= 2]
return "\n".join(txt)
@property
def level_log_third(self):
txt = [title for (level, title) in self.__log_txt_level if level <= 3]
return "\n".join(txt)
def title_first(self, title):
title = "------------ {0} ------------".format(title)
self.log_txt = '{0}{1}\n'.format(self.log_txt, title)
self.__log_txt_level.append((1, title))
return self
def title_second(self, title):
title = "****** {0} ******".format(title)
self.log_txt = '{0}{1}\n'.format(self.log_txt, title)
self.__log_txt_level.append((2, title))
return self
def title_third(self, title):
title = "【 {0} 】".format(title)
self.log_txt = '{0}{1}\n'.format(self.log_txt, title)
self.__log_txt_level.append((3, title))
return self
def log(self, msg):
"""
        Append a log message
        :param msg: the message
:return:
"""
if msg is not None:
self.log_txt = '{0}{1}\n'.format(self.log_txt, msg)
return self
# noinspection PyBroadException
def save_log(self):
"""
        Save the log to a file
:return:
"""
if self.log_dir is None:
return
log_file = "{0}/log.txt".format(self.log_dir)
log_file = log_file.replace("\\", "/")
day_time = time.strftime('%Y/%m/%d %H:%M:%S', time.localtime(time.time())) + '\n'
self.log_txt = '{0}\n{1}'.format(self.log_txt, day_time)
try:
with open(log_file, "a+", encoding="utf-8") as f:
f.write(self.log_txt)
except Exception as e:
print(e)
pass | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/web/logger.py | logger.py |
import os
import time
import codecs
import logging.handlers
from logging.handlers import BaseRotatingHandler
from flask.logging import default_handler
from flask import current_app
# noinspection PyPep8Naming
class MultiProcessSafeDailyRotatingFileHandler(BaseRotatingHandler):
"""Similar with `logging.TimedRotatingFileHandler`, while this one is
- Multi process safe
- Rotate at midnight only
- Utc not supported
日志handler:按每天分割
"""
def __init__(self, filename, encoding=None, delay=False, utc=False, **kwargs):
self.utc = utc
self.suffix = "%Y-%m-%d.txt"
self.baseFilename = filename
self.currentFileName = self._compute_fn()
BaseRotatingHandler.__init__(self, filename, 'a', encoding, delay)
def shouldRollover(self, record):
if self.currentFileName != self._compute_fn():
return True
return False
def doRollover(self):
if self.stream:
self.stream.close()
self.stream = None
self.currentFileName = self._compute_fn()
def _compute_fn(self):
return self.baseFilename + "-" + time.strftime(self.suffix, time.localtime())
def _open(self):
if self.encoding is None:
stream = open(self.currentFileName, self.mode)
else:
stream = codecs.open(self.currentFileName, self.mode, self.encoding)
# simulate file name structure of `logging.TimedRotatingFileHandler`
if os.path.exists(self.baseFilename):
try:
os.remove(self.baseFilename)
except OSError as e:
print(e)
try:
os.symlink(self.currentFileName, self.baseFilename)
except OSError as e:
print(e)
return stream
"""
Log levels
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
"""
def init_log(app, log_dir):
app.logger.removeHandler(default_handler)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
log_file_str = os.path.join(log_dir, 'log')
time_handler = MultiProcessSafeDailyRotatingFileHandler(log_file_str, encoding='utf-8')
logging_format = logging.Formatter(
'=>%(asctime)s - [%(levelname)s]\n%(message)s')
# logging_format = logging.Formatter(
# "%(asctime)s-[%(threadName)s-%(thread)d]-%(levelname)s-[%(filename)s:%(lineno)d]\n%(message)s")
logging.basicConfig(level=logging.DEBUG)
time_handler.setFormatter(logging_format)
app.logger.addHandler(time_handler)
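# Hedged usage sketch (the app name and log path are assumptions, not from this repo):
# from flask import Flask
# app = Flask(__name__)
# init_log(app, "/var/log/myapp")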
def error(msg, *args, **kwargs):
current_app.logger.error(msg, *args, **kwargs)
def warning(msg, *args, **kwargs):
current_app.logger.warning(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
current_app.logger.info(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
current_app.logger.debug(msg, *args, **kwargs) | zhousf-lib | /zhousf-lib-0.9.9.tar.gz/zhousf-lib-0.9.9/zhousflib/web/log_util.py | log_util.py |
# Mantra: The Passcode for Downloading Cosmic Energy
## Download
### Docker
```
docker pull apachecn0/zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima
docker run -tid -p <port>:80 apachecn0/zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima
# 访问 http://localhost:{port} 查看文档
```
### PYPI
```
pip install zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima
zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima <port>
# visit http://localhost:{port} to view the docs
```
### NPM
```
npm install -g zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima
zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima <port>
# visit http://localhost:{port} to view the docs
``` | zhouyu-xiazai-yuzhou-nengliangde-tongguan-mima | /zhouyu_xiazai_yuzhou_nengliangde_tongguan_mima-2022.10.9.1-py3-none-any.whl/ZhouyuXiazaiYuzhouNengliangdeTongguanMima/README.md | README.md |
zhpy
====
zhpy is Python in Chinese, which helps beginners in Taiwan and China
learn Python in their native language.
Install: https://github.com/gasolin/zhpy/blob/wiki/DownloadInstall.md
Open the command line, enter the current source directory and type following
command:
$ python setup.py install
Basic Usage: https://github.com/gasolin/zhpy/blob/wiki/BasicUsage.md
You don't even need to install zhpy to play with it.
All you need to do is follow this 3-step guide:
1. Download the source pack
2. Extract the pack with zip tool
3. Run::
$ python interpreter.py
Then you have a working zhpy interpreter!
| zhpy | /zhpy-1.7.4.zip/zhpy-1.7.4/README.txt | README.txt |
from base64 import urlsafe_b64decode, urlsafe_b64encode
from binascii import a2b_hex, b2a_hex
from pathlib import Path
import maya
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
def debug_log(base, fname, *args, **kwargs):
"""log data to file, used when debugging at server"""
ss = list(args)
ss.extend([f"{k}: {v}" for k, v in kwargs.items()])
ss.extend([f"{maya.now().datetime('Asia/Shanghai')}", "-" * 20, ""])
with Path(base).with_name(fname).open("a") as fp:
fp.write("\n".join(map(str, ss)))
def get_client_ip(request):
"""get the ip of the visitor"""
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[0]
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def resolve_url(request, pk_xx, url_name="notify", app_name=None):
"""get absolute url as: https://www.host.com/wechats/notify/{pk_xx}/"""
from django.urls import reverse
app_name = app_name or request.resolver_match.app_name
url_name = f"{app_name}:{url_name}"
return request.build_absolute_uri(reverse(url_name, args=(pk_xx,)))
def be_star(s):
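    """Mask the middle of a string with '*', e.g. be_star('13912345678') -> '139****5678'."""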
if not s:
return s
if len(s) > 200:
head, tail = 50, -45
length = 6
    else:
        head, tail = 3, -4
        length = len(s) - head + tail
        if length <= 0:
            # strings of 7 chars or fewer cannot keep head and tail without
            # overlapping, so mask them fully
            return "*" * len(s)
    return f'{s[:head]}{"*"*length}{s[tail:]}'
def star_attr(obj, *args):
for attr in args:
setattr(obj, attr, be_star(getattr(obj, attr)))
return obj
def is_alipay(request):
"""check whether the browser if alipay"""
return "AlipayClient/" in request.META.get("HTTP_USER_AGENT", "")
def is_wechat(request):
"""check whether the browser is wechat"""
return "MicroMessenger/" in request.META.get("HTTP_USER_AGENT", "")
def is_qq(request):
"""check whether the browser is qq"""
return " QQ/" in request.META.get("HTTP_USER_AGENT", "")
def get_scan_type(request):
"""tell the scan tool"""
if is_wechat(request) or is_qq(request):
return "wechat"
if is_alipay(request):
return "alipay"
return "unknown"
def pk_encrypted(pk):
"""19 -> 4d546b3d"""
return b2a_hex(urlsafe_b64encode(str(pk).encode())).decode()
def decrypt_pk(s):
"""4d546b3d -> 19"""
return urlsafe_b64decode(a2b_hex(s)).decode()
# -- Deprecated --
# from django.conf import settings
# from django.http import Http404
# class RobotFilter:
#     """Return 404 directly for IPs that make requests too frequently."""
#
# cishu = 1000
# expire = 40 * 60
#
# @classmethod
# def check(cls, ip):
# red = redis.StrictRedis(settings.REDIS_SERVER)
# if red.get(ip) and int(red.get(ip)) >= cls.cishu:
#             # raise Http404(_("Too many invalid requests; the page has been hijacked by aliens"))
# raise Http404
#
# @classmethod
# def incr(cls, ip):
# red = redis.StrictRedis(settings.REDIS_SERVER)
# if red.get(ip):
# red.incr(ip)
# else:
#             red.set(ip, 1, cls.expire)
__version__ = "0.1.0"
__author__ = "Charles Simpson"
# TODO use dict lookups instead of searching through the codebook string
from io import TextIOWrapper
from zipfile import ZipFile
def to_telecode(unicode, encoding=None):
"""Encode unicode string to telecode.
>>> to_telecode("中文信息")
['0022', '2429', '0207', '1873']
Because of differences between simplified and traditional characters, two
different unicode strings may encode to the same telecode:
>>> to_telecode("萧爱国")
['5618', '1947', '0948']
>>> to_telecode("蕭愛國")
['5618', '1947', '0948']
    There are 419 characters where the mainland and Taiwanese codebooks have
diverged: the same character maps to two different codes depending on the
codebook. For example, "仉" (U+4EC9) maps to '8022' in the mainland
codebook, but '0097' in the Taiwan codebook. In these cases, `encoding`
must be specified as 'mainland' or 'taiwan' to disambiguate. Failure to
specify an encoding will result in a LookupError.
>>> to_telecode("仉")
Traceback (most recent call last):
...
LookupError: ambiguous encoding for "仉" (U+4EC9)
>>> to_telecode("仉", encoding="mainland")
['8022']
>>> to_telecode("仉", encoding="taiwan")
['0097']
`encoding` may also be given to explicitly choose a codebook, even when
there is no ambiguity. Failure to find the character in the chosen codebook
will result in a LookupError.
>>> to_telecode("萧爱国", encoding="mainland")
['5618', '1947', '0948']
>>> to_telecode("蕭愛國", encoding="mainland")
Traceback (most recent call last):
...
LookupError: undefined encoding for "蕭" (U+856D)
There are two cases: '塵' (U+5875) and '萼' (U+843C) where the database
gives two different Taiwan telegraph codes (there are no such cases for
mainland).
In the case of '塵', there is no corresponding mainland code;
only two Taiwan codes ('1057' and '7775'). The mainland code '1057' instead
corresponds to '尘' (U+5C18) and '7775' corresponds to '铈' (U+94C8).
Somewhat arbitrarily use '1057'.
>>> to_telecode("塵", encoding="taiwan")
['1057']
    In the case of '萼', the mainland code and one of the Taiwan codes are both
    '5501', but Taiwan also has a secondary code '9795'. Since both encodings
    accept '5501', it is chosen when encoding '萼'.
>>> to_telecode("萼", encoding="taiwan")
['5501']
"""
if not encoding:
codes = __to_telecode_combined(unicode)
elif encoding.lower() == "mainland":
codes = __to_telecode_single(unicode, _MAINLAND_CODEBOOK)
elif encoding.lower() == "taiwan":
codes = __to_telecode_single(unicode, _TAIWAN_CODEBOOK)
else:
        raise NotImplementedError(f'undefined encoding type "{encoding}"')
return [str(c).zfill(4) for c in codes]
def __to_telecode_single(unicode, codebook):
"""Encode unicode to telecode using either mainland or Taiwan codebooks.
"""
codes = []
for char in unicode:
try:
codes.append(codebook.index(char))
except ValueError as err:
raise LookupError(f'undefined encoding for "{char}" (U+{(ord(char)):04X})') from err
return codes
def __to_telecode_combined(unicode):
"""Encode unicode to telecode using both mainland and Taiwan codebooks.
"""
codes = []
for char in unicode:
try:
cnindex = _MAINLAND_CODEBOOK.index(char)
except ValueError:
cnindex = 0
try:
twindex = _TAIWAN_CODEBOOK.index(char)
except ValueError:
twindex = 0
if cnindex and twindex:
if cnindex == twindex:
codes.append(cnindex)
else:
raise LookupError(f'ambiguous encoding for "{char}" (U+{(ord(char)):04X})')
elif cnindex:
codes.append(cnindex)
elif twindex:
codes.append(twindex)
else:
raise LookupError(f'undefined encoding for "{char}" (U+{(ord(char)):04X})')
return codes
def to_unicode(telecode, encoding=None):
"""Decode telecode to unicode.
>>> to_unicode(['0022', '2429', '0207', '1873'])
'中文信息'
Because of differences between simplified and traditional characters, the
same telecode may decode to two completely different unicode strings.
`encoding` should be used to specify whether the 'mainland' or 'taiwan'
codebook should be used. If not specified, and there is ambiguity in the
decoding, a LookupError is raised.
>>> to_unicode(['5618', '1947', '0948'], encoding="mainland")
'萧爱国'
>>> to_unicode(['5618', '1947', '0948'], encoding="taiwan")
'蕭愛國'
>>> to_unicode(['5618', '1947', '0948'])
Traceback (most recent call last):
...
LookupError: ambiguous decoding for "5618"
There are two cases: '塵' (U+5875) and '萼' (U+843C) where the database
gives two different Taiwan telegraph codes (there are no such cases for
mainland).
In the case of '塵', there is no corresponding mainland code;
only two Taiwan codes ('1057' and '7775'). Mainland uses those codes for
two completely different characters.
>>> to_unicode(['1057', '7775'], encoding="taiwan")
'塵塵'
>>> to_unicode(['1057', '7775'], encoding="mainland")
'尘铈'
>>> to_unicode(['1057'])
Traceback (most recent call last):
...
LookupError: ambiguous decoding for "1057"
>>> to_unicode(['7775'])
Traceback (most recent call last):
...
LookupError: ambiguous decoding for "7775"
    In the case of '萼', the mainland code and one of the Taiwan codes are both
    '5501', but Taiwan also uses a secondary code '9795'. Since both encodings
    accept '5501' and '9795' is unique to the Taiwan codebook, both can be
    decoded without an explicit encoding.
>>> to_unicode(['5501', '9795'])
'萼萼'
>>> to_unicode(['5501', '9795'], encoding="taiwan")
'萼萼'
>>> to_unicode(['5501'], encoding="mainland")
'萼'
"""
if not encoding:
chars = __to_unicode_combined(telecode)
elif encoding.lower() == "mainland":
chars = __to_unicode_single(telecode, _MAINLAND_CODEBOOK)
elif encoding.lower() == "taiwan":
chars = __to_unicode_single(telecode, _TAIWAN_CODEBOOK)
else:
        raise NotImplementedError(f'undefined encoding type "{encoding}"')
return "".join(chars)
def __to_unicode_single(telecode, codebook):
"""Decode telecode to unicode using either mainland or Taiwan codebooks.
"""
chars = []
for code in telecode:
char = codebook[int(code)]
if char.isspace():
raise LookupError(f'undefined decoding for "{code}"')
chars.append(char)
return chars
def __to_unicode_combined(telecode):
"""Decode telecode to unicode using both mainland and Taiwan codebooks.
"""
chars = []
for code in telecode:
cnchar = _MAINLAND_CODEBOOK[int(code)]
twchar = _TAIWAN_CODEBOOK[int(code)]
if cnchar == twchar:
if cnchar.isspace() and twchar.isspace():
raise LookupError(f'undefined decoding for "{code}"')
chars.append(cnchar)
elif cnchar.isspace():
chars.append(twchar)
elif twchar.isspace():
chars.append(cnchar)
else:
raise LookupError(f'ambiguous decoding for "{code}"')
return chars
def load_unihan_data(file):
"""Load codebook from "Other Mappings" Unihan database dump.
`file` must be a file-like object of the tab-delimited dump.
Returns a dict of Unicode ordinals (integers) and a pair of mainland and
Taiwan four-digit telegraph code.
There are two cases: '塵' (U+5875) and '萼' (U+843C) where the database
gives two different Taiwan telegraph codes (there are no such cases for
mainland). In these cases, the Taiwan code is itself a pair.
See <https://www.unicode.org/reports/tr38/> and
<https://unicode.org/reports/tr44/> for descriptions of the dump.
"""
codebook = {}
for line in file:
stline = line.strip()
if not stline or line.startswith("#"):
continue
codepoint, field, value = stline.split(sep="\t", maxsplit=2)
if field in ("kMainlandTelegraph", "kTaiwanTelegraph"):
char = int(codepoint.split("+")[1], base=16)
cncode, twcode = codebook.get(char, ('', ''))
if field == "kMainlandTelegraph":
codes = (value, twcode)
elif field == "kTaiwanTelegraph":
if " " in value:
codes = (cncode, value.split())
else:
codes = (cncode, value)
codebook[char] = codes
return codebook
def load_unihan_zip(file):
"""Load codebook from Unihan database dump zip.
Loads codebook from "Other Mappings" from the Unihan zip file downloaded
from at <https://www.unicode.org/Public/UNIDATA/Unihan.zip>.
`file` should be a filename or file-like object. If a file-like object, it
must be seekable.
"""
with ZipFile(file) as zipf:
with zipf.open("Unihan_OtherMappings.txt", mode="r") as txt:
return load_unihan_data(TextIOWrapper(txt, encoding="utf-8"))
def _generate_codebooks_strings(unihan):
"""Generate codebook strings from Unihan database dump.
Codebook strings are generated so the NNNN character in the string
corresponds to the telecode NNNN.
"""
cnbook = ["\u3000"]*10000
twbook = ["\u3000"]*10000
for codepoint, (cncode, twcode) in unihan.items():
if cncode:
cnbook[int(cncode)] = chr(codepoint)
if twcode:
if isinstance(twcode, str):
twbook[int(twcode)] = chr(codepoint)
else:
for code in twcode:
twbook[int(code)] = chr(codepoint)
return "".join(cnbook), "".join(twbook)
_MAINLAND_CODEBOOK = """
一丁七丈三上下不丐丑且丕世丙丞丢並丏丨个丫中丰 串锕丶锿丸丹主砹丿乂乃久之乍乎
乏乖乘桉乙乜九乞也 乳乾乱凼亅了予事苯二于云互五井 些亚亟吡亠亡亢交亥亦亨享京
亭亮亳亶 醭人什仁仃仄仆仇今介仍佣佬仔仕他仗付仙仝仞仡仟代令以仰仲仳仵件价任份仿
企伉伊伋伍伎伏伐休伙伯估你伴伶伸伺伻似倮佃但 位低住佐佑 何佗余佘佚佛作佞佟佺
佩伢佯佳 佶佝佻佾使侃来侈例侍侏侑侔仑侗供依 伕佰侮侯侵侣便 促俄俊俎俏俐俑俗俘
俚佡保俟侠俞俫信修俶伥働倌倬俯俱俳俵俸俺俾仓 倍俩倏们倒 倘候倚倜 借倡 值倥倦
倨倩倪伦倭偈偎傻 偃假伟偏偕做停健 侧侦偶仫偷伧傀傅傍傑傒傈伞备 催 傲传伛
债伤倾偻仅 佥僳像 僦偾 侨 僖僚伪侥僧僭僮 僵 僻 仪侬亿儆侩俭傤 僰儋儇傧
儒俦侪 偿优储 俪傥俨 儿兀允元兄充兆先光 克兑免兔 兕兖兜兢 入内全两 钚
八公六兮共兵其具典兼冀糍冂冉册再冏冇 冒冕镩冖冗 冠冢冤冥幂耖冫冬冰冲冱冶冷冮冽
况凄准净凉冼凋凌冻 凑 凛凝决几凡凭凯凳凰茌凵凶凸凹 出函菪刀刁刃分切刈刊刎刑划
刖列刨初删判别 利剁刮到刲刳 制刷券刹刺 剃刭则 削刻前剜剌剡剔剖刬刚剥剩剪剐副
割剀创 剽剿 剧劈刘刽 剑剂劓劘 剅力功加劣助努劫劬劭効劻劾劼劲勃勐勇勉勍勒
动勖勘务胜劳 募势 勤 勚勰勳劢励勷劝锝勹勺匀勿 包匈匊匍匏匐勾匕化北匙碲匚匜匝
匠匡匣匪汇匮匦 啶匸匹匾匿区铥十千廿卅升午卉半卑卒卓协南博 卜卞占卡卣卦氡卩
卯印危 却卵卷卸卺 卮即 卿胨厂厄 厘厍厚厝原厕厥 厌厮厉厦胴厶 去叁参
塅又叉及友反叔取受叙叛叟 丛叠口古句另叨叩只叫召叭叮叱可台史右叵叶司吁吃各合吉
吊同名后吏吐向吓 君吝吞吟吠否吩咐吆含吭吮呈吴吵呐吸吹告吻吼吾呀吕呃呆咂呢呦呋周
咒吧呱 味呵呶呷呻呼命咀咄咆咕和咎咏咋咖哎咤响咪咧咨咫咬咯咱咳 咸 咽哀品哂哄啊
哇哈哉哪咿员哥唔哦哩哭哮哲哺哼哽哿唁唆 唉唐唇哨啦唏售唯唱唳唾啁 啄商问启啖啥啜
哑唬啤啡啻啼喁喀喂喃善喆喇喈喉喊喏 喋喑喘喙唤喜喝唧喧喻丧乔单啾喹嗅吗啬嗑嗓嗔呜
嗪嗜嗟嗣嗤喔喟哟 嗡呛嗉嗒喽 叹嘈嘉嘏嘛唛 啧尝嘘噻嚎嘎呕嗷嗽嗾嘬哗哔叽啸嘲嘴哓
嘶嗥嘹嘻嘿 噌噍噎噢 噤器噩噪噫噬嗳哙喷噶哝哕吨噱咛嚅 嚏啮嚚 冁 严咙嘤
嚼啭嗫嚣嚷 囊呓嘱 叻囗回囚四囱因囤困囫囷囹固囿圃圄圈圉国 囵圐圙围园圆图团圜
土在圩圬圭圮地圻址圾坟均坊坍坎坏坐坑坂坌坡坤坦坭坩坷坼垂坪坫坰坳型垓坬垠垢垣垃坯
垛坨埃埋城埒垮埏域埠垧埭垡执培基堂坚堆垩埤埴埵埸堃堙堞堡堠堤堪尧报埔场堵埝堰垾塍
块茔塌塑塔墓塚 塘塞填坞垲埙埘堼坝尘堑 塾塂墀墁境墅墉垫墄埫 坠 增墟墨墩堕
塄塆墈垦壁壅坛 壑压壕 垒圹 垄垆 壤 圳士壬壮壹壶 寿壸椴夂夆簖夊夏砘 跺夕
外夙多夜够梦夤夥婀大夭天太夫夬央失夯夷夸夹奄奇奈奉奎奏奂契奔奕奓套奚锇奘奠 奢奥
奁夺奖奭奋奀女奴奶奸好妁如妃妄妊妒妍妓妖妗妙妆妣妤妥妨妯妹妻妾姆姊始姗姐姑姒姓妲
妮委姚姜姝 姣 姥姨 侄姬 姻娃姿威娉姮 娌娑娘娱娜娟娠娣娥娩娓娼姹娶娄婆婉婊
婕婚婢 妇婪娅她妳婼婷婺媒媚媛 娲 媖媳媵媸媪妈媾 嫁嫂嫄嫉 嫌媲 嫖妪嫠嫡
嫣嫦嫩嫪嫘嫚嫜嫫 妫娆嬉婵娇媭嬖嬗嫱嬛嬴嫔嬷 嬲 婴婶孀 娈妞子孑孔孕孓字存孚孛
孜孝孟季孤孥孩孙孰孱孳学孺 孪孵宀宁它宄 宅宇守安宋完宏宓宕宗官宙定宛宜客宣室宥
宦宬宫宰害宴宵家宸容 宿 寂 寄寅密寇 富寐寒寓 寞察寡寝寤寥实寨 审
写宽寮寰宠宝宧寸寺封射尅将专尉尊寻对导蒽小少尔尖尚 尕尢尤 尪就尰尴尬铒尸尹尺
尻尼尾尿局屁居届屈屉屋 屎屐屑展屏屝屠屡屣层履 屦 属 砩屮屯岳山屹屺峁岌岐岑岔
峿冈岢岈岩岫岱岵岷岸岣峒峙峋峨峭峰岛峻峡峓岘峪崃崇崎岽崑崔崖崙岗崛峥崩崚崧崦嵇崆
嵋嵫嵊嵌峧嵎岚崀崅嵩嵝嵬嵯崌嶂崭岖嶒嵚嵘峤崤峄嶙嶷岭屿崾巉嵛巍峦嶅巅巖崂巛川州巡
巢镄工左巧巨巫差酚己巳已巴 巷巽钆巾币市布帆帋帑帕希帖帗帘帙帚帛帔帝帡 帅师席帨
帐带帷常帽帏幄幅帧 幌幔幕帼帻帜幡幢 幪帮帱 幛干平年并幸 矸幺幻幼幽 酐广
庀庹庇床庋序底庖店庚府庠庥度座库庭 庵庶康庸庾 厢 廑廈廉廊廋厩廓廖厨 廛
庙 庑废 廨廪庐 厅庞锆廴延廷 建 廾 弁弄 弈弊茛弋弍式弑瀔弓 引弗弛弟弢
弦弧弩 弭弱 张 强 弼彀弹 弥弯弘彐彖彗彘 彝彟臌彡形彤彦彧彩彪 彬彭彰影彨
铪彳彷役彼 往征徂待徇很徉徊律 徐径徒得徘徙徜从徕御 徨复循徬徭微徯徵德彻徼徽焓
心必忉忌忍忒忖志忘忐忑忙忝忠忡恬快忭忮念忱忸忤忻忽忿怍怎怏怒怕怖怗怙怛思怠怡急怦
性怨怩怪怫怯 怵 您怔恁恂恃恒惦 恍恐恕恙恚恝恢恣恤 恧恨恩恪恫恭息恰悃 悄悦悌
悍悒悔悖悚悉悛悝悮悟悠患 悱 悲悴怅闷悸悻悼 情惆惇 惑惓惕惘惙惚 惜 惟
惠恶悰惋惰恼恽想惴惶 惹惺恻愀愁愆愈愉 愍愎意 愔愕愚爱惬感愠愧悫愫 怆恺慎忾愿
恿 慈慊态慌 慓慕惨 慝惭恸慢惯 慧慨怂虑慰悭 慵庆慷 忧 惫怜 憔惮憎
憝愤憧憨憩憬悯怃 宪 忆 憾 懂恳懈应懊懋怿懔 愦 怼懑 懦惩懿懵懒怀悬忏
惧 慑恋恹戆 戈戊戌戍戎成我戒戋戕或戚戛戟戢戥戡 戬截戳戮战戏戴戤户戾房所戽扁扃
扆扇扈扉 槲手才扎扑扒打扔托扛扠拤 扣扭扮扯 扳扶批抵扼找承技 抄 抉把抑抒抓
投抖抗折抨披抬抱抿抹押抽拂拄拆拇担拈拉拊抛拌拍 拐拎拒拓拔 拖拗拘拙拚招拜捂拮
拭括拱拯拳拴拷拽拾拿持挂指挈按挎挑挖拼挨挪挫振捅挹挺挼挽挟 捆捉捋捌捍掂捐挣捕挲
捎捧 捩扪捺捭据 捱 捶捷揍捻 掀扫掇授掉掊掌掏掎掐排掖掘 掠 探掣接控推掩措掬
掯抡 拣揄揆揉描捏提插揖扬换 揠搽握揣揩揪揭挥 援 掾搞 搧 损搏搒搓搔
摇捣搜搠 搦 搪搬搭搴摔抢搐 掺撂摘搂撑摧摩摭挚抠抟摸摹摺掴摒 撅 撩撇捞
撒挠撕撙 撞撤拨抚播撮撰 撬掼攉挞撼挝拥擂掳擅择击挡操擎拧擒 擗擘 撷搀挤 擢
擦拟摈搁撵擭掷扩摆擞扰攀摅拦撄攘携摄拢攒挛摊搅攫揽 拃支 塃攴收 攸改攻放政故
效敉 教敏救敕敖败 敝敞敢散敦夐敬敲整敌敷数 敛毙敩 文斌斐斑斓蟥斗料斛斜斝
斟斡斢斤斥斧斨斩斯 新断 方於施 旎旁旂旃旄旅旆 旋旌族旖旒 旗烩无既
日旦旨早旬旭旰旱晒旺昌昂昆昃明昏易昔晗昉昕昝星映春昧昨昭是昱昴昵昶晁时晃晋晌晏晚
晢昼晞晡晤晨晦暎普景晰晳晴晶晷智晬暄暇暍暑暖暗旸暝畅晕晖暂旻暮 暴 暹暨晓暾
昙 曙 曛曜曝 旷曦昽曩 昇曰曲曳更曷书曹曼曾替最会 月有朋服朔朐朓 朕朗望
朝期 朦胧耠木未末本札术朱朴朵朽杆杈杷杉杌李杏机材村杓杖柈杜杞束梠栓杪杭柿杯杰东
杲杳朿杵棵杼松板枉析枕林枙枚果枝 枇枋 枘枯 枳枵架枷枸 柁柄柏某柑柒染柔柘柙
柚柜柝柞 柢查柬柯柮柰柱柳柴栅枰 樋栗校栩株核 根槾格栽桀桁桂桃桅框案桌桎桐桑桓
桔桕栖栲栳桄桫桴 桶桷 梃梁梅梆梏梓栀梗 条枭梢梧梨梭梯梱械棁梳梵 桯弃棉棋
棍棒棕枨枣棘棚栋棠棣栈棫棬森棰楦 棹棺椁棻棼 椅植椎椒棐棓棱棨椐椓 椰椹椽椿 杨
枫楔栉 楚楞楝楠榆楢楣楫业楯楮极楷楹 槠桢楩楸榔榕榖榘榛榜 榧榨榫榭荣榱榴
榻榾槁槅槊构槌枪槎槐 榷榼樑 椠 概桨槔槽槿桩乐樊楼橦标 枢樟模样槥枞
樗 檩 樵树樽樾橄桡桥橐橙橛 橡 横樨柠椭桦 檖橘檀柽檄檐档桧槚 檠检樯檬梼 槟
槛 橹榈 椟橼栎 枥榇棂栊 栏樱权 栾榄 柩欠次欣 欲款欷欹欺钦 歃歆歇 歉
歌 欧歔 歙 欤 欢 止正此步武歪歧岁历归钬歹死殁 殂殃殄殆殉殊殍殖残殕殛殒殇
殚 殪殓殡歼镓殳段殷杀 殿毁毅殴椵母毋每毒毓 比毖毗 鲣毛毡 毫 毯毳牦氅 毹
氍 氏氐民氓氯气氛 氤氲浠水 永氾汁汀求 汗污汛汜汝江池汊汐汔汕汞汨汪汰汲汴
汶汹 汾沁沂汽沃沈沉沌 沐没沔沖 沙沚沛沆沓汩汭沅 沫沮沱河沸油治沼沽沾沿洺泄泵
泅泔泆泉泊泌泓法泗洩 泛泠泡波泣泥注泫泯泮泰泱 泳沭 泐泖 洄浐洋浕洎洒洗洛浉洞
津洧汧洪洫洮洲洱洳洵浈洸活洼洽派洨流洙洚洹浙浚浣浦浩浪浮浴海浸浃浬泾消涉淳涓涔涕
涞涢浡浜浥涂涅湟涑涯液涵涸 涿淅淆淇淋淑渊淘淙泪淝淡淤淦浄沦淫淬淮浛深菏混清淹浅
添涎涪涴淀淄涠淞浯涭滧涣渚減渝渠渡渣渤渥涡测渭港 渴游渺浑湃湄 湍湎湖湘湛 湧
湫湮汤湲 濉 湓湔湜 源準溵溜沟溟溠溢 溥溧 溪温溯溱溲溶溷溺 溽滁滂滃沧灭
滋涤荥滑滓滔滕 溘滇滘 滞渗 滴沪浒洑滚满渔漂漆漏溉漓演 漠汉涟漪漫渍 淌漱
涨漳渐漾浆 滹 漉溎沤漦漩漯溆漶溇漕颍泼洁潘潜潟潢涧润潦潭潮溃潼潴潸潺涩澄浇涝
澈澔澌澎潖澛潞 浔潵澍澂 沩澡 泽浍澳澹激浊濂浓渑 滪澧 澴 澶澼泞
湿 济濠濡涛滥濬濮濯潍滨泻 濞溅泺滤渎浏瀑 濒 瀚瀛沥潇 濑泸 瀣瀼 瀍泷
潋澜 溁瀹 灌沣 滠 滩灏湾滟灞滦炸火灰灶灸灿灼 灾炊炎炒炕炙炘炞炫炬炭炮炯
炱炳炷 畑烈 乌 烙烘烜烝烟烹烺烽焌焙焚焜 焦焰然 焠焉煅焊炼煊煌煎煮炜熙 煜煞
茕煤焕煦照烦煨 炀煽 熄 熊熏荧 熟熔熨熬 热熠颎 熹炽 烫燃 灯燎烧
燔燕营燠燥 燧 烛 燮燹烬燿焘爆 烁 炉烂 爝爟焮炖焖爨烤爪争 爬爰为爵睑父爹爷
爸爻爽 趼爿 牂牁 藠片版 牌牒 牖牍 牙肼牛牝牟牡牢牣牧 物 牯牲犋牷特牵 犀
犁 犍犒荦犊牺 犬犯犴状狂狃狄狁狎狐狗狙狡 狠狩狴狸狭狼狈狻狺狷狢猁猖猗狰猛猜
猝猞猊 猢猥猩猪猫猴犹猷猔猱猺狲猾猿 狱狮獍獐獒獗 独狯獬獠狝获獯 犷猎兽獭献
狞猡玄率 锔玉王 玕玔玖玟玠玢玦珏玩玫玲玳玷珂 珈珉玻珀珊珍 珞 珠珙珖珥
珣珩珪班珧珮珽现球琅理琇琉珶 琊琕 琚琛琢琤琥琦琨琪 琮琯琳琴琶琵 琬 琲玮瑕瑙
瑚瑛瑜瑞瑟 瑁 瑗琐瑶莹玛瑰玱 瑱 璀璃璋 瑾 璆 琎璇璐璜玑璘璞璟璠
璧璨 环 珰玺璲瑸玙 珑璎 瓒 瓜瓞瓠瓢瓣瓤 瓦瓮瓴瓶瓷瓻甃甄 瓿瓯甍
甑甓 瓩甘甚甜 咔生甡产甥甦 用甪甫甬甯甩田由甲申男甸 甾畀畋界畎畏町
畔畚畛畜亩留 毕畤略畦番画畬畯 榃畹 当畸畿疆畴 锎疋 疏 疑钪疒疔痃痉疚疝
痠疣疤疥疫痄疲疳疴疵疸疹疼疽疾 病症痂痊疰痍痒痔痕痧痘痛痞痣 痢痱痰痳痴痹痼 痿
瘀瘁 疯疡 瘝瘐痪瘕 瘗瘟瘠疮瘢瘤瘥瘦疟 瘸瘰瘳瘴瘵瘘瘼疗癃痨痫癌癖 疠瘩
癜疖 瘪瘿疬癞癣瘾癯痈瘫癫疙癶癸登发钶白百皂的皆皇皈皋皎皓皖 皑皤皞
氪皮皴 皲皱 醌皿盂 盅盆盈益 盍盎盒盔盛盗盝盏盟 尽监盘盥卢盦 盪盬蛞目盲
直相盼盾省眄眇眈眉盯看盻眨盱睬眛真眠瞄 眦眩眙眯眵 眶眷眸眺眼 着 睨睇 瞅 睚
睛睁睐睾睡睢督睦睫瞌 睥睹睿瞀 瞋 瞎瞑瞒瞠瞪瞥瞧瞬瞳 瞭瞰瞆瞻瞽瞿 矍眬矗
瞩眭矛矜矞砬矢矣知矧矩短矮 矫 镴石 矼砆矻砂砉砌砍砑砒砭砟砸砥斫砦破 砜砧砼
砰 研硎硁硝硖砗硫硬碘砚确碍碌碎硼碗 碇碔碑碓 碡 碉砀碟碣碧硕磜 碰 码碾磁磅
磊礅磋磐磙磔磕磉硙碴硵砖碛磨磬矶磲磺硗礁磷 础 矾礳磴 矿砺砾砻礴碱示礽社祀祁
祆祇祈祉 祊 祐祓 祖 祚祛祜祝神祟祠 祲祥祧票祭 祺禄禀禁 祸祯福禘禊
祎祃 禠 禧禅 礼祢祷禳 铼禸禹禺离禽镧禾秃秀私 秉秆秋种科秕秒 秘 租 秣秤
秦秧秩秫秭 秸移稀稂税 程 稍 稔稗稞稚 稠 稹 称稷稻稼稽稿 穄穆 稣
积颖穗 穑 秽秾 稳 穞穰罱穴 究穸穹突空 穿窀窄窅窆窈 窒窕窖 窗窘窟窠窨窝
窬穷窑窳窎窭窸窣窥窾 窜窍窦 窃蒗立竑站 竟章竣童竦竖竭端竞铑竹竺竽竿笄笈
笊笏笑笙笛笞笤笠笥符笨笋笪笫第笮笆 笱笳筅笔筇等筊筋筌 筎筏筐筑筒 答策筠 筷
笕 筮筲 筵 筱笺 箍箇箔箕算 箝 管筝箐 箜 箬箭 箱箴箸 节篁 篆篇
箧 篙篚篝筿篡笃 筛篦篪篲筚 篷箦簕簇簋篓簌簏篌篾簃 簟箪簠简篑 簦簧
簪箫 簸 签 簿 篮 筹籍 籐 籀箓 篯箨籁笼 笾 箩篱 篰米
粉粒粕粗粘粟粢粤粥籼 粱粲粹粳 粼精糈糊 糕糇糖糗糜糁粪糟糠籽糙 糯粝籴粑 粜
糸系纠纪纣约红纡纥纨纫紊纹 缐 纴 纳纽纾纯纱纮纸级纷纭素纺索紫缏 累细缩
绅绍绀绋 绐绌终 组绊绂 结绝 绔绞络绚给绒 絮绖统丝绛 绢 绑绨绣绠
绥 经绡综绿绸綦线绶维綮绾 纲绷缀 纶绺绮绽绰 绫缁紧绯绻 绪缄缉绵缎缔缘
缌 编缓缅纬练 缊缃 缗 缈 缇萦缙缢缒绉缞缚缜缛县缣繁缟 缝纵缧 缦絷缕缥縻
总绩 缫缪繇缡 织缮 缭 绕 缯绳绘 茧缰缳缴绎辫 继 纂缤缱 颣缬缵续
纤缠纩 缨 纛 缆绦缶缸缺 罄罅 罂罍 罐 网罔罕罘 罟罡 罩罪 置
罚罨署 骂罢罹 罽 罗罴 羁鳓羊羌美羑羔 羚羝羞 群羡义羯羲羧 羸羹羼羽翀
翁翅翊翌翎习翔翕 翟翠翡 翥翦翩羿翚翮 翰翳翘翱翻翼 耀喱老考耄者耆 耋 而
耍 耐 唡耒耔耕耗耘耙耜耢 耩耦耨耧耰钌耳耶耻耽耿聃聆聊聒圣聘聚闻联聪声耸聩聂
职 聋啉聿肄肃肆肇呤馏肉肋肌肓肖肘肚肛肝股肢肥胺肩肫肯肱育肴朊肺胃胄背胂胔胍胎
胖胙胚胳胝胞胰脒胭胯胡胥胸胼能胾脂腺胁 胱 脊脚脘胫脆腈 脧脩膨脱脬脯胀脾腆腊腋
肾腐腑腓腔腕腥脑腴肿腮脿腰腱肠腹腠腿膀膂腩膏肷 膈膊腘肤膛膜膝胶腻 膳膺 膻脍
脓臀臂 臆脸臊脐 臃胪 脏脔胤臣卧臧临栌自臬臭 镥至致 臻氇臼臾舁舀舂舄舅
与兴举旧 摞舌舍舐舒舘舖舛舜舞 镅舟 舡航般舫 舷舵舶船舸 舳舴艇艋艄 艘舱
舣艟艨 舰舻酶艮良艰钔色 艳 艸艽艾芃芄芊芋芍芎芑芒芙芝芟芡艿芤芧芨芥芩芪芫芬芭
芮芯芰花芳芷芸芹刍芼芽莜芾苑茆苒苓苔苕苗苄苛苜苞苟苣苡若苦苎苫英苴苹苻 苾茀茁茂
范茄茈茅茇 茉茗荔茜 茨茫茭茯茱兹茴茵茶 茸茹荀荃萄荆荸荇草 荏荐荑荒茝荄
荷荻荼荽莅 莎莒莓茎莘莞莠荚苋莆莨莩莪莫菟莉莽菀菁菅菇 菊菑 菖菔菘菠菜菩堇华菰
菱菲 莙菹菼菽萁萃萜莱萋萌萍萎龿菡菌萩 苌萼 萱莴萸萹落葆 菸著 葚葛葡董苇葩葫
葬葭药葱葳葵荤葸葺 蒂葑 莳蒙蒜蒡蒞蒟蒨 蒲蒸蒹蒺苍蒿荪蓁蓂蓄蓆蓉蓊盖蓖蓍蓏
蓑 蓐蓓萏蔌蒯 蓬莲蓰莼 蔻蔫芗蓼 蓿 蔑蕻蔓 蔗蔚蒌 蔡蒋荫蔴 茑 荜蔽
蔬蕺 蕃蕉蕊荞莸蕖荛蕙 蕞 蒉蕨荡芜萧蕰薯蒇蕈 蕤 蓣 蕾 薢薄薅薇荟 蓟 蔷
薛孽薜 薨萨薪薤蘖薏薰薳薹荠 藉荩藏藐蓝藁藕 藜艺 藤藩 薮 蔼蔺 藻
藿蕲蘅芦苏蕴 夔 萚藓蔹茏蘩兰 蘧蘸萝蘼 蘑虍虎虐 虔处虚虏虞号 虢 亏
醚虫虱虻虹虺蚓蚍蚕蚊蚋蚌蚣蚤蚧蚩蚪蚨蚯蚰蚱蚜蚶 蛄蛀蛆蛇蛉蛑蛋蛙蛛蛟蚴蛤蛩蛱蛭
蛾蛘蜂蜃蜇蜈 蜓蛸蛹蜕蜀蜉蜍蜊 蜘蜚蜜蜡蜣蜩蜥蜒蛔蜴蜞 螂蜢蜮蝎蛐蚀蝗蝘蝙蜅猬蝠
蜿蝥虾蝮 蝶蜷蟮蜻蝌 蜾蝣蝻蝴蜗融蠓蚂螨螟萤 蚝螃蛳螫 螺蝼螽 蛰蟊螬螭 螯螳蟀
蟆蝈蟋虮蟪蟒蟛蟠蝉 蟹蚁蟾 蛏蠃 蚃蝇虿 蠕蠖蠛 蠡蠢蛎蟏蠹蛊蠲 蛮虬血嘧
衄衅 众 嚜行衍 衒 街衙 卫衡衢 衣表衫衰衲衷衩 衽衾衿袁袈袋袍袒袖
袜 袤袪被袂 袱裁裂袅裎裀 裒裔裕裘裙补装裟 裨 裰裱裳裴裸裼裹 裾 褊
裈 褂褪褐褓袆褙褡褚褥 褫褰裤褦 褶亵 襄衮襁袢袄 褒 裣裆襟襦 褴褛
衬 袭 襶 襾西要覃覆 镎见规觅视觇 觋觎 亲觊觏觐觑觃觉览觌觍观 氖角觔觖
觚 解觥 觫 觱觳觞触 萘言订讣訇计讯讨讦训讪讫讬记讧 讱讹讶讼诀讷访许
设讻 诉诃诊证 訾诂诋詈 讵诈诒诏评 诎诅词 诩询诣 试诗诧诟诡诠诘话该详诙诖诓
诔诛 詹 认 诳誓诞诱诮语诚诫诬误诰诵诲说谁课谇诽谊訚调谄谆谈诿请诤诹谅诼论谂
谀谞谝 谔谛谐谏谕谘讳谙讽诸谚 谖诺谋谒谓谍 谌誊诌 謇 谎谜谧谑谤谦谥讲
谢 谣 谡謦谟谪谬讴谨谩 谲讥 谮识谯谭 谱警 谵譬译议谴护诪誉读谫 变
雠詟谗谶让 谠谳 谷 豁 硇豆岂 豌豉豇 豕豚 象豢豨豪豫 豭豳
铌豸豹豺貂貆貅貉貊貌 貔 脲贝贞负财贡 贫货贪贩贯责贮 贰贵贬贷买贶费贴贻贸
贺贲贳赂赁贿赅资贾贼 赈赊宾赇赒赉赐 赏赍赔赓贤卖贱赋质账 赌赖赗 赚赙购赛赜
贽赘赠赞 赡赢赆赃赑赎 赝赣 赤赦赧赫赭赪钕走赳赴赶起趁趄超越 赵趣趑趋趟蒎足趵
趾 趺 跋 跌跎跏跑 跖跚跛距 跫跟 跣跨跪跬路踩跳跽踉踊 踏践踔踖踝踞踢踣踧
踡踟踮 踵踹蹓踽蹁蹂蹄踱蹀蹐蹇蹈蹉蹊跃跄跸 蹙 蹒 蹯蹲蹴蹶跷 蹬蹭
蹰躁 躅躇趸踌跻 躐踯 踬 跹蹑 躏蹦 身躬躭躲躯 躺车轧轨军 轩轫轭软轱轸轴
轵轷轶轺轼较辂 载 轾辄 辅轻 辆辎辉辍辇辈轮辋 辊 辑辏输 辐 辗舆
毂辖辕 辘 转辙轿 轰辔轹轳 辛辜辟 辣 办辨辞辩耪辰辱农狍辵迂迄迅
迎近迓返 迢迤迥迦迨迪迫迭逄述 迷迸迹追迺退送适逃逅逆逋逯逍透逐逑途迳逖逗这通
逛逝逞速造逡逢连逭逮 进逵逶逸 逼遇遁遂遄逾 运遍过遏遐遑遒道达违遘遥遛逊递远
遣遢 遨 遭遮 迟遴遵 迁选 遗 辽遽避邀迈邂还 迩邃邈边逻逦疱邑邛邗邙邕邢 那
邡邦邨邪邠邯邱 邴邳邵邶邸邰邽郁郅郇郊 郃郗郄 郛郎郡郢郤 郜郏部郭郴邮郯都郿鄂
邹鄄邬乡鄌郧鄙 鄢鄞鄠鄜邓 郑邻鄱邺郐郸 鄯邝酂酃酆郦郝酉酊酋酌配酎酒 酗酢酥酣
酤酡酩酪酬酲酵酷酸酴酹酞 醅 醇醉 醋醍醐醒 酝 醢 醪医酱醮醯醴 醵醺
酿 锫釆采释釉里重野量 锌金钊钉釜针钓钏钙 钗 钯钝钩钤钠钞钮钧铃钹钰 钿
钜铉 钳铅钺钵 铇铏铰钲银铳铜钴铂铣铨铢铭铦衔铫锐钾销锈锉铹铤铗锋锓铀锄铓铺铻
锯钢录锥锟锤锱铮锭钱锦锚锡锢错锛锜 锅锻铋铵锹键 锺锴镀锷锽锸锲 锾 锁 锘
镃 铠镐镒镇镑镞链镝鏖铿锵镗镜镘镖镛镂铲 锑錾铙铧铝镪钟镫镌镰镯铁镮铎镍铛 铸
镬镔 鉴 钨 铄 镳 钥镭镶 镊𰾠锣 銮凿锰长蜱门闩闪闫闭闬开闳闰闲 间闵闸 阂
阁阀闺闽阃阆闾阅阊阉阎阏阍阈阒 闱 阔阕阑阇阗阌闿阖阙闯 关 阓阐阚 阛闼 阜
阝 阡 阮 阱防阪阻阼阿陀陂附陋限陌降陔陉陛陕陞陟陡院阵除陪陬阴陲陈陴陵陶陷
陆 阳 隍 隅隆隈陧队隋阶隔陨 隗隘隙际障 隧 险 隰隐隳陇随隶 苤隹隻隼雀雁
雄雅集雇雉隽雌雍雎 雒雕巂虽雘双雏杂 难钷雨雩雪雯 零雷雹电需霂 霄霈霉
霆震霍霎霏 霓霖霜霞 雾霰霪 露 霸霹 霁 霾霭雳叇灵叆 青靖靓靛静镤非靠靡氆
面 靥钎革靳靴鞅靶靰 鞍巩鞋 鞘 鞡 鞠 鞫 鞭鞮 鞲鞴 鞑 鞯羟韦
韨韩韧 韪韬韫 炝韭 音韶韵 苘页顶顷项顸顺须颂颃预顽颁颔顿
顼 颀 颇领 颉颐颚 颏 头颒颊颕 颈颓频颗 题额颜 颛 颡颠类 颟颢 顾颤显
颦颅颧粬风 飒飓飔 飏飖飕飘飙飗 飞醛食 饥饤 飧饨饪饫饬饮饭饴饲饱饰饸饹饺饼
饻饷养饵餐饾 馁饿 馂馀馄 馅饯饽 馍馆 餮 饧 饩 馒 馑馈馔 饶饔饕
飨餍 馋 首馘馗炔香馥馨 蛲马驭冯驰驯驮驲驳 驻驽驹驾骀驸驶 驼驷 骈
骇 骆 骃骎骏骔骋骓骍 骑 骐骛 骗骢 骞腾驺 骟骚骝骡蓦骜骖骠驱
骅 骁 骄骕验 惊 驿骤驴骧骥 骊 骨 骶肮骱骰骷骸骼 髀 髅 髑 体髋髓鞣高
铷髟髡 髦 髫髭 髯鬃 髻髹 鬈 鬐 鬘 鬟鬣鬓 鬥 阋 阄铯鬯鬱
锶鬲 鬻 鬼魁魂魃魄 魅魏 魈魍魉魑魔 魇唢鱼鱿鲁鲖鲂鲇鲋鲐鲊鲑鲔鲘鲍鲞鲜
鲛鲧鲤鲠鲨鲭鲵鲳鲓鲲鲸鲫鲩鳊鲽鳅 鲡鳃鳏鳒鲥鳍鳌鲶鳗鳀鳖鳝鲟鲎鲙鳣鳞鳔鳜鳘鲮鲈鲄
鸟 鸠凫 凤鸣鸢 鸱鸩鸨 鸦 鸳鸮鸪鸯鸭鸰鸲鸿鸽 鸺鹈鹅鹃鹄鹁鹉鹆鹌 鹊鹑
鹏鹍 鹙鹕鹗鹜莺 鹤 鹡鹚鹒 鸧 鹘鹣鹢鹞鸡 鸥鹧 鹥鸷鸶 鹫鹰鹭鹪鹩 鹬鹯 鸴
鸬鹦鸾 鹳鹂 卤 盐鹾熵鹿麂铈麀 麇麈麋 麑麒麓 丽 麝 麟 涮麦
麹蒴麻麽麾 黄 黉铊黍黏黎 溚黑黔默黛黜点黝黠黥黧 黯 黡黩傣黹黻黼钛黾鼋 鼌
鼍钽鼎鼐 鼒 鼓 鼗 鼙 羰鼠鼢 鼬鼯鼹鼷醣鼻 鼾 铽齐斋 齑菾齿龀龁龂
龃龄龆龈 龉龊 龋腭龌 龙厐龚龛舔龟烃龠 楟酮茼烔钍佤烷烯螅氙酰鳕荨垭铱钇镱
铟铕 鳟蟑礃锗酯
万丘乒乓甭孬 么 乩 亍亓亘 仂仉 伃 伽 伫 佼 佴佫 侢
伣俅伡 侴 倢 倴倔 偲 偬 偌 僬 儡
傩 党凃 减 冨 剞劂刿 勋 匆
吋 吱 呎听 咚 咥哆咻 呙 呗唑 啗唪唼啐啍唶 唝
啯嘌唠噙 呒 哒 嚆 嚭呖噜 喾啃 囝囡 圊 圯圪壳
坻 坿垇 垞垤垚垌 垟垕 埂 垸 埌 埕 堉堎堀堌 塓 堍
埚塬塱塝堽 塥 墕 垱 墙 墒垅 夼
姘娒 婥 媄媞 媮婿 婳娴妩嫒 孃 孖
屲屴屼 岙岍岜岨岬岞峃峇 峱 崞崮 崽崴峣 嶓崄 岿
巯 幞 庄 廒 庼 异弌弎甙 忪
扡扦抃 掰拶 捃 挜 掮揸
揎 揶 搢揾 搡 摷 搿 挦 揿 掸 掱
敜敫 斏 昀 昊 晟 晧 晾暋暌 晔暧
杠 杅 杻 栝 桋栒栟
桬 枧棑 棸 椑 楙楀楂 椝 椫橇 檎 桪枟槜檑 檴 橱
橥 榉 毚毵 氞氟氨氧氥
氦氢氰 氮氩氕氘氚 氿汆沄沇 洴 涌 浤
浼淖 淠 沨 渌淼渲 溏 溴滉滏 漷澉 沵 滢瀌
灺炅 烱 焯 焱 熳
燋 烨 牮 犇犄 狨
猆 猃 猕獾 玥 珺 琼琷琰珐珲瑄瑂瑔瑢琏瑭
璩 瑷 甏 畈疃 疢 痤 瘃 瘄瘈瘌
瘭 瘅癒 癿 盹 眚眢 睽 瞟瞫
矬 矽 砢砣砠 砝 砷 硐硅 硭硪 碚 碳碶
磡 硚 礞硷硒 禚 禤 稖
窿 竜 筘 篥 簉 簰籝 粞粦粮
粽糅 糌 纼 纰 绁 绲缂 绬 缑
罾 芈 聱 肪胛 脉胆
脞 脖 腌 膘 臑膑 舢舨舺
茓芘 荅 茬莦 荝 葠蓪 苁蓷 薸
虒 蚬 蝓 蝤螓 螗螵
蟢蠋蛴 蠭 裢 襞 觜
诇 诨 谉谰 豮
贠 趱 跤 踦踪 蹩 蹼 躞
轲 辚 迮 邋 邘 郈 郚 郫 郓
郾 鄚鄣 醑 酾酽 钋钐
钒 钑钫 钭钘 铍钻铆 钼 铐 铬 鋆 锂 锒钡 铔 锨 锬锖
锠锞鍪 铡镁钖锳 镏 镉铩鏊蓥鎏 镦 锏镨镣 镡镢 锧鑫
闹 阽 靔 靺 靼
鞨 颋颌 颙 饦
馊馎 馃 驵 骦骉 髁 鲀
鲅 鲒 鲰鲻鲷鲗鳇鲝鳆鳄鲢鲦鲱鲴鳁 鳎鳢鲚鳙鳠鲿 鳛 鸵鹓 鹖
鹔 鹴 麸 黟
""".replace("\n", "")
_TAIWAN_CODEBOOK = """
一丁七丈三上下不丐丑且丕世丙丞丢並 丨个丫中丰丱串 丶 丸丹主 丿乂乃久之乍乎
乏乖乘 乙乜九乞也乩乳乾亂氹亅了予事 二于云互五井亙况些亞亟 亠亡亢交亥亦亨享京
亭亮亳亶亹 人什仁仃仄仆仇今介仍仂仉仔仕他仗付仙仝仞仡仟代令以仰仲仳仵件价任份仿
企伉伊伋伍伎伏伐休伙伯估你伴伶伸伺伻似伽佃但佈位低住佐佑佔何佗余佘佚佛作佞佟佺佽
佩佪佯佳併佶佸佻佾使侃來侈例侍侏侑侔侖侗供依侷俔俅侮侯侵侶便係促俄俊俎俏俐俑俗俘
俚俛保俟俠俞倈信修俶倀倅倌倬俯俱俳俵俸俺俾倉個倍倆倏們倒倖倘候倚倜倂借倡倣值倥倦
倨倩倪倫倭偈偎偲偁偃假偉偏偕做停健偪側偵偶傎傔傖傀傅傍傑傒偫傘備傚傞𠉰催傭傲傳傴
債傷傾僂僅僇僉僊像僱僎僦僨僩僑僕僖僚偽僥僧僭僮儁僵價僻僾儀儂億儆儈儉儎儌僰儋儇儐
儒儔儕儗儘儤償優儲儳儷儻儼兡儿兀允元兄充兆先光兇克兌免兔兒兕兗兜兢兣入內全兩兪
八公六兮共兵其具典兼冀 冂冉册再冏冓冑冒冕 冖冗冞冠冢冤冥冪 冫冬冰冲冱冶冷冸冽
况凄准凈凉凊凋凌凍减凑凓凜凝决几凡凭凱凳凰 凵凶凸凹𠙽出函 刀刁刃分切刈刋刎刑划
刖列刨初删判別刦利剁刮到刲刳刵制刷券刹刺剋剃剄則剉削刻前剜剞剡剔剖剗剛剝剩剪剮副
割剴創剸剽剿劂劃劄劇劈劉劊劌劍劑劓劖劚剅力功加劣助努劫劬劭効劻劾劼勁勃勅勇勉勍勒
動勖勘務勝勞勠募勢勣勤勦勩勰勳勱勵勷勸 勹勺勻勿匃包匈匊匍匏匐勾匕化北匙 匚匜匝
匟匠匡匣匪匯匱匭匳匵 匸匹匾匿區 十千卄卅升午卉半卑卒卓協南博卍卜卞占卡卣卦 卩
卬卭卯印危卲却卵卷卸卺卻卮卽 卿 厂厄厎厓厔厘厙厚厝原厠厥厫厭厮厲厦 厶厹去叁參
叅 又叉及友反叔取受叙叛叟叡叢叠口古句另叨叩只叫召叭叮叱可台史右叵叶司吁吃各合吉
吊同名后吏吐向吓吒君吝呑吟吠否吩咐吪含吭吮呈吳吵呐吸吹告吻吼吾呀呂呃呆咂呢呦呧周
咒呭呱呫味呵呶呷呻呼命咀咄咆咈和咎咏咋咍咡咤咢咥咧咨咫咬咯咱咳咷咸咺咽哀品哂哄哆
哇哈哉咻咿員哥唎哦哩哭哮哲哺哼哽哿唁唆唈唉唐哤哨唄唏售唯唱唳唾啁啣啄商問啓啖啗啜
啞唪唼啐啻啼喁喀喂喃善喆喇喈喉喊喏喓喋喑喘喙喚喜喝喞喧喻喪喬單啾嗃嗅嗎嗇嗑嗓嗔嗚
嗛嗜嗟嗣嗤喔喟喤喫喭嗆嗉嗒嗿嘅嘆嘈嘉嘏嘐嘑嘒嘖嘗噓嗹嘂嘎嘔嗷嗽嗾嘬嘩嗶嘰嘯嘲嘴嘵
嘶嘷嘹嘻嘿噉噌噍噎噢噏噤器噩噪噫噬噯噲噴噶噥噦噸噱嚀嚅嚇嚏嚙嚚嚲囅嚥嚬嚮嚳嚴嚨嚶
嚼囀囁囂嚷囉囊囈囑囓叻囗回囚四囪因囤困囫囷囹固囿圃圄圈圉國圅圇圂囮圍園圓圖團圜圕
土在圩圬圭圮地圻址坻坟均坊坍坎坏坐坑坂坌坡坤坦坭坵坷坼垂坪坫坰坳型垓垝垠垢垣垞垤
垛垚埃埋城埒埂埏域埠埦埭埶執培基堂堅堆堊埤埴埵埸堃堙堞堡堠堤堪堯報堲場堵堧堰堦塍
塊塋塌塑塔墓塚塗塘塞填塢塏塤塒塎塓塵塹塼塾塿墀墁境墅墉墊墄墍墐墜墝增墟墨墩墮墺墳
墠墫墦墾壁壅壇壈壎壑壓壕壖壘壙壞壟壚壜壤壩圳士壬壯壹壺壻壽壼 夂夆 夊夏 夔 夕
外夙多夜夠夢夤夥 大夭天太夫夬央失夯夷夸夾奄奇奈奉奎奏奐契奔奕奓套奚 奘奠奡奢奧
奩奪奬奭奮奀女奴奶奸好妁如妃妄妊妒姸妓妖妗妙妝妣妤妥妨妯妹妻妾姆姊始姍姐姑姒姓妲
妮委姚姜姝姞姙姣姤姥姨姦姪姬姅姻娃姿威娉姮姱娌娑娘娛娜娟娠娣娥娩娓娼姹娶婁婆婉婊
婕婚婢婣婦婪婭娵婐婞婷婺媒媚媛媟媢媧媊媕媖媳媵媸媼媽媾媿嫁嫂嫄嫉嫋嫌媲媺嫖嫗嫠嫡
嫣嫦嫩嫪嫘嫚嫜嫫嫺媯嬈嬉嬋嬌嬃嬖嬗嬙嬛嬴嬪嬷嬭嬲嬾嬰嬸孀孅孌妞子孑孔孕孖字存孚孛
孜孝孟季孤孥孩孫孰孱孳學孺孼孿孵宀宁它宄 宅宇守安宋完宏宓宕宗官宙定宛宜客宣室宥
宦宬宮宰害宴宵家宸容梥寀宿寁寂寃寄寅密寇寧富寐寒寓寔寖寘寎寞察寡寢寤寥實寨寠寙審
寫寬寮寰寵寶宧寸寺封射 將專尉尊尋對導 小少尔尖尚尠尟尕尢尤尨尪就尰尲尬 尸尹尺
尻尼尾尿局屁居屆屈屉屋屍屎屐屑展屛屝屠屢屣層履屧屨屩屬屭 屮屯岳山屹屺屼岌岐岑岔
岅岡岢岨岩岫岱岵岷岸岣峒峙峋峨峭峯島峻峽峓峴峹峢崇崎崐崑崔崖崙崗崛崢崩崚崧崦嵇崿
嵋嵫嵊嵌崶嵎嵐嵒崱嵩嶁嵬嵯嵾嶂嶄嶇嶒嶔嶸嶠嶞嶧嶙嶷嶺嶼嶽巉巋巍巒巘巔巖嶗巛川州巡
巢 工左巧巨巫差 己巳已巴巵巷巽 巾帀市布帆帋帑帕希帖帗帘帙帚帛帔帝帡帟帥師席帨
帳帶帷常帽幃幄幅幀幋幌幔幕幗幘幟幡幢幣幪幫幬幭幯幰幛干平年幷幸幹 幺幻幼幽幾 广
庀庄庇床庋序底庖店庚府庠庥度座庫庭庳庵庶康庸庾庬庴廁廂廌廑廈廉廊廋廏廓廖廚廕廝廛
廟廠廡廢廣廨廩廬廱廳龐 廴延廷廹建廻廼廾廿弁弄弇弈弊 弋弍式弑 弓弔引弗弛弟弢弣
弦弧弩弤弨弭弱弰張彄強弸弼彀彈彊彌彎弘彐彖彗彘彙彝彠 彡形彤彥彧彩彪彫彬彭彰影彲
彳彷役彼彿往征徂待徇很徉徊律後徐徑徒得徘徙徜從徠御徧徨復循徬徭微徯徵德徹徼徽
心必忉忌忍忒忖志忘忐忑忙忝忠忡忪快忭忮念忱忸忤忻忽忿怍怎怏怒怕怖怗怙怛思怠怡急怦
性怨怩怪怫怯怱怵怈怳怔恁恂恃恆恇恌恍恐恕恙恚恝恢恣恤恥恧恨恩恪恫恭息恰悃恉悄悦悌
悍悒悔悖悚悉悛悝悞悟悠患悤悁悱惪悲悴悵悶悸悻悼悽悾惄情惆惇惎惑惓惕惘惙惚惛惜惝惟
惠惡悰惋惰惱惲想惴惶惸惹惺惻愀愁愆愈愉愊愍愎意愒愔愕愚愛愜感愠愧慤愫愬愴愷慎愾愿
慂慄慆慇慈慊態慌慁慓慕慘慙慝慚慟慢慣慩慧慨慫慮慰慳慴慵慶慷慼慾憁憂慥憊憐憑憔憚憎
憝憤憧憨憩憬憫憮憯憲憍憶憸憾懃懆懂懇懈應懊懋懌懍懓憒憗憦憏懟懣懥懦懲懿懵懶懷懸懺
懼懽懾戀戁戇慜戈戊戌戍戎成我戒戔戕或戚戛戟戢戥戡戣戩截戳戮戰戲戴戤戸戾房所戽扁扃
扅扆扇扈扉扊 手才扎扑扒打扐托扛扠扢扦扣扭扮扯扱扳扶批抵扼找承技抃抄抆抉把抑抒抓
抔投抖抗折抨披抬抱抶抹押抽拂拄拆拇担拈拉拊拋拌拍拏拐拑拒拓拔拕拖拗拘拙拚招拜扤拮
拭括拱拯拳拴拷拽拾拿持挂指挈按挍挑挖拼挨挪挫振挶挹挺挼挽挾捄捆捉捋捌捍捚捐捓捕挲
捎捧捨捩捫捬捭据捲捱拼捶捷搸捻捽掀掃掇授掉掊掌掏掎掐排掖掘掛掠採探掣接控推掩措掬
掯掄掞掔掫揀揄揆揉描揸提揷揖揚換揜揠搽握揣揩揪揭揮揲援掽掾揅揗揰揫搆㩁損搏搒搓搔
摇搗搜搠搢搥搦搨搪搬搭搴搵搶搐摏搊搘摘摟摠摧摩摭摯摳摶摸摹摺摑摒摛撅摴摽撩撇撈撐
撒撓撕撙撚撝撞撤撥撫播撮撰撲撬擫撱撻撼撾擁擂擄擅擇擊擋操擎擐擒擔擗擘據擷攙擠擡擢
擣擦擬擯擱擥擭擲擴擺擻擾攀攄攔攖攘攜攝攟攢攣攤攪攫攬攩拃支攲 攴收攷攸改攻放政故
效敉敘教敏救敕敖敗敔敂敝敞敢散敦𢿌敬敲整敵敷數敺斁斂斃斅敭文斌斐斑斕 斗料斛斜斝
斟斡斢斤斥斧斨斬斯斮新斷斲斸 方於施斿旎旁旂旃旄旅旆旐旋旌族旖旒旜旛旟旝 无旣旡
日旦旨早旬旭旰旱旴旺昌昂昆昃明昏易昔昀昉昕昝星映春昧昨昭是昱昴昵昶晁時晃晉晌晏晚
晛晝晞晡晤晨晦晧普景晰晳晴晶晷智晬暄暇暍暑暖暗暘暝暢暈暉暫𣊓暮暱暴暵暹曁曉暾曀曈
曇曏曙曚曛曜曝曡曠曦曨曩曬昇曰曲曳更曷書曹曼曾替最會朅勗月有朋服朔朐朓朏朕朗望朞
朝期朢朦朧 木未末本札朮朱朴朶朽杆𭩚杇杉杌李杏 材村杓杖杕杜杞束杠杗杪杭柿杯杰東
杲杳杶杵杝杼松板枉析枕林枙枚果枝枤枇枋枌枘枯枲枳枵架枷枸柎枻柁柄柏某柑柒染柔柘柙
柚柜柝柞柟柢查柬柯柮柰柱柳柴柵枰枹柷栗校栩株核栱根栻格栽桀桁桂桃桅框案桌桎桐桑桓
桔桕栖栲栳桄桫桴桹桶桷桿梃梁梅梆梏梓梔梗桚梜條梟梢梧棃梭梯梱械棁梳梵桮梐桯棄棉棋
棍棒棕棖棗棘棚棟棠棣棧棫棬森棰楦棲棹棺椁棻棼椀椅植椎椒棐棓棱棨椐椓椷椰椹椽椿楅楊
楓楔櫛楗楙楘楚楞楝楠榆楢楣楫業楯楮極楷楹椶椸楛楨楩楸榔榕榖榘榛榜榦榧榨榫榭榮榱榴
榻榾槁槅槊構槌槍槎槐槓榎榤榥榷榼樑槃槧槨槩槪槳橰槽槿樁樂樊樓樕標樛樞樟模樣槥樅樏
樗槱樲樸樵樹樽樾橄橈橋橐橙橛機橡橧横樨檸橢樺橁檖橘檀檉檄檐檔檜檟檝檠檢檣檬檮檯檳
檻櫂櫃櫈櫓櫚櫜櫝櫞櫟櫱櫪櫬櫺櫳櫽欄櫻權欝欒欖欃柩欠次欣欬欲款欷欹欺欽欿歃歆歇歕歉
歌歎歐歔欻歙歛歝歟歠歡歞止正此步武歪歧歲歷歸 歹死歿殀殂殃殄殆殉殊殍殖殘殕殛殞殤
殫殢殪殮殯殲 殳段殷殺殼殽殿毀毅毆 母毋每毒毓 比毖毗毘 毛毡毧毫毬毯毳氂氅毸毹
氄氈氍毜氏氐民氓氯气氛氣氤氳浠水氷永氾汁汀求汎汗汚汛汜汝江池汊汐汔汕汞汨汪汰汲汴
汶汹決汾沁沂沄沃沈沉沌沍沐沒沔沖沗沙沚沛沆沓汩汭沅沬沫沮沱河沸油治沼沽沾沿泂泄況
泅泔泆泉泊泌泓法泗泚泙泛泠泡波泣泥注泫泯泮泰泱泲泳沭沴泐泖泝洄洊洋洌洎洒洗洛洟洞
津洧汧洪洫洮洲洱洳洵洶洸活洼洽派洿流洙洚洹浙浚浣浦浩浪浮浴海浸浹浼涇消涉涌涓涔涕
涖涘浡浜浥涂涅涒涑涯液涵涸涼涿淅淆淇淋淑淖淘淙淚淝淡淤淦淨淪淫淬淮淰深淟混清淹淺
添涎涪涴淀淄淒淞 淩淜渙渚減渝渠渡渣渤渥渦測渭港渰渴游渺渾湃湄湅湊湍湎湖湘湛湝湧
湫湮湯湲湱渟渢湑湓湔湜湏湩源準溛溜溝溟溠溢溤溥溧溦溪溫溯溱溲溶溷溺溼溽滁滂滃滄滅
滋滌滎滑滓滔滕溏溘滇滘滫滮滯滲澝滴滬滸滺滾滿漁漂漆漏漑漓演漘漙漠漢漣漪漫漬漭漰漱
漲漳漸漾漿潌滷滹漈漉漎漚漦漩漯漵漶漊漕潁潑潔潘潛潟潢㵎潤潦潭潮潰潼潴潸潺澀澄澆澇
澈澉澌澎潏潝潞潠潈濆潯潾澍澂澒潙澡澟澣澤澮澳澹激濁濓濃澠澥澦澧澨澰澴澱濇澶澼濘濔
濕濈濊濛濟濠濡濤濫濬濮濯濰濱瀉濩濞濺濼濾瀆瀏瀑瀁瀕灠瀚瀛瀝瀟瀠瀨瀘瀡瀣瀼瀋瀍瀧瀦
濳瀲瀾瀰瀵瀹瀺灌灃灉 灑灘灝灣灩灞灤炸火灰灶灸灺灼災灾炊炎炒炕炙炘炤炫炬炭炮炯炰
炱炳炷炧炁烈烋烏烕烖烘烜烝烟烹烺烽焌焙焚焜無焦焰然焞焠焫煅煇煉煊煌煎煮煒熙煖煜煞
𠙦煤煥煦照煩煨煏煬煽熀熄熅熇熊熏熒熛熟熔熨熬熯熱熠熲熸熺熹熾燀燂燐燖燙燃燄燈燎燒
燔燕營燠燥燦燧燬燭燻燮燹燼燿燾爆爇爍爓爐爛爚爝爟焮焯爥爨烤爪爭爮爬爰爲爵 父爹爺
爸爻爽爾 爿牀牂牁牆 片版牋牌牒牕牖牘牓牙 牛牝牟牡牢牣牧牴物牮牯牲牶牷特牽牿犀
犂犉犍犒犖犢犧犨牳犬犯犴狀狂狃狄狁狎狐狗狙狡狥狠狩狴狸狹狼狽狻狺狷猋猁猖猗猙猛猜
猝猞猊猘猢猥猩猪猫猴猶猷猨猱猺猻猾猿獃獄獅獍獐獒獗獘獧獨獪獬獠獮獲獯獳獷獵獸獺獻
玁獰玀玄率玅玈 玉王玎玕玔玖玟玠玢玦玨玩玫玲玳玷珂珇珈珉玻珀珊珍珌玼珞珓珠珙珖珥
珣珩珪班珧珮珽現球琅理琇琉珶珷琊琕琖琚琛琢琤琥琦琨琪琭琮琯琳琴琶琵琫琬玤琲瑋瑕瑙
瑚瑛瑜瑞瑟瑠瑀瑁瑇瑑瑗瑣瑶瑩瑪瑰瑲瑬瑯瑱瑳瑴璀璃璋瑽瑾璁璆璂璈璡璇璐璜璣璘璞璟璠
璧璨璯環璪璫璽璲璸璵瓊瓏瓔瓛璿瓚瓈瓀瓘璥瓜瓞瓠瓢瓣瓤 瓦瓮瓴甁瓷瓻甃甄甇瓿甌甍甎
甑甓甕甗甔甖瓩甘甚甜甞 生甡產甥甦 用甪甫甬甯 田由甲申男甸甽甾畀畋界畎畏町畊甿
畇畔畚畛畜畝留畟畢畤略畦番畫畬畯異畮畷畹畽當畸畿疆疇疊 疋疎疏疐疑 疒疔疞疕疚疝
疢疣疤疥疫疧疲疳疴疵疸疹疼疽疾疿病症痂痊痌痍痒痔痕痗痘痛痞痣痡痢痯痰痳痴痺痼痾痿
瘀瘁瘂瘇瘉瘋瘍瘏瘝瘐瘓瘕瘖瘞瘟瘠瘡瘢瘤瘥瘦瘧瘬瘨瘰瘳瘴瘵瘻瘼療癃癆癇癉癖瘺癑癘癙
癜癤癡癢癥癏癭癧癩癬癮癯癰癱癲疙癶癸登發 白百皂的皆皇皈皐皎皓皖皙皜皚皤皥皦皭皪
皫 皮皴皷皸皺皽 皿盂盃盅盆盈益盌盍盎盒盔盛盜盝盞盟葢盡監盤盥盧盦盭盩盪盬 目盲
直相盼盾省眄眇眈眉眊看盻盰盱眚眛真眠眢眥眦眩眙眯眵眴眶眷眸眺眼眾着眹睨睇睆睊睍睚
睛睜睞睪睡睢督睦睫睠睟睥睹睿瞀睘睯瞋瞍瞎瞑瞞瞠瞪瞥瞧瞬瞳瞯瞭瞰瞶瞻瞽瞿矇矍矓矗矙
瞢矚眭矛矜矞 矢矣知矧矩短矮矰矯矱 石矴矼砆矻砂砉砌砍砑砒砭砢砣砥斫砦破砲砠砧砮
砰硃硏硎硜硝硤硨硫硬硭硯确碆碌碎硼碗碁碇碔碑碓硾碡碪碞碭碟碣碧碩碬碯碰確碼碾磁磅
磊磇磋磐磓磔磕磉磑磛磟磚磧磨磬磯磲磺磽礁磷磻礎磈礙礬礧磴礮礦礪礫礱礴碱示礽社祀祁
祅祇祈祉礿祕祊祏祐祓祔祖祗祚祛祜祝神祟祠祫祲祥祧票祭禆祼祺祿禀禁禋禍禎福禘禊禔禖
禕禡禝禦禠禫禧禪禨禮禰禱禳禴 禸禹禺离禽 禾禿秀私秈秉秆秋种科秕秒秔秘秪租秠𥞊秤
秦秧秩秫秭秬秷秸移稀稂税稈程稌稍稊稔稗稙稚稛稜稟稠稭稹種稱稷稻稼稽稿穀穄穆穉穋穌
積穎穗穜穡穟穢穠穨穩穫穭穰 穴穵究穸穹突空穽穿窀窄窅窆窈窋窊窒窕窖窞窗窘窟窠窨窩
窪窬窮窰窳窵窶窸窣窺窾竂竁竄竅竇竈竊 立竑站竚竝竟章竣童竦竪竭端競 竹竺竽竿笄笈
笊笏笑笙笛笞笤笠笥符笨笋笪笫第笮笆笎笱笳筅筆筇等筊筋筌筍筎筏筐筑筒筓答策筠筤筥筦
筧筩筮筲筴筵筯筱箋箎箂箍箇箔箕算箛箝箠管箏箐箒箑箘箜箊箬箭箯箱箴箸箾節篁範篆篇築
篋篘篔篙篚篝篠篡篤簑篧篨篩篦篪篲篳篺篴篷簀簆簇簋簍簌簏篌篾簃簚簜簟簞簠簡簣簥簦簧
簨簪簫簷簸簵簻簽簾簿簴籃籊籌籍籄籐籔籓籀籙籚籛籜籟籠籤籥籨籧籩籫籮籬籯籲篰米粃籹
粉粒粕粗粘粟粢粤粥粦粧粱粲粹粳粺粼精糈糊糉糕糇糖糗糜糝糞糟糠糓糙糧糦糯糲糴糳糵糶
糎糸系糾紀紂約紅紆紇紈紉紊紋紵紖紏紝紞紽納紐紓純紗紘紙級紛紜素紡索紫紩紬紮累細紲
紳紹紺紼紾紿絀終絃組絆紱絅絏結絶絛絜絝絞絡絢給絨絪絮絰統絲絳絖絹絺絿綁綈綉綆綌綍
綏綑經綃綜綠綢綦綫綬維綮綰網綱綳綴綵綸綹綺綻綽緅緌緎綾緇緊緋綣綯緒緘緝綿緞締緣緤
緦緵編緩緬緯練緻縕緗緙緡緥緲縏緹縈縉縊縋縐縗縛縝縟縣縑繁縞縠縢縫縱縲縳縵縶縷縹縻
總績繄繅繆繇縭縰縴繦織繕繙繚繐繞繢繡繒繩繪繫繭繮繯繳繹辮繻繼纁纂繽繾纆纇纈纘續纍
纖纏纊纑纓纔纛纚纜縧缶缸缺缾罀罃罁罄罅罇罋罌罍罏罐罆网罔罕罘罛罝罟罡罣罥罩罪罭置
罰罨署罳罫罶罵罷罹羃罽罿羅羆羇羈 羊羌美羑羔羖羜羚羝羞羢羣羨義羯羲羵羶羸羹羼羽翀
翁翅翊翌翎習翔翕翛翟翠翡翣翥翦翩翫翬翮翯翰翳翹翱翻翼翽翾翿耀 老考耄者耆耈耋 而
耍耎耐耑 耒耔耕耗耘耙耜耝耞耡耤耦耨耯耰 耳耶耻耽耿聃聆聊聒聖聘聚聞聯聰聲聳聵聶
職聽聾 聿肄肅肆肇 肉肋肌肓肖肘肚肛肝股肢肥肧肩肫肯肱育肴肸肺胃胄背胗胔胍胎胏
胖胙胚胛胝胞胟胠胭胯胡胥胸胼能胾脂脃脅脇胱脈脊脚脘脛脞脤脣脧脩脥脱脬脯脹脾腆腊腋
腎腐腑腓腔腕腥腦腴腫腮腯腰腱腸腹腠腿膀膂膋膏 腳膈膊膕膚膛膜膝膠膩膰膳膺膴膽膻膾
膿臀臂臄臆臉臊臍臘臙臑臚臝臟臠胤臣臥臧臨 自臬臭臯臲 至致臶臺臻 臼臾舁舀舂舃舅
與興舉舊舋 舌舍舐舒舘舖舛舜舞舝 舟舠舡航般舫舩舷舵舶船舸舲舳舴艇艋艄艖艗艎艘艙
艤艟艨艭艦艫 艮良艱 色艴艷蓢艸艽艾芃芄芊芋芍芎𦬊芒芙芝芟芡芣芤芧芨芥芩芪芫芬芭
芮芯芰花芳芷芸芹芻芼芽苐芾苑茆苒苓苔苕苗苙苛苜苞苟苣苡若苦苧苫英苴苹苻苽苾茀茁茂
范茄茈茅茇苢茉茗荔茜茢茦茨茫茭茯茱茲茴茵茶茷茸茹荀荃萄荊荸荇草荍荏荐荑荒茝荄莛荳
荷荻荼荽莅莊莎莒莓莖莘莞莠莢莧莆莨莩莪莫菟莉莽菀菁菅菇菉菊菑菓菖菔菘菠菜菩堇華菰
菱菲菴菶菹菼菽萁萃萆萊萋萌萍萎萑菡菌萩萐萇萼萬萱萵萸萹落葆葉葍著葘葚葛葡董葦葩葫
葬葭葯葱葳葵葷葸葺葽蒂葑葹蒐葊蒔蒙蒜蒡蒞蒟蒨蒭蒲蒸蒹蒺蒼蒿蓀蓁蓂蓄蓆蓉蓊蓋蓌蓍
蓑蔀蓐蓓蓞蔌蒯蒱蓧蓫蓬蓮蓰蒓蓴蔻蓺薌蓼蔆蔇蓿蔂蔑蔒蔓蔕蔗蔚蔞蔟蔡蔣蔭蔴蔥蔦蔔蓽蔽
蔬蕺蔾蕃蕉蕊蕎蕕蕖蕘蕙蕝蕞蕡蕢蕨蕩蕪蕭薀薯蕆蕈蕳蕤薁蕷蕸蕾薐薢薄薅薇薈薉薊薑薔薖
薙薛孽薜薦薨薩薪薤蘖薏薰薳薹薺薽薿藄藇藉藎藏藐藍藁藕蔿藚藜藝藟藤藩藥藪藷藹藺蘈藻
藾藿蘄蘅蘆蘇藴蘂虁蘋蘀蘚蘞蘢蘩蘭蘨虀蘧蘸蘿蘼虉虆蘑虍虎虐虓虔處虛虜虞號虡虢虥虣虧
虩 虫虱虯虹虺蚓蚔𧉕蚊蚋蚌蚣蚤蚧蚩蚪蚨蚯蚰蚱蚳蚶蚒蛄蛀蛆蛇蛉蛑蛋蛙蛛蛟蛣蛤蛩蛺蛭
蛾蜹蜂蜃蜇蜈蜋蜓蛸蛹蜕蜀蜉蜍蜊蜎蜘蜚蜜蜡蜣蜩蜥蜺蝃蜴蜞蝀螂蜢蜮蝎蝓蝕蝗蝘蝙蝝蝟蝠
蝏蝥蝦蝮蝱蝶蜷蟮蜻蝌蝨蝡蝣蝤蝴蝸融螓螞螘螟螢螣螥螃螄螫螱螺螻螽螿蟄蟊螬螭螮螯螳蟀
蟆蟈蟋蟣蟪蟒蟚蟠蟬蟲蟭蟹蟻蟾蟳蟶蠃蟿蠁蠅蠆蠍蠕蠖蠛蠜蠟蠡蠢蠣蠨蠹蠱蠲蠶蠻虬血 衁
衄衅衂衆衇衊 行衍衎術衒衕衖街衙衚衝衞衡衢衜衣表衫衰衲衷衺衵衽衾衿袁袈袋袍袒袖袗
袜袚袞袠袤袪被衭袴袵袷袱裁裂裊裎裀裏裒裔裕裘裙補裝裟褎裨裯裰裱裳裴裸裼裹製裾複褊
褌褉褂褪褐褓褘褙褡褚褥褧褫褰褲褦褵褶褻襃襄襋襁𧝞襖襚襌襜襞襛襝襠襟襦襪襏襤褸襫襭
襯襴襲襼襶裌襾西要覃覆覇覈覊 見規覓視覘覜覡覦覩親覬覯覲覷𧢈覺覽覿覥觀覼 角觔觖
觚觝觕解觥觩觫觭觱觳觴觸觶觼觿 言訂訃訇計訊討訐訓訕訖託記訌訏訑訒訛訝訟訣訥訪許
設訩訢訴訶診証註訾詁詆詈詋詎詐詒詔評詖詘詛詞詠詡詢詣詤試詩詫詬詭詮詰話該詳詼詿誆
誄誅誇詵詹誌認誐誑誓誕誘誚語誠誡誣誤誥誦誨説誰課誶誹誼誾調諂諄談諉請諍諏諒諑論諗
諐諛諝諞諟諠諤諦諧諫諭諮諱諳諷諸諺諵諼諾謀謁謂諜諡諴諶謄謅謆謇謌謊謎謐謔謗謙謚講
謝謟謡謏謖謦謨謫謬謳謹謾譁謿證譌譎譏譒譔譖識譙譚譍譜警譟譫譬譯議譴護譸譽讀譾讁變
讌讎讋讒讖讓讙讚讛讜讞讟誯谷谿豁豄 豆豈豋豌豉豇豎豐豒豔豏豕豚豝象豢豨豪豫豬豭豳
豵 豸豹豺貂貆貅貉貊貌貍貓貔貛 貝貞負財貢貤貧貨貪販貫責貯貲貳貴貶貸買貺費貼貽貿
賀賁貰賂賃賄賅資賈賊賉賑賒賓賕賙賚賜賝賞賫賠賡賢賣賤賦質賬賟賭賴賵賮賺賻購賽賾賸
贄贅贈贊贇贍贏贐贓贔贖贑贗贛贉赤赦赧赫赭赬 走赳赴赶起趁趄超越趕趙趣趦趨趯 足趵
趾跁趺趹跂跋跅跌跎跏跑跕跖跚跛距跗跫跟跡跣跨跪跬路跱跳跽踉踊跼踏踐踔踖踝踞踢踣踧
踘踡踟踦踰踴踵踶蹆踽蹁蹂蹄踱蹀蹐蹇蹈蹉蹊躍蹌蹕蹔蹙蹟蹣蹤蹜蹝蹠蹯蹲蹴蹶蹺蹡蹢蹬蹭
蹰躁躄躅躇躉躊躋蹋躐躑躒躓躔躚躡躩躪蹷蹚身躬躭躱軀躶軃躺車軋軌軍軏軒軔軛軟軨軫軸
軹軷軼軺軾較輅輈載輀輇輊輒輓輔輕輗輙輛輜輝輟輦輩輪輞輠輥輧輯輳輸輹輻輭輮輴輶輾輿
轂轄轅轀轆轇轕轉轍轎轏轒轖轘轞轟轡轢轤軕辛辜辟辠辣辥辦辨辭辯 辰辱農 辵迂迄迅迋
迍迎近迓返迕迢迤迥迦迨迪迫迭迮述迴迷迸迹追迺退送适逃逅逆逋逌逍透逐逑途逕逖逗這通
逛逝逞速造逡逢連逭逮週進逵逶逸逷逼遇遁遂遄逾遊運遍過遏遐遑遒道達違遘遥遛遜遞遠遡
遣遢遝遨適遭遮遯遲遴遵遶遷選遹遺遻遼遽避邀邁邂還邅邇邃邈邊邏邐迻邑邛邗邙邕邢 那
邡邦邨邪邠邯邱邲邴邳邵邶邸邰邽郁郅郇郊邾郃郉郄郕郛郎郡郢郤郙郜郟部郭郴郵郯都郿鄂
鄒鄄鄔鄉鄗鄖鄙鄘鄢鄞鄠鄜鄧鄫鄭鄰鄱鄴鄶鄲鄹鄿鄺酇酃酆酈郝酉酊酋酌配酎酒酖酗酢酥酣
酤酡酩酪酬酲酵酷酸酴酹酺醃醅醆醇醉醊醋醍醐醒醓醜醞醡醢醩醪醫醬醮醯醴醲醵醺醽醁醼
釀釁釂釄醱釆采釋釉里重野量釐鋅金釗釘釜針釣釧釬釦釭釵鈇鈀鈍鈎鈐鈒鈔鈕鈞鈴鈸鈺鉁鈿
鉅鉉鉏鉗鉛鉞鉢鉤鉋鉶鉸鉦銀銃銅銎銍銑銓銖銘銛銜銚鋭銶銷銹銼鋈鋌鋏鋒鋟鋘鋤鋩鋪鋙銲
鋸鋼錄錐錕錘錙錚錠錢錦錨錫錮錯錞錡鍊鍋鍛鍪鍫鍬鍵鍼鍾鍇鍍鍔鍠鍤鍥鍭鍰鎔鎉鎖鎗鎚鎛
鎞鎡鎌鎧鎬鎰鎮鎊鏃鏈鏑鏖鏗鏘鏜鏡鏝鏢鏞鏤鏟鏐鏦鏨鐃鏵鐓鏹鐘鐙鐫鐮鐲鐵鐶鐸鐩鐺鐻鑄
鑊鑌鑑鑒鑤鑕鑛鑠鑢鑣鑪鑰鑯鑲鑵鑷鑱鑼鑽鑾鑿錳長 門閂閃閆閉閈開閎閏閑閒間閔閘閟閡
閤閣閥閨閩閫閬閭閱閶閹閻閼閽閾闃闇闈闉闊闋闌闍闐 闓闔闕闖闒關闚闠闡闞闢闤闥闆阜
阝阞阡阤阨阬阮阯阱防阪阻阼阿陀陂附陋限陌降陔陘陛陜陞陟陡院陣除陪陬陰陲陳陴陵陶陷
陸陼陽陻隍隄隅隆隈隉隊隋階隔隕隖隗隘隙際障隣隤隧隩險隮隰隱隳隴隨隶隸 隹隻隼雀雁
雄雅集雇雉雋雌雍雎雊雒雕嶲雖雘雙雛雜雝雞離難 雨雩雪雯雰雱雲零雷雹電需𩂯霅霄霈霉
霆震霍霎霏霑霓霖霜霞霢霤霧霰霪霱露霶霸霹霮霽靀霾靄靂靆靈靉霨青靖靚靛靜 非靠靡
面靦靧靨 革靳靴鞅靶靷靸鞀鞉鞍鞏鞋鞗鞘鞙鞞鞚鞟鞠鞦鞫鞬鞭鞮鞱鞶鞸鞹鞾韁韃韆韀 韋
韍韓韌韎韐韒韔韝韘韙韜韞韈韠 韭韮韱 音韶韻響韺頀 頁頂頃項頇順須頌頏預頑頒頷頓
頊頍頎頖頗領頞頡頤 頲頦頫頭頮頰頴頳頸頽頻顆顇顋題額顏頿顓願顙顛類顜顢顥顦顧顫顯
顰顱顴 風颭颯颶颸颿颺颻颼飄飆飀颽 飛 食飡飢飣飦飧飩飪飫飭飲飯飴飼飽飾飶餂餃餅
餈餉飬餌餐餖餗餒餓餔餕餘餛餚餡餞餤餟餧館餬餮餱餲餳餹餼餽饁饅饈饉饋饌饎饐饑饒饔饕
饗饜饟饞 首馘馗 香馥馨馝馡馣 馬馭馮馳馴馱馹駁駃駐駑駒駕駘駙駛駋駜駝駟駪駉騈駬
駭駮駱駵駰駸駿騌騁騅騂騃隲騑騎騋騏騖騄騙驄騠騣騤騧騫騰騶騵騸騷騮騾驀驁驂驃驅驈騻
驊驔驍驖驕驌驗驒驚驘驙驛驟驢驤驥驩驪駗骨骩骭骯骱骰骷骸骼骽髀骾髏髆髑髒體髖髓 高
髟髠髢髣髦髧髫髭髮髥髳髴髻髹鬆鬈鬅鬋鬍鬐鬒鬑鬘鬔鬙鬚鬟鬣鬢鬎鬥鬨鬧鬩鬭鬮 鬯鬱
鬲鬴鬵鬷鬻 鬼魁魂魃魄魊魅魏魐魀魈魍魎魑魔𩴾魘 魚魨魯鮀魴鮎鮒鮐鮓鮞鮪鮆鮑鯗鮮
鮫鯀鯉鯁鯊鯖鯢鯧鯫鯤鯨鯽鰂鯿鰈鰍鰌鰕鰓鰥鰜鰣鰭鰲鰷鰻鱄鱉鱔鱘鱟鱠鱣鱗鱎鱖鱨鱷鱸魺
鳥鳦鳩鳧鳲鳳鳴鳶鴄鴟鴆鴇鴈鴉鴃鳸鴛鴞鴣鴦鴨鴒鴝鴻鴿鴽鵀鵂鵜鵝鵑鵠鵓鵡鵒鵪鵰鵲鶉鵩
鵬鵾鶃鶖鶘鶚鶩鶯鶱鶴鶵鶺鶿鶊鷇鶬鶹鶻鶼鷁鷂鷄鷃鷗鷓鷕鷖鷙鷥鷩鷲鷹鷺鷦鷯鷴鷸鸇鸑鷽
鸕鸚鸞鷟鸛鸝鴹鹵鹹鹼鹽鹺 鹿麂塵麀麃麇麈麋麌麐麑麒麓麕麗麚麛麝麞麟麢麤 麥麫麯麰
麴 麻麽麾黁黃黈黌 黍黏黎黐 黑黔默黛黜點黝黠黥黧黨黯黴黮黶黷 黹黻黼 黽黿鼃鼂
鼇鼈鼉 鼎鼐鼏鼒 鼓鼕鼗鼖鼙鼛鼚 鼠鼢鼫鼬鼯鼹鼷 鼻鼽鼾齁齅 齊齋齎齏 齒齔齕齗
齟齡齠齦齧齩齬齪齮齲齶齷齣龍龎龔龕 龜 龠龢龥
万丌丘 丳与 体僣 么 亄
亍亓亘亝偌偭働儍佣侢仈伾伀伃伈傢佂佇佉佌佫佴佹佼侀侁侐侘侚侜俁俜俴倔倞倢倮偝偩
偬偯偷傁傺儡僤僬儦儩儺俌俥偘𫣲僼党兙兛兝兞冔 冨冼
刓刜剏剌剚剠劘剷 勑勔勛勡 匆 卹卼
吴厖厪 叐咼唔吋呎吰咮唶啍喌喨嗀嗁嗈嗊嘌嘍嘓嘮嘸嘽噀噂噭噳𡃱嚄嚆嚌嚎嚕嚦嚭哪
喴唇嘜听嘛噠哢响啡囝囡囥圊圌圛圞垃堍坯堿埕圯圾坱坴垌垗垟埆埌埽堀堄堋堳塥塷墻壒埇
埝埧垕塽堉坿𡋟堎埔㘾壋奜㚟𤳦夼 妵姁姶妳姺姽娀娸娬妸婤嫈婼媮婿嫏嫳嫽嫿
嬡姘她婥孃媜娒嫻嫵孓孻寪 尀 屇屄屜屪屫岙峇岬峝崤
岞屴崽岍峪峫峱崆崟崴嶓嶥嶢嶮嶰巇崞嵂岊崮岧帣幞幠幦幨幩 幈 庈庤𢊈廒廧廍廎廔
异 弌弎虨 彴彽 悆 怀怭怲恓恔惔惷惽愃愓
慉 懠懮懰戄您恬憙恵惦戭 戹戺 挷撑撶𢪘攆扡抿拶挐挱捃揑捘掂
掗掙掤捺揎揕摉揥 揶揵搧搯搰摔摜摦摲摷摻𢵧擖搌攏掮搤搾搿搡撏撦敜敳敹敿敫
斚斠 旗 昜昬昄旼昊曆昰晅晜晟晢
晾暋暌暓曖 暔晒曄暲昺朌朣 椾杈枱朳椪檨欉机朾杅杙杷杻枬枒枓柺柂柅柣柤 柲柶栔栝
栞栫栭栵桋桬桭梡梩梴棑棜棪棸椌椏椑楀楂楄楎楬楰樠樿橇橤檇檎檑檴橣檿櫌欑欙槼橚燊樳
橒栓櫉櫸柸梘栒檙橌栟 欼歈歊歗歜歁 殭歾殈殣毈 毚氝氧氥氦氫𣱢氮
氬毿淳浬浭湟 湋漋泒浤汆氿汋汍汽 沇沕泃泜洏洑洩浧涊涁溨洴淓淠淢淥淵淶淼 渫渮渲
渼湢溴滉滊滏漷潚澐瀅瀌瀳溎洤灥澔焉燨焱煃燏炅烙焄煁煙燉燋焻熿煐𤌚𤉶烱烓熤烳煝熉牏
牐牼犆犇犔犛犫牠 犵犺狅狉狘狨狫猆猈猲獀獆獝獢獫獾獼𤢛 玆 玞
玥玗琱琿璅璊璚璩瓌瓖珛瑢瑄珔琼玶瓗瑂琷琰璉甀甏甒甆 甤甩 畈疃 𤺥瘃癌
癳疘痁痐痚痠痤痱痵痻瘈瘌瘭瘯瘶瘸癟癠痧瘄疶癿 盚
盵盹眂眅眎眏眕睅睒睙睬睽瞌瞏瞟瞫矔睻 矟 矬矸碚碉砟矽硍𥖁礄
礞砞碍碏碈碻磒磠磡砝碘硐祙禚禜禤 稑稺穈穧穮秴 窔窌窴窿
竢 竾 笰笲笵筘筭筳箄箙篜篟篥𥲻簉簹籝籣籭簰筷篢 粎粍籽
糅籼粞粮粻粽糔糰粿籸緱紃紑紕絓絻綅綎綖綪緜緄線緰緶縚縮繖繘繠縇緓纙縿纕繃缽罎
罦罻罾 羋羭羱 羿羾翃翪 耏
聓聝聱 肜 肵胆胺胰胹脄脆脉脖脗脝脡脢脰腂腌腍腒腶膇膘膟膨膷臈臏臐
臛肪腺膆 舿舺艌舨艑艣艥艩艬舢艚𦩀苠荁荅菆茾菸菻萴葄葓蒢蓪蓯蓷蓘蔋蔙蔫薧薸
薾藘藙蘤蘺𦼖莦芘葠茬蘐虒虖 蚠蚡蛐蛓蛔蜑蜒蜨蜾蜿蝯螉螗螵蟁蟜蟢蠋蠐
蠙蠓蠭蠮蝻蜆蠔蚜 蚵袟衩衮袇袢袲袺袽袿裋裦褒褕褽襥襍襡襢襮襬裡襔褳𧞅
觜觡觷 詑 誖 讅訆訧訽詉詧詻詾誃諆諢譈讇讕詨詊讑詷謷譅譆豀
豗豣豶豷 貜 貟賩賛 趡趫趟趲 踁跴跤踪跦跲踹蹏蹩蹻 躗躞
躘跧趼蹼躧 軘軜軝軥轑轗轝軻轊轔 逯逄遫邋逴遰
邧邿郈郔 郚 郠郪郫郳鄓鄀鄆鄇鄋鄍鄎鄏鄐鄑鄣鄤鄩鄬鄻鄾酅邘鄯郾郗鄈鄚 醝
釅酏醎醑醹醻釃鉌鎤鐄鍏鈹鉀鉑鋂鉼錩鎩鏇鏊鏚錪鐐鐏鐔鐧鑅鑫錁鋆錆鎣錟鎏釙鈁鋕鋐鈊錂
銈閙閧閿 陑陾隃随隵阽𨻧 雂雃 霙霣靁霂
靝 靮靺鞃鞌鞛鞨韅靼 韡韣 韹 頄頜𬱃頠頯顗顎顒
颱 颮 飜 飥養餑餪餫餺餻饘饍𩟝餿 馪 騉
驫馵駓駔駴駽駾騢騯驦騆 髁 髽鬃鬁
魋魗 鮚鯈鯔鰉鰋鰒鰱鰾鱏鱐鱓鱧鱭鱮鱯鱻鮭鮿鰵鰐鯛魷鯕
鳷瞗鴥鵷鵻鷊鷫鸂𪆟鸘鴕鶡 麩麵麱 黟黰
鼪鼴 龡銒鉍鈷鉻鈉鈣鉥銻鋁鋝鋃錔鎂鎳鐳鏛鎢鉬錖錏鍚鋰釩鎘銾鈾鍈
鏻鍹鉎鐉鉷鋇鈄侴佰伕俤 乒乓砃砅砵砷砳砸砽硅硔碶碴碳硻磹硋硪巃嶴嵼瑔珺琺瑭璭璦湳
泩洺淌泵澬濚濙瀞灄吔吽咪吱咟吧咖啤坨堌堨堽垻𡊐垸埰壳塝墘垊埗壢煚焴烯熳燡燁𤆬煟燫
𡇥𡆫焊錶釷鈦鉄 侄佬哖塭妬嬿屘崁嵙氖氟氨氰灯畑烊琍碖磘苳萣菎莿蔘萼鯓鰮鰹
""".replace("\n", "") | zhtelecode | /zhtelecode-0.1.0.tar.gz/zhtelecode-0.1.0/zhtelecode.py | zhtelecode.py |
# zhtelecode
`zhtelecode` is a Python package that converts between
[Chinese Telegraph Codes](https://en.wikipedia.org/wiki/Chinese_telegraph_code)
and Unicode Chinese characters (both simplified and traditional).
## Usage
Convert from Unicode to telegraph codes:
```python
>>> import zhtelecode
>>> zhtelecode.to_telecode("中文信息")
['0022', '2429', '0207', '1873']
>>> zhtelecode.to_telecode("萧爱国")
['5618', '1947', '0948']
>>> zhtelecode.to_telecode("蕭愛國")
['5618', '1947', '0948']
```
Convert from telegraph codes back to Unicode:
```python
>>> telecode = ["0022", "2429", "0207", "1873"]
>>> zhtelecode.to_unicode(telecode)
'中文信息'
>>> telecode = ["5618", "1947", "0948"]
>>> zhtelecode.to_unicode(telecode, encoding="mainland")
'萧爱国'
>>> zhtelecode.to_unicode(telecode, encoding="taiwan")
'蕭愛國'
```
## Data
The codebooks are derived from the Unicode consortium's
[Unihan database](http://www.unicode.org/Public/UNIDATA/Unihan.zip) (last
updated 2022-08-03 17:20).
## License
[MIT License](LICENSE.txt).
Also see [Unicode terms of use](http://www.unicode.org/terms_of_use.html).
# Zhtools
Simple utility tools, such as caching, time tools, and more.
## Modules
- cache: A simple cache decorator (see the sketch after this list).
- data_structs: Some data structs implements and tools.
- security: some simple security methods.
- api_service: Simple way to define an api client.
- async_tools: about python async/await.
- calculation: Math calculation methods.
- cli: Command-line tools.
- concurrents: Some tools for concurrent base on multi process/thread/coroutine.
- config: Global config by this tool.
- context_manager: Common context manager.
- decorators: Common decorators.
- exceptions: Common exceptions by this tool.
- random: Random methods.
- signals: simple signal dispatcher.
- timetools: Some date/time/timezone tools.
- typing: Common type hints.
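
For illustration, here is a minimal sketch of how the cache decorator might be used. The import path and decorator signature below are assumptions, not taken from this package's documentation:

```python
from zhtools.cache import cache  # hypothetical import path

@cache()  # assumed decorator factory; check the module for the real signature
def slow_square(x: int) -> int:
    return x * x

slow_square(4)  # computed on the first call
slow_square(4)  # served from the cache on repeated calls
```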
## Update logs
- **1.0.0** 2023-06-17:
    - Refactored code to use more type hints. Now only supports Python 3.11+.
    - Because of ChatGPT, removed the `code_generator` module.
    - Removed the `data_structs.lazy` module; recommend using `lazy-object-proxy` instead.
- Remove `exporters` module.
- Remove `io_tools` module.
- Remove `redis_helper` module.
- Remove `log` module.
- Move `enum` to `data_structs.enum`.
- Move `third_parties.pydantic` to `data_structs.pydantic`.
- Move `type_hint` to `typing`.
- Change `requests` and `aiohttp` requirement to `httpx`.
- **0.3.0** 2022-07-21:
- Refactored cache.
- Add signal dispatcher.
- **0.2.3** 2022-04-08:
- Fix & add cli command.
- Add log module.
- **0.2.2** 2022-01-08:
- Add AES encrypt/decrypt method.
- **0.2.1** 2021-12-30:
- Move `convertors` to `data_structs.convertors`
- add some data_structs and methods.
- **0.1.1** 2021-12-8:
    - Optimized timetools. A global timezone can now be set.
- **0.0.11** 2021-10-21:
- Add js-like *Promise*.
- **0.0.10** 2021-06-25:
- Add go-like defer.
- **0.0.9** 2021-06-04:
- Fix setup bug.
- **0.0.8** 2021-06-04:
- Add concurrents tools.
- Add orm code generators command-line client.
- **0.0.7** 2021-05-21:
- Add singleton decorator.
- Add orm code generators.
- **0.0.6** 2021-04-25:
- Add enum module.
- **0.0.5** 2021-04-19:
- Added api service.
- Optimized the performance of XlsxReader.
- Added progress bar to XlsxExporter (supported by [tqdm](https://github.com/tqdm/tqdm)).
"""Base Processor for all processor."""
import abc
import json
import os
from typing import Dict, List, Union
from dataclasses import dataclass, field
class DataProcessorError(Exception):
pass
@dataclass
class BaseProcessor(abc.ABC):
data_dir: str
symbols: List[str] = field(default_factory=list)
speakers_map: Dict[str, int] = field(default_factory=dict)
train_f_name: str = "train.txt"
delimiter: str = "|"
positions = {
"file": 0,
"text": 1,
"speaker_name": 2,
} # positions of file,text,speaker_name after split line
f_extension: str = ".wav"
saved_mapper_path: str = None
loaded_mapper_path: str = None
# extras
items: List[List[str]] = field(default_factory=list) # text, wav_path, speaker_name
symbol_to_id: Dict[str, int] = field(default_factory=dict)
id_to_symbol: Dict[int, str] = field(default_factory=dict)
def __post_init__(self):
if self.loaded_mapper_path is not None:
self._load_mapper(loaded_path=self.loaded_mapper_path)
if self.setup_eos_token():
self.add_symbol(
self.setup_eos_token()
                )  # if this eos token is not yet present in the symbols list.
self.eos_id = self.symbol_to_id[self.setup_eos_token()]
return
if self.symbols.__len__() < 1:
raise DataProcessorError("Symbols list is empty but mapper isn't loaded")
self.create_items()
self.create_speaker_map()
self.reverse_speaker = {v: k for k, v in self.speakers_map.items()}
self.create_symbols()
if self.saved_mapper_path is not None:
self._save_mapper(saved_path=self.saved_mapper_path)
        # Processor name; useful for AutoProcessor.
self._processor_name = type(self).__name__
if self.setup_eos_token():
self.add_symbol(
self.setup_eos_token()
            )  # if this eos token is not yet present in the symbols list.
self.eos_id = self.symbol_to_id[self.setup_eos_token()]
def __getattr__(self, name: str) -> Union[str, int]:
if "_id" in name: # map symbol to id
return self.symbol_to_id[name.replace("_id", "")]
return self.symbol_to_id[name] # map symbol to value
def create_speaker_map(self):
"""
Create speaker map for dataset.
"""
sp_id = 0
for i in self.items:
speaker_name = i[-1]
if speaker_name not in self.speakers_map:
self.speakers_map[speaker_name] = sp_id
sp_id += 1
def get_speaker_id(self, name: str) -> int:
return self.speakers_map[name]
def get_speaker_name(self, speaker_id: int) -> str:
        # speakers_map maps name -> id, so use the reversed mapping to get the name
        return self.reverse_speaker[speaker_id]
def create_symbols(self):
self.symbol_to_id = {s: i for i, s in enumerate(self.symbols)}
self.id_to_symbol = {i: s for i, s in enumerate(self.symbols)}
def create_items(self):
"""
Method used to create items from training file
items struct example => text, wav_file_path, speaker_name.
        Note that speaker_name should be the last field.
"""
with open(
os.path.join(self.data_dir, self.train_f_name), mode="r", encoding="utf-8"
) as f:
for line in f:
parts = line.strip().split(self.delimiter)
wav_path = os.path.join(self.data_dir, parts[self.positions["file"]])
wav_path = (
wav_path + self.f_extension
if wav_path[-len(self.f_extension) :] != self.f_extension
else wav_path
)
text = parts[self.positions["text"]]
speaker_name = parts[self.positions["speaker_name"]]
self.items.append([text, wav_path, speaker_name])
def add_symbol(self, symbol: Union[str, list]):
if isinstance(symbol, str):
if symbol in self.symbol_to_id:
return
self.symbols.append(symbol)
symbol_id = len(self.symbol_to_id)
self.symbol_to_id[symbol] = symbol_id
self.id_to_symbol[symbol_id] = symbol
elif isinstance(symbol, list):
for i in symbol:
self.add_symbol(i)
else:
raise ValueError("A new_symbols must be a string or list of string.")
@abc.abstractmethod
def get_one_sample(self, item):
"""Get one sample from dataset items.
Args:
item: one item in Dataset items.
Dataset items may include (raw_text, speaker_id, wav_path, ...)
Returns:
sample (dict): sample dictionary return all feature used for preprocessing later.
"""
sample = {
"raw_text": None,
"text_ids": None,
"audio": None,
"utt_id": None,
"speaker_name": None,
"rate": None,
}
return sample
@abc.abstractmethod
def text_to_sequence(self, text: str):
return []
@abc.abstractmethod
def setup_eos_token(self):
"""Return eos symbol of type string."""
return "eos"
def convert_symbols_to_ids(self, symbols: Union[str, list]):
sequence = []
if isinstance(symbols, str):
            sequence.append(self.symbol_to_id[symbols])
return sequence
elif isinstance(symbols, list):
for s in symbols:
if isinstance(s, str):
                    sequence.append(self.symbol_to_id[s])
else:
raise ValueError("All elements of symbols must be a string.")
else:
raise ValueError("A symbols must be a string or list of string.")
return sequence
def _load_mapper(self, loaded_path: str = None):
"""
        Load all needed mappers from file
"""
loaded_path = (
os.path.join(self.data_dir, "mapper.json")
if loaded_path is None
else loaded_path
)
with open(loaded_path, "r") as f:
data = json.load(f)
self.speakers_map = data["speakers_map"]
self.symbol_to_id = data["symbol_to_id"]
self.id_to_symbol = {int(k): v for k, v in data["id_to_symbol"].items()}
self._processor_name = data["processor_name"]
# other keys
all_data_keys = data.keys()
for key in all_data_keys:
if key not in ["speakers_map", "symbol_to_id", "id_to_symbol"]:
setattr(self, key, data[key])
def _save_mapper(self, saved_path: str = None, extra_attrs_to_save: dict = None):
"""
Save all needed mappers to file
"""
saved_path = (
os.path.join(self.data_dir, "mapper.json")
if saved_path is None
else saved_path
)
with open(saved_path, "w") as f:
full_mapper = {
"symbol_to_id": self.symbol_to_id,
"id_to_symbol": self.id_to_symbol,
"speakers_map": self.speakers_map,
"processor_name": self._processor_name,
}
if extra_attrs_to_save:
full_mapper = {**full_mapper, **extra_attrs_to_save}
            json.dump(full_mapper, f)
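

# Illustrative sketch of a minimal concrete subclass (hypothetical, not part of
# this module). BaseProcessor is abstract, so a subclass must implement
# get_one_sample, text_to_sequence, and setup_eos_token; instantiating it still
# requires a valid `data_dir` with a train file (or a loaded mapper), as above.
class ExampleProcessor(BaseProcessor):
    def setup_eos_token(self):
        return "eos"

    def text_to_sequence(self, text: str):
        # naive character-level mapping; symbols missing from the table are skipped
        return [self.symbol_to_id[s] for s in text if s in self.symbol_to_id]

    def get_one_sample(self, item):
        text, wav_path, speaker_name = item
        return {
            "raw_text": text,
            "text_ids": self.text_to_sequence(text),
            "audio": None,  # audio loading is omitted in this sketch
            "utt_id": os.path.splitext(os.path.basename(wav_path))[0],
            "speaker_name": speaker_name,
            "rate": None,
        }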
import sys, os, argparse, codecs, string, re
# ================================================================================ #
# basic constant
# ================================================================================ #
CHINESE_DIGIS = u'零一二三四五六七八九'
BIG_CHINESE_DIGIS_SIMPLIFIED = u'零壹贰叁肆伍陆柒捌玖'
BIG_CHINESE_DIGIS_TRADITIONAL = u'零壹貳參肆伍陸柒捌玖'
SMALLER_BIG_CHINESE_UNITS_SIMPLIFIED = u'十百千万'
SMALLER_BIG_CHINESE_UNITS_TRADITIONAL = u'拾佰仟萬'
LARGER_CHINESE_NUMERING_UNITS_SIMPLIFIED = u'亿兆京垓秭穰沟涧正载'
LARGER_CHINESE_NUMERING_UNITS_TRADITIONAL = u'億兆京垓秭穰溝澗正載'
SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED = u'十百千万'
SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL = u'拾佰仟萬'
ZERO_ALT = u'〇'
ONE_ALT = u'幺'
TWO_ALTS = [u'两', u'兩']
POSITIVE = [u'正', u'正']
NEGATIVE = [u'负', u'負']
POINT = [u'点', u'點']
# PLUS = [u'加', u'加']
# SIL = [u'杠', u'槓']
# Types of Chinese numeral systems
NUMBERING_TYPES = ['low', 'mid', 'high']
CURRENCY_NAMES = '(人民币|美元|日元|英镑|欧元|马克|法郎|加拿大元|澳元|港币|先令|芬兰马克|爱尔兰镑|' \
'里拉|荷兰盾|埃斯库多|比塞塔|印尼盾|林吉特|新西兰元|比索|卢布|新加坡元|韩元|泰铢)'
CURRENCY_UNITS = '((亿|千万|百万|万|千|百)|(亿|千万|百万|万|千|百|)元|(亿|千万|百万|万|千|百|)块|角|毛|分)'
COM_QUANTIFIERS = '(匹|张|座|回|场|尾|条|个|首|阙|阵|网|炮|顶|丘|棵|只|支|袭|辆|挑|担|颗|壳|窠|曲|墙|群|腔|' \
'砣|座|客|贯|扎|捆|刀|令|打|手|罗|坡|山|岭|江|溪|钟|队|单|双|对|出|口|头|脚|板|跳|枝|件|贴|' \
'针|线|管|名|位|身|堂|课|本|页|家|户|层|丝|毫|厘|分|钱|两|斤|担|铢|石|钧|锱|忽|(千|毫|微)克|' \
'毫|厘|分|寸|尺|丈|里|寻|常|铺|程|(千|分|厘|毫|微)米|撮|勺|合|升|斗|石|盘|碗|碟|叠|桶|笼|盆|' \
'盒|杯|钟|斛|锅|簋|篮|盘|桶|罐|瓶|壶|卮|盏|箩|箱|煲|啖|袋|钵|年|月|日|季|刻|时|周|天|秒|分|旬|' \
'纪|岁|世|更|夜|春|夏|秋|冬|代|伏|辈|丸|泡|粒|颗|幢|堆|条|根|支|道|面|片|张|颗|块)'
# punctuation information is based on the Zhon project (https://github.com/tsroten/zhon.git)
CHINESE_PUNC_STOP = '!?。。'
CHINESE_PUNC_NON_STOP = '"#$%&'()*+,-/:;<=>@[\]^_`{|}~⦅⦆「」、、〃《》「」『』【】〔〕〖〗〘〙〚〛〜〝〞〟〰〾〿–—‘’‛“”„‟…‧﹏'
CHINESE_PUNC_OTHER = '·〈〉-'
CHINESE_PUNC_LIST = CHINESE_PUNC_STOP + CHINESE_PUNC_NON_STOP + CHINESE_PUNC_OTHER
# ================================================================================ #
# basic class
# ================================================================================ #
class ChineseChar(object):
"""
    A Chinese character.
    Each character has a simplified and a traditional form,
    e.g. simplified = '负', traditional = '負'.
    During conversion it can be rendered as either form.
"""
def __init__(self, simplified, traditional):
self.simplified = simplified
self.traditional = traditional
#self.__repr__ = self.__str__
def __str__(self):
return self.simplified or self.traditional or None
def __repr__(self):
return self.__str__()
class ChineseNumberUnit(ChineseChar):
"""
    A Chinese digit/numbering-unit character.
    Besides the simplified and traditional forms, each character also has
    an uppercase (financial) form, e.g. '陆' and '陸'.
"""
def __init__(self, power, simplified, traditional, big_s, big_t):
super(ChineseNumberUnit, self).__init__(simplified, traditional)
self.power = power
self.big_s = big_s
self.big_t = big_t
def __str__(self):
return '10^{}'.format(self.power)
@classmethod
def create(cls, index, value, numbering_type=NUMBERING_TYPES[1], small_unit=False):
if small_unit:
return ChineseNumberUnit(power=index + 1,
simplified=value[0], traditional=value[1], big_s=value[1], big_t=value[1])
elif numbering_type == NUMBERING_TYPES[0]:
return ChineseNumberUnit(power=index + 8,
simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1])
elif numbering_type == NUMBERING_TYPES[1]:
return ChineseNumberUnit(power=(index + 2) * 4,
simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1])
elif numbering_type == NUMBERING_TYPES[2]:
return ChineseNumberUnit(power=pow(2, index + 3),
simplified=value[0], traditional=value[1], big_s=value[0], big_t=value[1])
else:
raise ValueError(
'Counting type should be in {0} ({1} provided).'.format(NUMBERING_TYPES, numbering_type))
class ChineseNumberDigit(ChineseChar):
"""
    A Chinese digit character.
"""
def __init__(self, value, simplified, traditional, big_s, big_t, alt_s=None, alt_t=None):
super(ChineseNumberDigit, self).__init__(simplified, traditional)
self.value = value
self.big_s = big_s
self.big_t = big_t
self.alt_s = alt_s
self.alt_t = alt_t
def __str__(self):
return str(self.value)
@classmethod
def create(cls, i, v):
return ChineseNumberDigit(i, v[0], v[1], v[2], v[3])
class ChineseMath(ChineseChar):
"""
    A Chinese math-symbol character (e.g. positive/negative sign, decimal point).
"""
def __init__(self, simplified, traditional, symbol, expression=None):
super(ChineseMath, self).__init__(simplified, traditional)
self.symbol = symbol
self.expression = expression
self.big_s = simplified
self.big_t = traditional
CC, CNU, CND, CM = ChineseChar, ChineseNumberUnit, ChineseNumberDigit, ChineseMath
class NumberSystem(object):
"""
    The Chinese numeral system.
"""
pass
class MathSymbol(object):
"""
    Math symbols (simplified/traditional) used in the Chinese numeral system, e.g.
positive = ['正', '正']
negative = ['负', '負']
point = ['点', '點']
"""
def __init__(self, positive, negative, point):
self.positive = positive
self.negative = negative
self.point = point
def __iter__(self):
for v in self.__dict__.values():
yield v
# class OtherSymbol(object):
# """
#     Other symbols
# """
#
# def __init__(self, sil):
# self.sil = sil
#
# def __iter__(self):
# for v in self.__dict__.values():
# yield v
# ================================================================================ #
# basic utils
# ================================================================================ #
def create_system(numbering_type=NUMBERING_TYPES[1]):
"""
    Create the numeral system for the given numbering type (default: mid).
    NUMBERING_TYPES = ['low', 'mid', 'high'] are the Chinese numeral system types:
    low: '兆' = '亿' * '十' = $10^{9}$, '京' = '兆' * '十', etc.
    mid: '兆' = '亿' * '万' = $10^{12}$, '京' = '兆' * '万', etc.
    high: '兆' = '亿' * '亿' = $10^{16}$, '京' = '兆' * '兆', etc.
    Returns the corresponding numeral system.
"""
# chinese number units of '亿' and larger
all_larger_units = zip(
LARGER_CHINESE_NUMERING_UNITS_SIMPLIFIED, LARGER_CHINESE_NUMERING_UNITS_TRADITIONAL)
larger_units = [CNU.create(i, v, numbering_type, False)
for i, v in enumerate(all_larger_units)]
# chinese number units of '十, 百, 千, 万'
all_smaller_units = zip(
SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED, SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL)
smaller_units = [CNU.create(i, v, small_unit=True)
for i, v in enumerate(all_smaller_units)]
# digis
chinese_digis = zip(CHINESE_DIGIS, CHINESE_DIGIS,
BIG_CHINESE_DIGIS_SIMPLIFIED, BIG_CHINESE_DIGIS_TRADITIONAL)
digits = [CND.create(i, v) for i, v in enumerate(chinese_digis)]
digits[0].alt_s, digits[0].alt_t = ZERO_ALT, ZERO_ALT
digits[1].alt_s, digits[1].alt_t = ONE_ALT, ONE_ALT
digits[2].alt_s, digits[2].alt_t = TWO_ALTS[0], TWO_ALTS[1]
# symbols
positive_cn = CM(POSITIVE[0], POSITIVE[1], '+', lambda x: x)
negative_cn = CM(NEGATIVE[0], NEGATIVE[1], '-', lambda x: -x)
point_cn = CM(POINT[0], POINT[1], '.', lambda x,
y: float(str(x) + '.' + str(y)))
# sil_cn = CM(SIL[0], SIL[1], '-', lambda x, y: float(str(x) + '-' + str(y)))
system = NumberSystem()
system.units = smaller_units + larger_units
system.digits = digits
system.math = MathSymbol(positive_cn, negative_cn, point_cn)
# system.symbols = OtherSymbol(sil_cn)
return system
def chn2num(chinese_string, numbering_type=NUMBERING_TYPES[1]):
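    """Convert a Chinese numeral string to an Arabic numeral string, e.g. '两千万' -> '20000000'."""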
def get_symbol(char, system):
for u in system.units:
if char in [u.traditional, u.simplified, u.big_s, u.big_t]:
return u
for d in system.digits:
if char in [d.traditional, d.simplified, d.big_s, d.big_t, d.alt_s, d.alt_t]:
return d
for m in system.math:
if char in [m.traditional, m.simplified]:
return m
def string2symbols(chinese_string, system):
int_string, dec_string = chinese_string, ''
for p in [system.math.point.simplified, system.math.point.traditional]:
if p in chinese_string:
int_string, dec_string = chinese_string.split(p)
break
return [get_symbol(c, system) for c in int_string], \
[get_symbol(c, system) for c in dec_string]
def correct_symbols(integer_symbols, system):
"""
        Expand shorthand, e.g. 一百八 to 一百八十;
        split compound units, e.g. 一亿一千三百万 to 一亿 一千万 三百万.
"""
if integer_symbols and isinstance(integer_symbols[0], CNU):
if integer_symbols[0].power == 1:
integer_symbols = [system.digits[1]] + integer_symbols
if len(integer_symbols) > 1:
if isinstance(integer_symbols[-1], CND) and isinstance(integer_symbols[-2], CNU):
integer_symbols.append(
CNU(integer_symbols[-2].power - 1, None, None, None, None))
result = []
unit_count = 0
for s in integer_symbols:
if isinstance(s, CND):
result.append(s)
unit_count = 0
elif isinstance(s, CNU):
current_unit = CNU(s.power, None, None, None, None)
unit_count += 1
if unit_count == 1:
result.append(current_unit)
elif unit_count > 1:
for i in range(len(result)):
if isinstance(result[-i - 1], CNU) and result[-i - 1].power < current_unit.power:
result[-i - 1] = CNU(result[-i - 1].power +
current_unit.power, None, None, None, None)
return result
def compute_value(integer_symbols):
"""
        Compute the numeric value.
        When the current unit is larger than the previous unit, the current unit
        multiplies everything accumulated so far instead of being added to it,
        e.g. '两千万' = 2000 * 10000, not 2000 + 10000.
"""
value = [0]
last_power = 0
for s in integer_symbols:
if isinstance(s, CND):
value[-1] = s.value
elif isinstance(s, CNU):
value[-1] *= pow(10, s.power)
if s.power > last_power:
value[:-1] = list(map(lambda v: v *
pow(10, s.power), value[:-1]))
last_power = s.power
value.append(0)
return sum(value)
system = create_system(numbering_type)
int_part, dec_part = string2symbols(chinese_string, system)
int_part = correct_symbols(int_part, system)
int_str = str(compute_value(int_part))
dec_str = ''.join([str(d.value) for d in dec_part])
if dec_part:
return '{0}.{1}'.format(int_str, dec_str)
else:
return int_str
def num2chn(number_string, numbering_type=NUMBERING_TYPES[1], big=False,
traditional=False, alt_zero=False, alt_one=False, alt_two=True,
use_zeros=True, use_units=True):
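    """Convert an Arabic numeral string to a Chinese numeral string, e.g. '20000000' -> '两千万'."""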
def get_value(value_string, use_zeros=True):
striped_string = value_string.lstrip('0')
# record nothing if all zeros
if not striped_string:
return []
# record one digits
elif len(striped_string) == 1:
if use_zeros and len(value_string) != len(striped_string):
return [system.digits[0], system.digits[int(striped_string)]]
else:
return [system.digits[int(striped_string)]]
# recursively record multiple digits
else:
result_unit = next(u for u in reversed(
system.units) if u.power < len(striped_string))
result_string = value_string[:-result_unit.power]
return get_value(result_string) + [result_unit] + get_value(striped_string[-result_unit.power:])
system = create_system(numbering_type)
int_dec = number_string.split('.')
if len(int_dec) == 1:
int_string = int_dec[0]
dec_string = ""
elif len(int_dec) == 2:
int_string = int_dec[0]
dec_string = int_dec[1]
else:
raise ValueError(
"invalid input num string with more than one dot: {}".format(number_string))
if use_units and len(int_string) > 1:
result_symbols = get_value(int_string)
else:
result_symbols = [system.digits[int(c)] for c in int_string]
dec_symbols = [system.digits[int(c)] for c in dec_string]
if dec_string:
result_symbols += [system.math.point] + dec_symbols
if alt_two:
liang = CND(2, system.digits[2].alt_s, system.digits[2].alt_t,
system.digits[2].big_s, system.digits[2].big_t)
for i, v in enumerate(result_symbols):
if isinstance(v, CND) and v.value == 2:
next_symbol = result_symbols[i +
1] if i < len(result_symbols) - 1 else None
previous_symbol = result_symbols[i - 1] if i > 0 else None
if isinstance(next_symbol, CNU) and isinstance(previous_symbol, (CNU, type(None))):
if next_symbol.power != 1 and ((previous_symbol is None) or (previous_symbol.power != 1)):
result_symbols[i] = liang
# if big is True, '两' will not be used and `alt_two` has no impact on output
if big:
attr_name = 'big_'
if traditional:
attr_name += 't'
else:
attr_name += 's'
else:
if traditional:
attr_name = 'traditional'
else:
attr_name = 'simplified'
result = ''.join([getattr(s, attr_name) for s in result_symbols])
# if not use_zeros:
# result = result.strip(getattr(system.digits[0], attr_name))
if alt_zero:
result = result.replace(
getattr(system.digits[0], attr_name), system.digits[0].alt_s)
if alt_one:
result = result.replace(
getattr(system.digits[1], attr_name), system.digits[1].alt_s)
for i, p in enumerate(POINT):
if result.startswith(p):
return CHINESE_DIGIS[0] + result
# ^10, 11, .., 19
if len(result) >= 2 and result[1] in [SMALLER_CHINESE_NUMERING_UNITS_SIMPLIFIED[0],
SMALLER_CHINESE_NUMERING_UNITS_TRADITIONAL[0]] and \
result[0] in [CHINESE_DIGIS[1], BIG_CHINESE_DIGIS_SIMPLIFIED[1], BIG_CHINESE_DIGIS_TRADITIONAL[1]]:
result = result[1:]
return result
# ================================================================================ #
# different types of rewriters
# ================================================================================ #
class Cardinal:
"""
    CARDINAL class (plain numbers).
"""
def __init__(self, cardinal=None, chntext=None):
self.cardinal = cardinal
self.chntext = chntext
def chntext2cardinal(self):
return chn2num(self.chntext)
def cardinal2chntext(self):
return num2chn(self.cardinal)
class Digit:
"""
    DIGIT class (digit-by-digit readings).
"""
def __init__(self, digit=None, chntext=None):
self.digit = digit
self.chntext = chntext
# def chntext2digit(self):
# return chn2num(self.chntext)
def digit2chntext(self):
return num2chn(self.digit, alt_two=False, use_units=False)
class TelePhone:
"""
    TELEPHONE class.
"""
def __init__(self, telephone=None, raw_chntext=None, chntext=None):
self.telephone = telephone
self.raw_chntext = raw_chntext
self.chntext = chntext
# def chntext2telephone(self):
# sil_parts = self.raw_chntext.split('<SIL>')
# self.telephone = '-'.join([
# str(chn2num(p)) for p in sil_parts
# ])
# return self.telephone
def telephone2chntext(self, fixed=False):
if fixed:
sil_parts = self.telephone.split('-')
self.raw_chntext = '<SIL>'.join([
num2chn(part, alt_two=False, use_units=False) for part in sil_parts
])
self.chntext = self.raw_chntext.replace('<SIL>', '')
else:
sp_parts = self.telephone.strip('+').split()
self.raw_chntext = '<SP>'.join([
num2chn(part, alt_two=False, use_units=False) for part in sp_parts
])
self.chntext = self.raw_chntext.replace('<SP>', '')
return self.chntext
class Fraction:
"""
    FRACTION class.
"""
def __init__(self, fraction=None, chntext=None):
self.fraction = fraction
self.chntext = chntext
def chntext2fraction(self):
denominator, numerator = self.chntext.split('分之')
return chn2num(numerator) + '/' + chn2num(denominator)
def fraction2chntext(self):
numerator, denominator = self.fraction.split('/')
return num2chn(denominator) + '分之' + num2chn(numerator)
class Date:
"""
    DATE class.
"""
def __init__(self, date=None, chntext=None):
self.date = date
self.chntext = chntext
# def chntext2date(self):
# chntext = self.chntext
# try:
# year, other = chntext.strip().split('年', maxsplit=1)
# year = Digit(chntext=year).digit2chntext() + '年'
# except ValueError:
# other = chntext
# year = ''
# if other:
# try:
# month, day = other.strip().split('月', maxsplit=1)
# month = Cardinal(chntext=month).chntext2cardinal() + '月'
# except ValueError:
# day = chntext
# month = ''
# if day:
# day = Cardinal(chntext=day[:-1]).chntext2cardinal() + day[-1]
# else:
# month = ''
# day = ''
# date = year + month + day
# self.date = date
# return self.date
def date2chntext(self):
date = self.date
try:
year, other = date.strip().split('年', 1)
year = Digit(digit=year).digit2chntext() + '年'
except ValueError:
other = date
year = ''
if other:
try:
month, day = other.strip().split('月', 1)
month = Cardinal(cardinal=month).cardinal2chntext() + '月'
except ValueError:
day = date
month = ''
if day:
day = Cardinal(cardinal=day[:-1]).cardinal2chntext() + day[-1]
else:
month = ''
day = ''
chntext = year + month + day
self.chntext = chntext
return self.chntext
class Money:
"""
    MONEY class.
"""
def __init__(self, money=None, chntext=None):
self.money = money
self.chntext = chntext
# def chntext2money(self):
# return self.money
def money2chntext(self):
money = self.money
pattern = re.compile(r'(\d+(\.\d+)?)')
matchers = pattern.findall(money)
if matchers:
for matcher in matchers:
money = money.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext())
self.chntext = money
return self.chntext
class Percentage:
"""
    PERCENTAGE class.
"""
def __init__(self, percentage=None, chntext=None):
self.percentage = percentage
self.chntext = chntext
def chntext2percentage(self):
return chn2num(self.chntext.strip().strip('百分之')) + '%'
def percentage2chntext(self):
return '百分之' + num2chn(self.percentage.strip().strip('%'))
# ================================================================================ #
# NSW Normalizer
# ================================================================================ #
class NSWNormalizer:
def __init__(self, raw_text):
self.raw_text = '^' + raw_text + '$'
self.norm_text = ''
def _particular(self):
text = self.norm_text
pattern = re.compile(r"(([a-zA-Z]+)二([a-zA-Z]+))")
matchers = pattern.findall(text)
if matchers:
# print('particular')
for matcher in matchers:
text = text.replace(matcher[0], matcher[1]+'2'+matcher[2], 1)
self.norm_text = text
return self.norm_text
def normalize(self):
text = self.raw_text
        # Normalize dates
pattern = re.compile(r"\D+((([089]\d|(19|20)\d{2})年)?(\d{1,2}月(\d{1,2}[日号])?)?)")
matchers = pattern.findall(text)
if matchers:
#print('date')
for matcher in matchers:
text = text.replace(matcher[0], Date(date=matcher[0]).date2chntext(), 1)
        # Normalize money amounts
pattern = re.compile(r"\D+((\d+(\.\d+)?)[多余几]?" + CURRENCY_UNITS + r"(\d" + CURRENCY_UNITS + r"?)?)")
matchers = pattern.findall(text)
if matchers:
#print('money')
for matcher in matchers:
text = text.replace(matcher[0], Money(money=matcher[0]).money2chntext(), 1)
        # Normalize landline/mobile phone numbers
        # Mobile
        # http://www.jihaoba.com/news/show/13680
        # China Mobile: 139, 138, 137, 136, 135, 134, 159, 158, 157, 150, 151, 152, 188, 187, 182, 183, 184, 178, 198
        # China Unicom: 130, 131, 132, 156, 155, 186, 185, 176
        # China Telecom: 133, 153, 189, 180, 181, 177
pattern = re.compile(r"\D((\+?86 ?)?1([38]\d|5[0-35-9]|7[678]|9[89])\d{8})\D")
matchers = pattern.findall(text)
if matchers:
#print('telephone')
for matcher in matchers:
text = text.replace(matcher[0], TelePhone(telephone=matcher[0]).telephone2chntext(), 1)
        # Landline
pattern = re.compile(r"\D((0(10|2[1-3]|[3-9]\d{2})-?)?[1-9]\d{6,7})\D")
matchers = pattern.findall(text)
if matchers:
# print('fixed telephone')
for matcher in matchers:
text = text.replace(matcher[0], TelePhone(telephone=matcher[0]).telephone2chntext(fixed=True), 1)
        # Normalize fractions
pattern = re.compile(r"(\d+/\d+)")
matchers = pattern.findall(text)
if matchers:
#print('fraction')
for matcher in matchers:
text = text.replace(matcher, Fraction(fraction=matcher).fraction2chntext(), 1)
        # Normalize percentages
text = text.replace('%', '%')
pattern = re.compile(r"(\d+(\.\d+)?%)")
matchers = pattern.findall(text)
if matchers:
#print('percentage')
for matcher in matchers:
text = text.replace(matcher[0], Percentage(percentage=matcher[0]).percentage2chntext(), 1)
        # Normalize plain numbers followed by quantifiers
pattern = re.compile(r"(\d+(\.\d+)?)[多余几]?" + COM_QUANTIFIERS)
matchers = pattern.findall(text)
if matchers:
#print('cardinal+quantifier')
for matcher in matchers:
text = text.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext(), 1)
        # Normalize long digit strings (IDs/serial numbers)
pattern = re.compile(r"(\d{4,32})")
matchers = pattern.findall(text)
if matchers:
#print('digit')
for matcher in matchers:
text = text.replace(matcher, Digit(digit=matcher).digit2chntext(), 1)
        # Normalize remaining plain cardinals
pattern = re.compile(r"(\d+(\.\d+)?)")
matchers = pattern.findall(text)
if matchers:
#print('cardinal')
for matcher in matchers:
text = text.replace(matcher[0], Cardinal(cardinal=matcher[0]).cardinal2chntext(), 1)
self.norm_text = text
self._particular()
return self.norm_text.lstrip('^').rstrip('$')
def nsw_test_case(raw_text):
print('I:' + raw_text)
print('O:' + NSWNormalizer(raw_text).normalize())
print('')
def nsw_test():
nsw_test_case('固话:0595-23865596或23880880。')
    nsw_test_case('固话:0595-23865596或23880880。')
nsw_test_case('手机:+86 19859213959或15659451527。')
nsw_test_case('分数:32477/76391。')
nsw_test_case('百分数:80.03%。')
nsw_test_case('编号:31520181154418。')
nsw_test_case('纯数:2983.07克或12345.60米。')
nsw_test_case('日期:1999年2月20日或09年3月15号。')
nsw_test_case('金钱:12块5,34.5元,20.1万')
nsw_test_case('特殊:O2O或B2C。')
nsw_test_case('3456万吨')
nsw_test_case('2938个')
nsw_test_case('938')
nsw_test_case('今天吃了115个小笼包231个馒头')
nsw_test_case('有62%的概率')
if __name__ == '__main__':
#nsw_test()
p = argparse.ArgumentParser()
p.add_argument('ifile', help='input filename, assume utf-8 encoding')
p.add_argument('ofile', help='output filename')
p.add_argument('--to_upper', action='store_true', help='convert to upper case')
p.add_argument('--to_lower', action='store_true', help='convert to lower case')
p.add_argument('--has_key', action='store_true', help="input text has Kaldi's key as first field.")
p.add_argument('--log_interval', type=int, default=100000, help='log interval in number of processed lines')
args = p.parse_args()
ifile = codecs.open(args.ifile, 'r', 'utf8')
ofile = codecs.open(args.ofile, 'w+', 'utf8')
n = 0
for l in ifile:
key = ''
text = ''
if args.has_key:
cols = l.split(maxsplit=1)
key = cols[0]
if len(cols) == 2:
text = cols[1].strip()
else:
text = ''
else:
text = l.strip()
# cases
        if args.to_upper and args.to_lower:
            sys.stderr.write('cn_tn.py: use only one of --to_upper and --to_lower\n')
            sys.exit(1)
if args.to_upper:
text = text.upper()
if args.to_lower:
text = text.lower()
# NSW(Non-Standard-Word) normalization
text = NSWNormalizer(text).normalize()
# Punctuations removal
old_chars = CHINESE_PUNC_LIST + string.punctuation # includes all CN and EN punctuations
new_chars = ' ' * len(old_chars)
del_chars = ''
text = text.translate(str.maketrans(old_chars, new_chars, del_chars))
#
if args.has_key:
ofile.write(key + '\t' + text + '\n')
else:
if text.strip() != '': # skip empty line in pure text format(without Kaldi's utt key)
ofile.write(text + '\n')
n += 1
if n % args.log_interval == 0:
sys.stderr.write("cn_tn.py: {} lines done.\n".format(n))
sys.stderr.flush()
sys.stderr.write("cn_tn.py: {} lines done in total.\n".format(n))
sys.stderr.flush()
ifile.close()
ofile.close() | zhtts | /tensorflow_tts/processor/cn_tn.py | cn_tn.py |
"""Perform preprocessing and raw feature extraction for Baker dataset."""
import os
import re
from typing import Dict, List, Union, Tuple, Any
# import librosa
import numpy as np
# import soundfile as sf
from dataclasses import dataclass, field
from pypinyin import Style
from pypinyin.contrib.neutral_tone import NeutralToneWith5Mixin
from pypinyin.converter import DefaultConverter
from pypinyin.core import Pinyin
from .base_processor import BaseProcessor
from .cn_tn import NSWNormalizer
_pad = ["pad"]
_eos = ["eos"]
_pause = ["sil", "#0", "#1", "#2", "#3"]
_initials = [
"^",
"b",
"c",
"ch",
"d",
"f",
"g",
"h",
"j",
"k",
"l",
"m",
"n",
"p",
"q",
"r",
"s",
"sh",
"t",
"x",
"z",
"zh",
]
_tones = ["1", "2", "3", "4", "5"]
_finals = [
"a",
"ai",
"an",
"ang",
"ao",
"e",
"ei",
"en",
"eng",
"er",
"i",
"ia",
"ian",
"iang",
"iao",
"ie",
"ii",
"iii",
"in",
"ing",
"iong",
"iou",
"o",
"ong",
"ou",
"u",
"ua",
"uai",
"uan",
"uang",
"uei",
"uen",
"ueng",
"uo",
"v",
"van",
"ve",
"vn",
]
ALPHA_PHONE_DICT = {
'A': ['EE', 'EI1'],
'B': ['B', 'I4'],
'C': ['S', 'I1'],
'D': ['D', 'I4'],
'E': ['II', 'I4'],
'F': ['EE', 'EI2', 'F', 'U5'],
'G': ['J', 'I4'],
'H': ['EE', 'EI1', 'Q', 'U1'],
'I': ['AA', 'AI4'],
'J': ['J', 'IE4'],
'K': ['K', 'IE4'],
'L': ['EE', 'EI2', 'L', 'E5'],
'M': ['EE', 'EI2', 'M', 'ENG5'],
'N': ['EE', 'EN1'],
'O': ['OO', 'OU1'],
'P': ['P', 'I1'],
'Q': ['Q', 'OU1'],
'R': ['AA', 'AI1', 'EE', 'ER5'],
'S': ['EE', 'EI2', 'S', 'IY1'],
'T': ['T', 'I4'],
'U': ['II', 'IU1'],
'V': ['UU', 'UI1'],
'W': ['D', 'A2', 'B', 'U5', 'L', 'IU5'],
'X': ['EE', 'EI2', 'K', 'IE5', 'S', 'IY1'],
'Y': ['UU', 'UAI1'],
'Z': ['Z', 'E1']}
_alpha_phones = []
[_alpha_phones.extend(i) for i in ALPHA_PHONE_DICT.values()]
# BAKER_SYMBOLS = _pad + _pause + _initials + [i + j for i in _finals for j in _tones] + _eos + _alpha_phones
# TODO: English letters are not supported yet
BAKER_SYMBOLS = _pad + _pause + _initials + [i + j for i in _finals for j in _tones] + _eos
PINYIN_DICT = {
"a": ("^", "a"),
"ai": ("^", "ai"),
"an": ("^", "an"),
"ang": ("^", "ang"),
"ao": ("^", "ao"),
"ba": ("b", "a"),
"bai": ("b", "ai"),
"ban": ("b", "an"),
"bang": ("b", "ang"),
"bao": ("b", "ao"),
"be": ("b", "e"),
"bei": ("b", "ei"),
"ben": ("b", "en"),
"beng": ("b", "eng"),
"bi": ("b", "i"),
"bian": ("b", "ian"),
"biao": ("b", "iao"),
"bie": ("b", "ie"),
"bin": ("b", "in"),
"bing": ("b", "ing"),
"bo": ("b", "o"),
"bu": ("b", "u"),
"ca": ("c", "a"),
"cai": ("c", "ai"),
"can": ("c", "an"),
"cang": ("c", "ang"),
"cao": ("c", "ao"),
"ce": ("c", "e"),
"cen": ("c", "en"),
"ceng": ("c", "eng"),
"cha": ("ch", "a"),
"chai": ("ch", "ai"),
"chan": ("ch", "an"),
"chang": ("ch", "ang"),
"chao": ("ch", "ao"),
"che": ("ch", "e"),
"chen": ("ch", "en"),
"cheng": ("ch", "eng"),
"chi": ("ch", "iii"),
"chong": ("ch", "ong"),
"chou": ("ch", "ou"),
"chu": ("ch", "u"),
"chua": ("ch", "ua"),
"chuai": ("ch", "uai"),
"chuan": ("ch", "uan"),
"chuang": ("ch", "uang"),
"chui": ("ch", "uei"),
"chun": ("ch", "uen"),
"chuo": ("ch", "uo"),
"ci": ("c", "ii"),
"cong": ("c", "ong"),
"cou": ("c", "ou"),
"cu": ("c", "u"),
"cuan": ("c", "uan"),
"cui": ("c", "uei"),
"cun": ("c", "uen"),
"cuo": ("c", "uo"),
"da": ("d", "a"),
"dai": ("d", "ai"),
"dan": ("d", "an"),
"dang": ("d", "ang"),
"dao": ("d", "ao"),
"de": ("d", "e"),
"dei": ("d", "ei"),
"den": ("d", "en"),
"deng": ("d", "eng"),
"di": ("d", "i"),
"dia": ("d", "ia"),
"dian": ("d", "ian"),
"diao": ("d", "iao"),
"die": ("d", "ie"),
"ding": ("d", "ing"),
"diu": ("d", "iou"),
"dong": ("d", "ong"),
"dou": ("d", "ou"),
"du": ("d", "u"),
"duan": ("d", "uan"),
"dui": ("d", "uei"),
"dun": ("d", "uen"),
"duo": ("d", "uo"),
"e": ("^", "e"),
"ei": ("^", "ei"),
"en": ("^", "en"),
"ng": ("^", "en"),
"eng": ("^", "eng"),
"er": ("^", "er"),
"fa": ("f", "a"),
"fan": ("f", "an"),
"fang": ("f", "ang"),
"fei": ("f", "ei"),
"fen": ("f", "en"),
"feng": ("f", "eng"),
"fo": ("f", "o"),
"fou": ("f", "ou"),
"fu": ("f", "u"),
"ga": ("g", "a"),
"gai": ("g", "ai"),
"gan": ("g", "an"),
"gang": ("g", "ang"),
"gao": ("g", "ao"),
"ge": ("g", "e"),
"gei": ("g", "ei"),
"gen": ("g", "en"),
"geng": ("g", "eng"),
"gong": ("g", "ong"),
"gou": ("g", "ou"),
"gu": ("g", "u"),
"gua": ("g", "ua"),
"guai": ("g", "uai"),
"guan": ("g", "uan"),
"guang": ("g", "uang"),
"gui": ("g", "uei"),
"gun": ("g", "uen"),
"guo": ("g", "uo"),
"ha": ("h", "a"),
"hai": ("h", "ai"),
"han": ("h", "an"),
"hang": ("h", "ang"),
"hao": ("h", "ao"),
"he": ("h", "e"),
"hei": ("h", "ei"),
"hen": ("h", "en"),
"heng": ("h", "eng"),
"hong": ("h", "ong"),
"hou": ("h", "ou"),
"hu": ("h", "u"),
"hua": ("h", "ua"),
"huai": ("h", "uai"),
"huan": ("h", "uan"),
"huang": ("h", "uang"),
"hui": ("h", "uei"),
"hun": ("h", "uen"),
"huo": ("h", "uo"),
"ji": ("j", "i"),
"jia": ("j", "ia"),
"jian": ("j", "ian"),
"jiang": ("j", "iang"),
"jiao": ("j", "iao"),
"jie": ("j", "ie"),
"jin": ("j", "in"),
"jing": ("j", "ing"),
"jiong": ("j", "iong"),
"jiu": ("j", "iou"),
"ju": ("j", "v"),
"juan": ("j", "van"),
"jue": ("j", "ve"),
"jun": ("j", "vn"),
"ka": ("k", "a"),
"kai": ("k", "ai"),
"kan": ("k", "an"),
"kang": ("k", "ang"),
"kao": ("k", "ao"),
"ke": ("k", "e"),
"kei": ("k", "ei"),
"ken": ("k", "en"),
"keng": ("k", "eng"),
"kong": ("k", "ong"),
"kou": ("k", "ou"),
"ku": ("k", "u"),
"kua": ("k", "ua"),
"kuai": ("k", "uai"),
"kuan": ("k", "uan"),
"kuang": ("k", "uang"),
"kui": ("k", "uei"),
"kun": ("k", "uen"),
"kuo": ("k", "uo"),
"la": ("l", "a"),
"lai": ("l", "ai"),
"lan": ("l", "an"),
"lang": ("l", "ang"),
"lao": ("l", "ao"),
"le": ("l", "e"),
"lei": ("l", "ei"),
"leng": ("l", "eng"),
"li": ("l", "i"),
"lia": ("l", "ia"),
"lian": ("l", "ian"),
"liang": ("l", "iang"),
"liao": ("l", "iao"),
"lie": ("l", "ie"),
"lin": ("l", "in"),
"ling": ("l", "ing"),
"liu": ("l", "iou"),
"lo": ("l", "o"),
"long": ("l", "ong"),
"lou": ("l", "ou"),
"lu": ("l", "u"),
"lv": ("l", "v"),
"luan": ("l", "uan"),
"lve": ("l", "ve"),
"lue": ("l", "ve"),
"lun": ("l", "uen"),
"luo": ("l", "uo"),
"ma": ("m", "a"),
"mai": ("m", "ai"),
"man": ("m", "an"),
"mang": ("m", "ang"),
"mao": ("m", "ao"),
"me": ("m", "e"),
"mei": ("m", "ei"),
"men": ("m", "en"),
"meng": ("m", "eng"),
"mi": ("m", "i"),
"mian": ("m", "ian"),
"miao": ("m", "iao"),
"mie": ("m", "ie"),
"min": ("m", "in"),
"ming": ("m", "ing"),
"miu": ("m", "iou"),
"mo": ("m", "o"),
"mou": ("m", "ou"),
"mu": ("m", "u"),
"na": ("n", "a"),
"nai": ("n", "ai"),
"nan": ("n", "an"),
"nang": ("n", "ang"),
"nao": ("n", "ao"),
"ne": ("n", "e"),
"nei": ("n", "ei"),
"nen": ("n", "en"),
"neng": ("n", "eng"),
"ni": ("n", "i"),
"nia": ("n", "ia"),
"nian": ("n", "ian"),
"niang": ("n", "iang"),
"niao": ("n", "iao"),
"nie": ("n", "ie"),
"nin": ("n", "in"),
"ning": ("n", "ing"),
"niu": ("n", "iou"),
"nong": ("n", "ong"),
"nou": ("n", "ou"),
"nu": ("n", "u"),
"nv": ("n", "v"),
"nuan": ("n", "uan"),
"nve": ("n", "ve"),
"nue": ("n", "ve"),
"nuo": ("n", "uo"),
"o": ("^", "o"),
"ou": ("^", "ou"),
"pa": ("p", "a"),
"pai": ("p", "ai"),
"pan": ("p", "an"),
"pang": ("p", "ang"),
"pao": ("p", "ao"),
"pe": ("p", "e"),
"pei": ("p", "ei"),
"pen": ("p", "en"),
"peng": ("p", "eng"),
"pi": ("p", "i"),
"pian": ("p", "ian"),
"piao": ("p", "iao"),
"pie": ("p", "ie"),
"pin": ("p", "in"),
"ping": ("p", "ing"),
"po": ("p", "o"),
"pou": ("p", "ou"),
"pu": ("p", "u"),
"qi": ("q", "i"),
"qia": ("q", "ia"),
"qian": ("q", "ian"),
"qiang": ("q", "iang"),
"qiao": ("q", "iao"),
"qie": ("q", "ie"),
"qin": ("q", "in"),
"qing": ("q", "ing"),
"qiong": ("q", "iong"),
"qiu": ("q", "iou"),
"qu": ("q", "v"),
"quan": ("q", "van"),
"que": ("q", "ve"),
"qun": ("q", "vn"),
"ran": ("r", "an"),
"rang": ("r", "ang"),
"rao": ("r", "ao"),
"re": ("r", "e"),
"ren": ("r", "en"),
"reng": ("r", "eng"),
"ri": ("r", "iii"),
"rong": ("r", "ong"),
"rou": ("r", "ou"),
"ru": ("r", "u"),
"rua": ("r", "ua"),
"ruan": ("r", "uan"),
"rui": ("r", "uei"),
"run": ("r", "uen"),
"ruo": ("r", "uo"),
"sa": ("s", "a"),
"sai": ("s", "ai"),
"san": ("s", "an"),
"sang": ("s", "ang"),
"sao": ("s", "ao"),
"se": ("s", "e"),
"sen": ("s", "en"),
"seng": ("s", "eng"),
"sha": ("sh", "a"),
"shai": ("sh", "ai"),
"shan": ("sh", "an"),
"shang": ("sh", "ang"),
"shao": ("sh", "ao"),
"she": ("sh", "e"),
"shei": ("sh", "ei"),
"shen": ("sh", "en"),
"sheng": ("sh", "eng"),
"shi": ("sh", "iii"),
"shou": ("sh", "ou"),
"shu": ("sh", "u"),
"shua": ("sh", "ua"),
"shuai": ("sh", "uai"),
"shuan": ("sh", "uan"),
"shuang": ("sh", "uang"),
"shui": ("sh", "uei"),
"shun": ("sh", "uen"),
"shuo": ("sh", "uo"),
"si": ("s", "ii"),
"song": ("s", "ong"),
"sou": ("s", "ou"),
"su": ("s", "u"),
"suan": ("s", "uan"),
"sui": ("s", "uei"),
"sun": ("s", "uen"),
"suo": ("s", "uo"),
"ta": ("t", "a"),
"tai": ("t", "ai"),
"tan": ("t", "an"),
"tang": ("t", "ang"),
"tao": ("t", "ao"),
"te": ("t", "e"),
"tei": ("t", "ei"),
"teng": ("t", "eng"),
"ti": ("t", "i"),
"tian": ("t", "ian"),
"tiao": ("t", "iao"),
"tie": ("t", "ie"),
"ting": ("t", "ing"),
"tong": ("t", "ong"),
"tou": ("t", "ou"),
"tu": ("t", "u"),
"tuan": ("t", "uan"),
"tui": ("t", "uei"),
"tun": ("t", "uen"),
"tuo": ("t", "uo"),
"wa": ("^", "ua"),
"wai": ("^", "uai"),
"wan": ("^", "uan"),
"wang": ("^", "uang"),
"wei": ("^", "uei"),
"wen": ("^", "uen"),
"weng": ("^", "ueng"),
"wo": ("^", "uo"),
"wu": ("^", "u"),
"xi": ("x", "i"),
"xia": ("x", "ia"),
"xian": ("x", "ian"),
"xiang": ("x", "iang"),
"xiao": ("x", "iao"),
"xie": ("x", "ie"),
"xin": ("x", "in"),
"xing": ("x", "ing"),
"xiong": ("x", "iong"),
"xiu": ("x", "iou"),
"xu": ("x", "v"),
"xuan": ("x", "van"),
"xue": ("x", "ve"),
"xun": ("x", "vn"),
"ya": ("^", "ia"),
"yan": ("^", "ian"),
"yang": ("^", "iang"),
"yao": ("^", "iao"),
"ye": ("^", "ie"),
"yi": ("^", "i"),
"yin": ("^", "in"),
"ying": ("^", "ing"),
"yo": ("^", "iou"),
"yong": ("^", "iong"),
"you": ("^", "iou"),
"yu": ("^", "v"),
"yuan": ("^", "van"),
"yue": ("^", "ve"),
"yun": ("^", "vn"),
"za": ("z", "a"),
"zai": ("z", "ai"),
"zan": ("z", "an"),
"zang": ("z", "ang"),
"zao": ("z", "ao"),
"ze": ("z", "e"),
"zei": ("z", "ei"),
"zen": ("z", "en"),
"zeng": ("z", "eng"),
"zha": ("zh", "a"),
"zhai": ("zh", "ai"),
"zhan": ("zh", "an"),
"zhang": ("zh", "ang"),
"zhao": ("zh", "ao"),
"zhe": ("zh", "e"),
"zhei": ("zh", "ei"),
"zhen": ("zh", "en"),
"zheng": ("zh", "eng"),
"zhi": ("zh", "iii"),
"zhong": ("zh", "ong"),
"zhou": ("zh", "ou"),
"zhu": ("zh", "u"),
"zhua": ("zh", "ua"),
"zhuai": ("zh", "uai"),
"zhuan": ("zh", "uan"),
"zhuang": ("zh", "uang"),
"zhui": ("zh", "uei"),
"zhun": ("zh", "uen"),
"zhuo": ("zh", "uo"),
"zi": ("z", "ii"),
"zong": ("z", "ong"),
"zou": ("z", "ou"),
"zu": ("z", "u"),
"zuan": ("z", "uan"),
"zui": ("z", "uei"),
"zun": ("z", "uen"),
"zuo": ("z", "uo"),
}
zh_pattern = re.compile("[\u4e00-\u9fa5]")
alpha_pattern = re.compile(r"[a-zA-Z]")
def is_zh(word):
global zh_pattern
match = zh_pattern.search(word)
return match is not None
def is_alpha(word):
global alpha_pattern
match = alpha_pattern.search(word)
return match is not None
class MyConverter(NeutralToneWith5Mixin, DefaultConverter):
pass
@dataclass
class BakerProcessor(BaseProcessor):
pinyin_dict: Dict[str, Tuple[str, str]] = field(default_factory=lambda: PINYIN_DICT)
cleaner_names: str = None
target_rate: int = 24000
speaker_name: str = "baker"
def __post_init__(self):
super().__post_init__()
self.pinyin_parser = self.get_pinyin_parser()
def setup_eos_token(self):
return _eos[0]
def create_items(self):
items = []
if self.data_dir:
with open(
os.path.join(self.data_dir, "ProsodyLabeling/000001-010000.txt"),
encoding="utf-8",
) as ttf:
lines = ttf.readlines()
for idx in range(0, len(lines), 2):
utt_id, chn_char = lines[idx].strip().split() # [100001, 中文]
pinyin = lines[idx + 1].strip().split() # ['zhong1', 'wen2']
phonemes = self.get_phoneme_from_char_and_pinyin(chn_char, pinyin)
wav_path = os.path.join(self.data_dir, "Wave", "%s.wav" % utt_id)
items.append(
[" ".join(phonemes), wav_path, utt_id, self.speaker_name]
)
self.items = items
def get_phoneme_from_char_and_pinyin(self, chn_char, pinyin):
        # '#4' (sentence-boundary marker) is dropped here; 'sil' is added at both ends instead
chn_char = chn_char.replace("#4", "")
char_len = len(chn_char)
i, j = 0, 0
result = ["sil"]
while i < char_len:
cur_char = chn_char[i]
if is_zh(cur_char):
                if pinyin[j][:-1] == 'n':  # special case: pinyin 'n' (for 嗯) is treated as 'en'
                    pinyin[j] = 'en' + pinyin[j][-1]
                if pinyin[j][:-1] not in self.pinyin_dict:  # handle erhua (rhotacized finals, e.g. 儿)
assert chn_char[i + 1] == "儿", f"current_char : {cur_char}, next_char: {chn_char[i+1]}, cur_pinyin: {pinyin[j]}"
assert pinyin[j][-2] == "r"
tone = pinyin[j][-1]
a = pinyin[j][:-2]
a1, a2 = self.pinyin_dict[a]
result += [a1, a2 + tone, "er5"]
if i + 2 < char_len and chn_char[i + 2] != "#":
result.append("#0")
i += 2
j += 1
else:
tone = pinyin[j][-1]
a = pinyin[j][:-1]
a1, a2 = self.pinyin_dict[a] # a="wen" a1="^", a2="en"
result += [a1, a2 + tone] # result = [zh, ong1, ^,en2]
                    if i + 1 < char_len and chn_char[i + 1] != "#":  # insert #0 after each character
result.append("#0")
i += 1
j += 1
# TODO support English alpha
# elif is_alpha(cur_char):
# result += ALPHA_PHONE_DICT[cur_char.upper()]
            #     if i + 1 < char_len and chn_char[i + 1] not in "#、,。!?:":  # insert #0 after each letter
# result.append("#0")
# i += 1
# j += 1 # baker alpha dataset "ABC" in pinyin
elif cur_char == "#":
result.append(chn_char[i : i + 2])
i += 2
            # elif cur_char in "、,。!?:":  # on punctuation, insert a pause
            #     result.pop()  # drop the preceding #0
            #     result.append("#3")
            #     i += 1
else:
# ignore the unknown char
# result.append(chn_char[i])
i += 1
if result[-1] == "#0": # 去掉最后的#0,改为sil
result = result[:-1]
if result[-1] != "sil":
result.append("sil")
assert j == len(pinyin)
return result
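    # Illustrative trace (assumption; not part of the original source):
    #   chn_char = "中文#1好", pinyin = ["zhong1", "wen2", "hao3"]
    #   -> ["sil", "zh", "ong1", "#0", "^", "en2", "#1", "h", "ao3", "sil"]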
    def get_one_sample(self, item):
        # librosa and soundfile are only needed for dataset preprocessing,
        # so import them lazily (their module-level imports are commented out)
        import librosa
        import soundfile as sf
        text, wav_file, utt_id, speaker_name = item
        # soundfile already returns float samples normalized to [-1, 1]
        audio, rate = sf.read(wav_file)
        audio = audio.astype(np.float32)
if rate != self.target_rate:
assert rate > self.target_rate
audio = librosa.resample(audio, rate, self.target_rate)
# convert text to ids
try:
text_ids = np.asarray(self.text_to_sequence(text), np.int32)
except Exception as e:
print(e, utt_id, text)
return None
sample = {
"raw_text": text,
"text_ids": text_ids,
"audio": audio,
"utt_id": str(int(utt_id)),
"speaker_name": speaker_name,
"rate": self.target_rate,
}
return sample
def get_pinyin_parser(self):
my_pinyin = Pinyin(MyConverter())
pinyin = my_pinyin.pinyin
return pinyin
def text_to_phone(self, text):
""" return string like 'sil c e4 #0 sh iii4 #0 ^ uen2 #0 b en3 sil' """
text = NSWNormalizer(text.strip()).normalize()
pinyin = self.pinyin_parser(text, style=Style.TONE3, errors="ignore")
new_pinyin = []
for x in pinyin:
x = "".join(x)
if "#" not in x:
new_pinyin.append(x)
        phonemes = self.get_phoneme_from_char_and_pinyin(text, new_pinyin)  # phoneme sequence as a list of strings, e.g. ['sil', 'c', 'e4', '#0', 'sh', 'iii4', '#0', '^', 'uen2', '#0', 'b', 'en3', 'sil']
phones = " ".join(phonemes)
return text, phones
    def text_to_sequence(self, text, inference=False):
        """ Convert a phone string like 'sil c e4 #0 sh iii4 #0 ^ uen2 #0 b en3 sil' to list[int] via the mapper.json symbol_to_id """
        if inference:
            _, phones = self.text_to_phone(text)
        else:
            # at preprocessing time the input is already a phone string
            phones = text
sequence = []
for symbol in phones.split():
idx = self.symbol_to_id[symbol]
sequence.append(idx)
# add eos tokens
sequence += [self.eos_id]
return sequence | zhtts | /tensorflow_tts/processor/baker.py | baker.py |
import re
import time
import datetime
import requests
import numpy as np
import pandas as pd
import openpyxl.utils.cell as c
from bs4 import BeautifulSoup
from selenium import webdriver
import matplotlib.pyplot as plt
from openpyxl import load_workbook
from pandas import DataFrame,Series
from win32com.client import Dispatch
from openpyxl.formula.translate import Translator
def find_income(company_id, n, google_driver_path, com_revenue_monthly_path, date_col, rev_col):
driver = webdriver.Chrome(executable_path=google_driver_path)
driver.get("https://mops.twse.com.tw/mops/web/t05st10_ifrs")
time.sleep(1)
    driver.find_element_by_id("co_id").send_keys(str(company_id))
time.sleep(1)
button = driver.find_element_by_css_selector('[value=" 查詢 "]')
button.click()
windows = driver.window_handles
driver.switch_to.window(windows[-1])
time.sleep(1.5)
html = driver.page_source
soup = BeautifulSoup(html, "html.parser")
driver.close()
table = soup.find_all("div", {"id": "table01"})
for t in table:
tr_tag = t.find_all('tr')
        if tr_tag == []:
            # page not fully loaded yet; retry (note: this recursion is unbounded)
            find_income(company_id, n, google_driver_path, com_revenue_monthly_path, date_col, rev_col)
else:
new_income = tr_tag[4].text[3:].split()
new_date = tr_tag[2].text[:9].split()
            # keep the date only if it starts with '民國' (ROC year); otherwise use the previous row
            if new_date[0][:2] != '民國':
                new_date = tr_tag[1].text[:9].split()
wb = load_workbook(filename=com_revenue_monthly_path)
ws = wb.active
ws._current_row = n
ws.append({rev_col: new_income[0]})
ws._current_row = n
ws.append({date_col: new_date[0]})
wb.save(filename=com_revenue_monthly_path)
def find_row(company_name, sheetname):
company_row = 'None'
for col in sheetname.columns:
for row in col:
if row.value == company_name:
company_row = str(row.row)
return company_row
def find_col(date, sheetname):
for row in sheetname.rows:
for col in row:
if col.value == date:
date_col = c.get_column_letter(col.col_idx)
return date_col
def find_address(com_name, date, sheetname):
if find_col(date, sheetname) != 'None' and find_row(com_name, sheetname) != 'None':
address = find_col(date, sheetname) + find_row(com_name, sheetname)
else:
address = "None"
return address
def just_open(filename):
xlApp = Dispatch("Excel.Application")
xlApp.Visible = False
xlBook = xlApp.Workbooks.Open(filename)
xlBook.Save()
xlBook.Close()
def twweb_to_excel(num,google_driver_path,com_revenue_monthly_path,date_col, rev_col):
wb = load_workbook(com_revenue_monthly_path)
ws = wb.active
v = []
for row in ws.rows:
v.append(row[0].value)
for i in range(num,179):
find_income(v[i][:4], i, google_driver_path,com_revenue_monthly_path,date_col, rev_col)
print(i)
def find_cunchu_price(chrome_path):
driver_cunchu = webdriver.Chrome(executable_path=chrome_path)
driver_cunchu.get('https://www.dramx.com/')
html = driver_cunchu.page_source
soup = BeautifulSoup(html,"html.parser")
table_time = soup.find_all("div",{"class":'menudiv'})
for t in table_time:
td_tag = t.find_all('time')
dram_flash_time = str(td_tag[0])[6:16]
table_dram = soup.find_all("div",{"id":'con_one_1'})
for t in table_dram:
td_tag = t.find_all('td')
dram_price_a = [td_tag[5],td_tag[12],td_tag[19],td_tag[26],td_tag[33],td_tag[40]]
dram_price_change_a = [td_tag[6],td_tag[13],td_tag[20],td_tag[27],td_tag[34],td_tag[41]]
dram_price = []
dram_price_change = []
for i in dram_price_a:
result = float(str(i)[4:9])
dram_price.append(result)
for i in dram_price_change_a:
a = str(i)[-11:-5]
if a[0] =='>':
result = float(a[1:-1])/100
else:
result = 0 - float(a[1:-1])/100
dram_price_change.append(result)
table_flash = soup.find_all("div",{"id":'con_one_2'})
for t in table_flash:
td_tag = t.find_all('td')
flash_price_a = [td_tag[5],td_tag[12],td_tag[19],td_tag[26],td_tag[33]]
flash_price_change_a = [td_tag[6],td_tag[13],td_tag[20],td_tag[27],td_tag[34]]
flash_price = []
flash_price_change = []
for i in flash_price_a:
result = float(str(i)[4:9])
flash_price.append(result)
for i in flash_price_change_a:
a = str(i)[-11:-5]
if a[0] =='>':
result = float(a[1:-1])/100
else:
result = 0 - float(a[1:-1])/100
flash_price_change.append(result)
driver_cunchu.close()
driver_cunchu = webdriver.Chrome("C:\Program Files\Google\Chrome\Application\chromedriver.exe")
driver_cunchu.get('https://www.0101ssd.com/c/price_new.html')
html = driver_cunchu.page_source
soup = BeautifulSoup(html,"html.parser")
table = soup.find_all("div",{"class":'quotedpriceLeft wid_700'})
for t in table:
td_tag = t.find_all('td')
ssd_price_a = [td_tag[2],td_tag[9],td_tag[16],td_tag[23],td_tag[30],
td_tag[37],td_tag[44],td_tag[51],td_tag[72],td_tag[79],
td_tag[86],td_tag[93],td_tag[100],td_tag[107]]
ssd_price_change_a = [td_tag[5],td_tag[12],td_tag[19],td_tag[26],td_tag[33],
td_tag[40],td_tag[47],td_tag[54],td_tag[75],td_tag[82],
td_tag[89],td_tag[96],td_tag[103],td_tag[110]]
ssd_price = []
ssd_price_change = []
for i in ssd_price_a:
a = int(str(i)[4:-5])
ssd_price.append(a)
for i in ssd_price_change_a:
x = str(i)
if x.find('-') == -1:
n = x.find('>')
result = float(x[n+1:-6])/100
else:
if x[4:-5] == '-':
result = '-'
else:
n = x.find('-')
result = 0 - float(x[n+1:-6])/100
ssd_price_change.append(result)
table = soup.find_all("b",{"class":'quotedTableOneNameTime'})
for t in table:
td_tag = t.find_all('td')
table = soup.find_all("b",{"class":'quotedTableOneNameTime'})
ssd_time= str(table[0])[-19:-4]
driver_cunchu.close()
    print('DRAM/Flash update time: ' + dram_flash_time)
    print('DRAM prices and changes:')
    print(dram_price)
    print(dram_price_change)
    print('Flash prices and changes:')
    print(flash_price)
    print(flash_price_change)
    print('SSD update time: ' + ssd_time)
print(ssd_price)
print(ssd_price_change)
return dram_price, dram_price_change, flash_price, flash_price_change, ssd_price, ssd_price_change
def start_driver(google_driver_address):
driver = webdriver.Chrome(executable_path=google_driver_address)
return driver
def jd_start():
    # note: relies on a module-level `driver` created via start_driver()
    driver.get("https://item.jd.com/100018807455.html")
def jd_find_price_stock(web):
if web == 'https://jd.com/':
result = ['暂无信息', '']
else:
product_num = web[20:-5]
price_code = 'price J-p-' + product_num
driver.get(web)
html = driver.page_source
soup = BeautifulSoup(html,"html.parser")
table_price = soup.find_all("span",{"class":price_code})
if table_price == []:
result = ['已下架', '']
else:
price_str = str(table_price[0])
price_location = 25 + len(product_num)
price = int(price_str[price_location:-10])
table_stock = soup.find_all("div",{"class":'store-prompt'})
stock_str = str(table_stock[0])
stock = stock_str[52:-23]
if stock == '无货':
result = [price, stock]
else:
result = [price, '有货']
return result
def find_auto_data(web,year, month, google_driver_address):
driver = webdriver.Chrome(executable_path=google_driver_address)
driver.get(web)
html = driver.page_source
soup = BeautifulSoup(html,"html.parser")
table = soup.find_all("div",{"id":'daisuFlash'})
for t in table:
td_tag = t.find_all('script')
a = str(td_tag[2])[15:-10].split(',')
b = str(td_tag[2])[27:-10].split(',')
if a[0][0:8] == 'Data'+ year:
x = b[month - 1][-9:]
if x == 'undefined':
result = x
else:
result = int(re.sub(u"([^\u0030-\u0039])", "", b[month - 1][-10:]))
else:
result = 'no data'
driver.close()
return result | zhu-3w | /zhu_3w-1.0.17.tar.gz/zhu_3w-1.0.17/threewfund/datatool.py | datatool.py |
========================
zhu2020
========================
.. {# pkglts, doc
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_doc.svg
:alt: Documentation status
:target: https://b326.gitlab.io/zhu2020/
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_pkging_pip.svg
:alt: PyPI version
:target: https://pypi.org/project/zhu2020/1.0.0/
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_pkging_conda.svg
:alt: Conda version
:target: https://anaconda.org/revesansparole/zhu2020
.. image:: https://badge.fury.io/py/zhu2020.svg
:alt: PyPI version
:target: https://badge.fury.io/py/zhu2020
.. #}
.. {# pkglts, glabpkg, after doc
main: |main_build|_ |main_coverage|_
.. |main_build| image:: https://gitlab.com/b326/zhu2020/badges/main/pipeline.svg
.. _main_build: https://gitlab.com/b326/zhu2020/commits/main
.. |main_coverage| image:: https://gitlab.com/b326/zhu2020/badges/main/coverage.svg
.. _main_coverage: https://gitlab.com/b326/zhu2020/commits/main
prod: |prod_build|_ |prod_coverage|_
.. |prod_build| image:: https://gitlab.com/b326/zhu2020/badges/prod/pipeline.svg
.. _prod_build: https://gitlab.com/b326/zhu2020/commits/prod
.. |prod_coverage| image:: https://gitlab.com/b326/zhu2020/badges/prod/coverage.svg
.. _prod_coverage: https://gitlab.com/b326/zhu2020/commits/prod
.. #}
Data and formalisms from Zhu 2020
| zhu2020 | /zhu2020-1.0.0.tar.gz/zhu2020-1.0.0/README.rst | README.rst |
============
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at issues_.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the Gitlab issues for bugs.
Anything tagged with "bug" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the Gitlab issues for
features. Anything tagged with "feature" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
**zhu2020** could always use more documentation, whether as
part of the official **zhu2020** docs, in docstrings, or even
on the web in blog posts, articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at issues_.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `zhu2020` for local
development.
1. Fork the `zhu2020` repo on
gitlab.com.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/zhu2020.git
3. Install your local copy into a virtualenv. Assuming you have virtualenv_
installed, this is how you set up your fork for local development::
$ virtualenv dvlpt
    $ source dvlpt/bin/activate
(dvlpt)$ pip install -e .
4. Create a branch for local development (wip stands for work in progress)::
(dvlpt)$ git checkout -b wip_name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
(dvlpt)$ cd zhu2020
(dvlpt) zhu2020$ flake8
(dvlpt) zhu2020$ pytest
(dvlpt) zhu2020$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to Gitlab::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin wip_name-of-your-bugfix-or-feature
7. Submit a merge request through the Gitlab website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 3.9.
Tips
----
To run a subset of tests::
$ pytest test/test_XXX
.. _issues: https://gitlab.com/b326/zhu2020/issues
.. _virtualenv: https://pypi.python.org/pypi/virtualenv
| zhu2020 | /zhu2020-1.0.0.tar.gz/zhu2020-1.0.0/CONTRIBUTING.rst | CONTRIBUTING.rst |
Welcome to zhu2020's documentation!
====================================================
.. toctree::
:caption: Contents
:maxdepth: 2
readme
installation
usage
.. toctree::
:caption: User's documentation
:maxdepth: 2
user/index
_gallery/index
.. toctree::
:caption: Developer's documentation
:maxdepth: 4
Sources <_dvlpt/modules>
.. toctree::
:caption: Annexe
:maxdepth: 2
gitlab home <https://gitlab.com/b326/zhu2020>
management
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| zhu2020 | /zhu2020-1.0.0.tar.gz/zhu2020-1.0.0/doc/index.rst | index.rst |
Overview
========
.. {# pkglts, glabpkg
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_doc.svg
:alt: Documentation status
:target: https://b326.gitlab.io/zhu2020/
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_pkging_pip.svg
:alt: PyPI version
:target: https://pypi.org/project/zhu2020/1.0.0/
.. image:: https://b326.gitlab.io/zhu2020/_images/badge_pkging_conda.svg
:alt: Conda version
:target: https://anaconda.org/revesansparole/zhu2020
.. image:: https://badge.fury.io/py/zhu2020.svg
:alt: PyPI version
:target: https://badge.fury.io/py/zhu2020
main: |main_build|_ |main_coverage|_
.. |main_build| image:: https://gitlab.com/b326/zhu2020/badges/main/pipeline.svg
.. _main_build: https://gitlab.com/b326/zhu2020/commits/main
.. |main_coverage| image:: https://gitlab.com/b326/zhu2020/badges/main/coverage.svg
.. _main_coverage: https://gitlab.com/b326/zhu2020/commits/main
prod: |prod_build|_ |prod_coverage|_
.. |prod_build| image:: https://gitlab.com/b326/zhu2020/badges/prod/pipeline.svg
.. _prod_build: https://gitlab.com/b326/zhu2020/commits/prod
.. |prod_coverage| image:: https://gitlab.com/b326/zhu2020/badges/prod/coverage.svg
.. _prod_coverage: https://gitlab.com/b326/zhu2020/commits/prod
.. #}
Data and formalisms from Zhu 2020
| zhu2020 | /zhu2020-1.0.0.tar.gz/zhu2020-1.0.0/doc/readme.rst | readme.rst |
zhue
============
This library eases the interaction with Philips Hue devices
.. code-block:: python
from zhue import Bridge
# upnp/nupnp discovery
b = Bridge.discover()
# register a new user on the Hue bridge
b.create_user()
# or use a predefined username
b.username = 'MY_USERNAME'
# query lights
b.lights
# turn light on and off
b.light('outdoor').on()
b.light('outdoor').off()
# query groups
b.groups
# turn all lights belonging to a group on
b.group('living').on()
# query sensors
b.sensors
# get temperature readings
[x.temperature for x in b.temperature_sensors]
# get light level readings
[x.light_level for x in b.light_level_sensors]
# get battery levels
[x.config.battery for x in b.sensors if hasattr(x.config, 'battery')]
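
    # A tiny automation sketch (illustrative; it reuses only the calls shown
    # above, and the threshold value is an assumption)
    for sensor in b.light_level_sensors:
        if sensor.light_level < 10000:
            b.light('outdoor').on()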
| zhue | /zhue-0.9.5.tar.gz/zhue-0.9.5/README.rst | README.rst |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)
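    # Worked example (illustrative): for the standard normal (mean=0, stdev=1),
    # pdf(0) = 1 / (1 * sqrt(2 * pi)) ~= 0.3989.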
def plot_histogram_pdf(self, n_spaces = 50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval*i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2,sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        axes[1].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
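    # Worked example (illustrative): Gaussian(25, 3) + Gaussian(30, 4) yields
    # mean 25 + 30 = 55 and stdev sqrt(3 ** 2 + 4 ** 2) = 5.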
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev) | zhuhegegehahaha | /zhuhegegehahaha-0.2.tar.gz/zhuhegegehahaha-0.2/distributions/Gaussiandistribution.py | Gaussiandistribution.py |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Binomial(Distribution):
""" Binomial distribution class for calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
n (int) number of trials
"""
def __init__(self, prob=.5, size=20):
self.n = size
self.p = prob
Distribution.__init__(self, self.calculate_mean(), self.calculate_stdev())
def calculate_mean(self):
"""Function to calculate the mean from p and n
Args:
None
Returns:
float: mean of the data set
"""
self.mean = self.p * self.n
return self.mean
def calculate_stdev(self):
"""Function to calculate the standard deviation from p and n.
Args:
None
Returns:
float: standard deviation of the data set
"""
self.stdev = math.sqrt(self.n * self.p * (1 - self.p))
return self.stdev
def replace_stats_with_data(self):
"""Function to calculate p and n from the data set
Args:
None
Returns:
float: the p value
float: the n value
"""
self.n = len(self.data)
self.p = 1.0 * sum(self.data) / len(self.data)
self.mean = self.calculate_mean()
self.stdev = self.calculate_stdev()
def plot_bar(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.bar(x = ['0', '1'], height = [(1 - self.p) * self.n, self.p * self.n])
plt.title('Bar Chart of Data')
plt.xlabel('outcome')
plt.ylabel('count')
def pdf(self, k):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
a = math.factorial(self.n) / (math.factorial(k) * (math.factorial(self.n - k)))
b = (self.p ** k) * (1 - self.p) ** (self.n - k)
return a * b
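    # Worked example (illustrative): for Binomial(prob=0.5, size=4),
    # pdf(2) = C(4, 2) * 0.5 ** 2 * 0.5 ** 2 = 6 * 0.0625 = 0.375.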
def plot_bar_pdf(self):
"""Function to plot the pdf of the binomial distribution
Args:
None
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
x = []
y = []
# calculate the x values to visualize
for i in range(self.n + 1):
x.append(i)
y.append(self.pdf(i))
# make the plots
plt.bar(x, y)
plt.title('Distribution of Outcomes')
plt.ylabel('Probability')
plt.xlabel('Outcome')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Binomial distributions with equal p
Args:
other (Binomial): Binomial instance
Returns:
Binomial: Binomial distribution
"""
try:
assert self.p == other.p, 'p values are not equal'
except AssertionError as error:
raise
result = Binomial()
result.n = self.n + other.n
result.p = self.p
result.calculate_mean()
result.calculate_stdev()
return result
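    # Worked example (illustrative): Binomial(0.4, 20) + Binomial(0.4, 60)
    # yields a Binomial with p = 0.4 and n = 80 (the p values must match).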
def __repr__(self):
"""Function to output the characteristics of the Binomial instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}, p {}, n {}".\
format(self.mean, self.stdev, self.p, self.n) | zhuhegegehahaha | /zhuhegegehahaha-0.2.tar.gz/zhuhegegehahaha-0.2/distributions/Binomialdistribution.py | Binomialdistribution.py |
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write, db_command, db_query
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sys
import time
from zhulong2.util.etl import est_meta, est_html
_name_ = 'liaoning_chaoyang'
def f3(driver, url):
driver.get(url)
try:
locator = (By.XPATH, '//table[@id="tblInfo"]')
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
flag = 1
except:
locator = (By.XPATH, '//*[@id="container"]/div[2]/div[2]')
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
flag = 2
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
if flag ==1:
div = soup.find('table', id='tblInfo')
else:
div = soup.find('div',class_="ewb-show-con")
return div
def f1(driver, num):
locator = (By.XPATH, '//ul[@class="wb-data-item"]/li[1]/a')
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
val = driver.find_element_by_xpath('//ul[@class="wb-data-item"]/li[1]/a').get_attribute("href")[-50:]
cnum = re.findall('(\d+)/',driver.find_element_by_xpath('//div[@class="pages"]//li[@class="wb-page-li visible-desktop"]').text)[0]
# print('val', val, 'cnum', cnum,'num',num)
if int(cnum) != int(num):
driver.execute_script("ShowAjaxNewPage('categorypagingcontent','/ZGCY/','004017001',%s)"%num)
locator = (By.XPATH, '//ul[@class="wb-data-item"]/li[1]/a[not(contains(@href,"%s"))]' % val)
WebDriverWait(driver, 30).until(EC.presence_of_element_located(locator))
data = []
page = driver.page_source
body = etree.HTML(page)
content_list = body.xpath('//ul[@class="wb-data-item"]/li')
for content in content_list:
name = content.xpath("./a/text()")[0].strip()
ggstart_time = content.xpath("./span/text()")[0].strip()
url = "http://www.zgcy.gov.cn" + content.xpath("./a/@href")[0]
temp = [name, ggstart_time, url]
# print(temp)
data.append(temp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, '//div[@class="pages"]//li[@class="wb-page-li visible-desktop"]')
WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located(locator))
total_page = re.findall('/(\d+)',driver.find_element_by_xpath('//div[@class="pages"]//li[@class="wb-page-li visible-desktop"]').text)[0]
driver.quit()
return int(total_page)
data = [
["zfcg_zhaobiao_gg","http://www.zgcy.gov.cn/ZGCY/zwgk/004017/004017001/",["name", "ggstart_time", "href", "info"], f1, f2],
["zfcg_zhongbiao_gg","http://www.zgcy.gov.cn/ZGCY/zwgk/004017/004017002/",["name", "ggstart_time", "href", "info"], f1, f2],
    # acceptance and demand notice pages could not be accessed
]
def work(conp, **arg):
est_meta(conp, data=data, diqu="辽宁省朝阳市", **arg)
est_html(conp, f=f3, **arg)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "anbang", "liaoning_chaoyang"],pageloadtimeout=60,pageloadstrategy='none',) | zhulong2 | /liaoning/chaoyang.py | chaoyang.py |
import json
import math
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write, db_command, db_query
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sys
import time
from zhulong2.util.etl import est_meta, est_html
_name_ = 'liaoning_shenyang'
def f3(driver, url):
driver.get(url)
if "您没有权限查看当前信息。" in driver.page_source:
return '无权限查看信息!'
locator = (By.XPATH, "//div[@class='panel-body panel-notice']|//div[@class='control-label-text']")
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='panel-body panel-notice')
if not div:
div = soup.find('div', class_='control-label-text')
return div
def f1(driver, num):
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr')
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located(locator))
#
val = driver.find_element_by_xpath('//table[@id="protalInfoid"]/tbody/tr[2]//a').text
cnum = int(driver.find_element_by_xpath('//ul[@class="pagination"]/li[contains(@class,"active")]').text)
while int(cnum) != int(num):
total_page = math.ceil(int(re.findall("共 (\d+) 条",driver.find_element_by_xpath('//div[@class="col-xs-6 infoBar"]/span').text)[0])/50)
if total_page - num < total_page//2:
driver.find_elements_by_xpath('//a[@data-page="last"]')[0].click()
locator = (By.XPATH, '//ul[@class="pagination"]/li[contains(@class,"active")][string()!=%s]'%cnum)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
cnum = int(driver.find_element_by_xpath('//ul[@class="pagination"]/li[contains(@class,"active")]').text)
# print('val1', val, 'cnum1', cnum,'num',num)
if cnum > int(num):
dnum = cnum - int(num)
for _ in range(dnum):
driver.find_element_by_xpath("//ul[@class='pagination']/li[@class='prev']/a").click()
else:
            dnum = int(num) - cnum
            for _ in range(dnum):
driver.find_element_by_xpath("//ul[@class='pagination']/li[@class='next']/a").click()
locator = (By.XPATH, '//ul[@class="pagination"]/li[contains(@class,"active")][string()!=%s]' % cnum)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
cnum = int(driver.find_element_by_xpath('//ul[@class="pagination"]/li[contains(@class,"active")]').text)
if int(cnum) == int(num):
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr[1]//a[not(contains(string(),"%s"))]' % val)
WebDriverWait(driver, 30).until(EC.visibility_of_element_located(locator))
break
locator = (By.XPATH, '//table[@id="protalInfoid"][@aria-busy="false"]')
WebDriverWait(driver, 30).until(EC.visibility_of_element_located(locator))
data = []
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr')
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located(locator))
page = driver.page_source
body = etree.HTML(page)
content_list = body.xpath('//table[@id="protalInfoid"]/tbody/tr')
for content in content_list:
try:
name = content.xpath(".//a/text()")[0].strip() + content.xpath("./td[2]/text()")[0].strip()
except:
name = content.xpath("./td[2]/text()")[0].strip()
ggstart_time = content.xpath("./td[last()]/text()")[0].strip()
url = "http://www.ccgp-shenyang.gov.cn/sygpimp/portalsys/portal.do?method=pubinfoView&info_id="+content.xpath(".//a/@id")[0].strip()
temp = [name, ggstart_time, url]
if int(content.xpath("count(./td)")) >3:
info = content.xpath("./td[3]/text()")[0].strip()
else:
info = None
info = json.dumps(info, ensure_ascii=False)
temp.append(info)
data.append(temp)
df = pd.DataFrame(data=data)
# df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//ul[@class='pagination']")
WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located(locator))
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr[1]')
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located(locator))
driver.find_elements_by_xpath('//a[@data-page="last"]')[0].click()
locator = (By.XPATH, '//ul[@class="pagination"]/li[contains(@class,"active")][string()!=1]')
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
total_page = int(driver.find_element_by_xpath('//ul[@class="pagination"]/li[contains(@class,"active")]').text)
# print('total_page', total_page)
driver.quit()
return total_page
def choice_50(driver):
locator = (By.XPATH, "//ul[@class='pagination']")
WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located(locator))
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr[1]')
dropdown_text = driver.find_element_by_xpath('//button[@class="btn btn-default dropdown-toggle"]/span[@class="dropdown-text"]').text
if int(dropdown_text) != 50:
driver.find_element_by_xpath('//button[@class="btn btn-default dropdown-toggle"]').click()
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located(locator))
locator = (By.XPATH, '//table[@id="protalInfoid"]/tbody/tr[1]')
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located(locator))
driver.find_element_by_xpath('//ul[@role="menu"]//a[@data-action="50"]').click()
time.sleep(1)
def before(f):
def wrap(*args):
driver = args[0]
choice_50(driver)
return f(*args)
return wrap
data = [
#
["zfcg_zhaobiao_gg", "http://www.ccgp-shenyang.gov.cn/sygpimp/portalindex.do?method=goInfo&linkId=cggg",
["name", "ggstart_time", "href", "info"], before(f1), before(f2)],
["zfcg_yanshou_gg", "http://www.ccgp-shenyang.gov.cn/sygpimp/portalindex.do?method=goInfoysgs&linkId=ysgs",
["name", "ggstart_time", "href", "info"], before(f1), before(f2)],
]
def work(conp, **arg):
est_meta(conp, data=data, diqu="辽宁省沈阳市", **arg)
est_html(conp, f=f3, **arg)
if __name__ == '__main__':
# work(conp=["postgres", "since2015", "192.168.3.171", "anbang", "liaoning_shenyang"],pageloadstrategy='none')
driver = webdriver.Chrome()
# driver.get("http://www.ccgp-shenyang.gov.cn/sygpimp/portalindex.do?method=goInfo&linkId=cggg")
# print(before(f1)(driver,88))
# print(f3(driver, 'http://www.ccgp-shenyang.gov.cn/sygpimp/portalsys/portal.do?method=pubinfoView&info_id=-3d3ed5a8161d6b95230-76e2')) | zhulong2 | /liaoning/shenyang.py | shenyang.py |
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write, db_command, db_query
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sys
import time
from zhulong2.util.etl import est_meta, est_html
_name_ = 'liaoning_wafangdian'
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, '//div[@class="news-content-page"]')
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='news-content-page')
return div
def f1(driver, num):
locator = (By.XPATH, '//ul[@class="large-list"]/li/a')
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
val = driver.find_element_by_xpath('//ul[@class="large-list"]/li[1]/a').get_attribute("href")[-15:]
cnum = driver.find_element_by_xpath('//*[@id="pages"]/b[2]/font').text
# print('val', val, 'cnum', cnum,'num',num)
if int(cnum) != int(num):
url = driver.current_url
url = url.rsplit('/',maxsplit=1)[0] +'/'+ str(num)+".html"
driver.get(url)
locator = (By.XPATH, '//ul[@class="large-list"]/li[1]/a[not(contains(@href,"%s"))]' % val)
WebDriverWait(driver, 30).until(EC.presence_of_element_located(locator))
data = []
page = driver.page_source
body = etree.HTML(page)
content_list = body.xpath('//ul[@class="large-list"]/li')
for content in content_list:
name = content.xpath("./a/text()")[0].strip()
ggstart_time = content.xpath("./span/text()")[0].strip()
url = content.xpath("./a/@href")[0]
if 'http://www' not in url:
url = "http://www.dlwfd.gov.cn/" + url
temp = [name, ggstart_time, url]
# print(temp)
data.append(temp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, '//*[@id="pages"]/b[2]')
WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located(locator))
total_page = re.findall('/(\d+)',driver.find_element_by_xpath('//*[@id="pages"]/b[2]').text)[0]
# print('total_page', total_page)
driver.quit()
# print(total_page)
return int(total_page)
data = [
["zfcg_zhaobiao_gg","http://www.dlwfd.gov.cn/xxgk/zbcg/zzcg/1.html",["name", "ggstart_time", "href", "info"], f1, f2],
["zfcg_biangeng_gg","http://www.dlwfd.gov.cn/xxgk/zbcg/gczb/1.html",["name", "ggstart_time", "href", "info"], f1, f2],
["zfcg_zhongbiao_gg", "http://www.dlwfd.gov.cn/xxgk/zbcg/zbgg/1.html", ["name", "ggstart_time", "href", "info"], f1,f2],
    # acceptance and demand notice pages could not be accessed
]
def work(conp, **arg):
est_meta(conp, data=data, diqu="辽宁省瓦房店市", **arg)
est_html(conp, f=f3, **arg)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "anbang", "liaoning_wafangdian"],pageloadtimeout=60,pageloadstrategy='none') | zhulong2 | /liaoning/wafangdian.py | wafangdian.py |
import math
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write, db_command, db_query
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sys
import time
from zhulong2.util.etl import est_meta, est_html, add_info
_name_ = 'liaoning_changchun'
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, '//div[@class="details"]')
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='details')
return div
def f1(driver, num):
locator = (By.XPATH, '//table[@id="row"]/tbody/tr[1]/td/a')
val = WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator)).get_attribute("href")[-50:]
locator = (By.XPATH, '//span[@class="pagelinks"]/font/strong')
cnum = int(WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator)).text)
# print('val', val, 'cnum', cnum,'num',num)
if int(cnum) != int(num):
new_url = re.sub('p=\d+', 'p=' + str(num), driver.current_url)
driver.get(new_url)
locator = (By.XPATH, '//table[@id="row"]/tbody/tr[1]/td/a[not(contains(@href,"%s"))]' % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
data = []
page = driver.page_source
body = etree.HTML(page)
content_list = body.xpath('//table[@id="row"]/tbody/tr')
for content in content_list:
name = content.xpath("./td[@class='tit']/a/text()")[0].strip()
ggstart_time = content.xpath("./td[@class='time']/text()")[0].strip()
url = "http://www.cczfcg.gov.cn" + content.xpath("./td/a/@href")[0]
temp = [name, ggstart_time, url]
data.append(temp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//span[@class='pagebanner']")
txt = WebDriverWait(driver, 30).until(EC.presence_of_element_located(locator)).text
total_temp = int(re.findall('(\d+)', txt)[0])
total_page = math.ceil(total_temp / 20)
driver.quit()
return int(total_page)
data = [
["zfcg_zhaobiao_sj_gg",
"http://www.cczfcg.gov.cn/article/bid_list.action?__fp=xTwapxltBMnpSsXN23uINw%3D%3D&field=1&title=&d-16544-p=1&getList=&getList=%E6%90%9C%E7%B4%A2&_sourcePage=2BVgADW7Hx6FNrAm9GCx-F0umL1uqkNLHg3gos_i5uo%3D&type=1",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '市级'}), f2],
["zfcg_zhaobiao_qx_gg",
"http://www.cczfcg.gov.cn/article/bid_list.action?field=2&d-16544-p=1&getList=&type=1",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '区县'}), f2],
["zfcg_zhaobiao_wf_gg",
"http://www.cczfcg.gov.cn/article/news_list.action?d-16544-p=1&getList=&type=13",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '外阜'}), f2],
["zfcg_zhongbiao_sj_gg",
"http://www.cczfcg.gov.cn/article/bid_list.action?field=1&d-16544-p=1&getList=&type=2",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '市级'}), f2],
["zfcg_zhongbiao_qx_gg",
"http://www.cczfcg.gov.cn/article/bid_list.action?field=2&d-16544-p=1&getList=&type=2",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '区县'}), f2],
["zfcg_zhongbiao_wf_gg",
"http://www.cczfcg.gov.cn/article/news_list.action?d-16544-p=1&getList=&type=14",
["name", "ggstart_time", "href", "info"], add_info(f1, {'area': '外阜'}), f2],
]
def work(conp, **arg):
est_meta(conp, data=data, diqu="辽宁省长春市", **arg)
est_html(conp, f=f3, **arg)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "anbang", "liaoning_changchun"], pageloadtimeout=60, pageloadstrategy='none',num=1)
# driver= webdriver.Chrome()
# driver.get('http://www.cczfcg.gov.cn/article/bid_list.action?__fp=xTwapxltBMnpSsXN23uINw%3D%3D&field=1&title=&d-16544-p=1&getList=&getList=%E6%90%9C%E7%B4%A2&_sourcePage=2BVgADW7Hx6FNrAm9GCx-F0umL1uqkNLHg3gos_i5uo%3D&type=1')
# print(f2(driver))
# f1(driver,2)
# print(f3(driver, 'http://www.cczfcg.gov.cn/article/ShowInviteBid.action?showInvite=&project_id=1E109F87A4136398CFCE1D9F7F889276BCE02CD2C8276DF574E33519E0A34D1A')) | zhulong2 | /liaoning/changchun.py | changchun.py |
import pandas as pd
import re
from lxml import etree
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write, db_command, db_query
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import sys
import time
from zhulong2.util.etl import est_meta, est_html, add_info
_name_ = 'liaoning_dalian'
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, '//table[@id="tblInfo"]')
WebDriverWait(driver, 20).until(EC.visibility_of_element_located(locator))
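    # Poll the length of page_source until it stops changing (at most ~6 rounds)
    # so dynamically rendered content has settled before parsing.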
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('table', id='tblInfo')
return div
def f1(driver, num):
time.sleep(0.3)
locator = (By.XPATH, '//td[@id="MoreInfoList1_tdcontent"]/table/tbody/tr')
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
val = driver.find_element_by_xpath(
'//td[@id="MoreInfoList1_tdcontent"]/table/tbody/tr/td/a').get_attribute("href")[-50:]
cnum = int(driver.find_element_by_xpath("//div[@id='MoreInfoList1_Pager']/table/tbody/tr[1]/td/font[3]").text)
# print('val', val, 'cnum', cnum,'num',num)
if int(cnum) != int(num):
driver.execute_script("javascript:__doPostBack('MoreInfoList1$Pager','%s')" % num)
locator = (By.XPATH,
'//td[@id="MoreInfoList1_tdcontent"]/table/tbody/tr[1]/td/a[not(contains(@href,"%s"))]' % val)
WebDriverWait(driver, 30).until(EC.presence_of_element_located(locator))
data = []
page = driver.page_source
body = etree.HTML(page)
content_list = body.xpath('//td[@id="MoreInfoList1_tdcontent"]/table/tbody/tr')
for content in content_list:
name = content.xpath("./td/a/text()")[0].strip()
ggstart_time = content.xpath("./td[last()]/text()")[0].strip()
url = "http://www.ccgp.dl.gov.cn" + content.xpath("./td/a/@href")[0]
temp = [name, ggstart_time, url]
# print(temp)
data.append(temp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
time.sleep(0.3)
locator = (By.XPATH, "//div[@id='MoreInfoList1_Pager']/table/tbody/tr/td/font[2]")
WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located(locator))
total_page = driver.find_element_by_xpath("//div[@id='MoreInfoList1_Pager']/table/tbody/tr/td/font[2]").text
driver.quit()
return int(total_page)
data = [
["zfcg_zhaobiao_shi_gg", "http://www.ccgp.dl.gov.cn/dlweb/003/003001/003001001/MoreInfo.aspx?CategoryNum=003001001",
["name", "ggstart_time", "href", "info"], add_info(f1,{'area':'市区'}), f2],
["zfcg_zhongbiao_shi_gg",
"http://www.ccgp.dl.gov.cn/dlweb/003/003002/003002001/MoreInfo.aspx?CategoryNum=003002001",
["name", "ggstart_time", "href", "info"], add_info(f1,{'area':'市区'}), f2],
["zfcg_zhaobiao_danyilaiyuan_shi_gg",
"http://www.ccgp.dl.gov.cn/dlweb/003/003004/003004001/MoreInfo.aspx?CategoryNum=003004001",
["name", "ggstart_time", "href", "info"], add_info(f1,{'area':'市区','method':'单一来源'}), f2],
    # Acceptance announcements and demand announcements are both inaccessible
]
data1 = [
'zfcg_zhaobiao_quxian_%s_gg;http://www.ccgp.dl.gov.cn/dlweb/003/003001/003001002/0030010020%s/MoreInfo.aspx?CategoryNum=0030010020%s',
'zfcg_zhongbiao_quxian_%s_gg;http://www.ccgp.dl.gov.cn/dlweb/003/003002/003002002/0030020020%s/MoreInfo.aspx?CategoryNum=0030020020%s',
'zfcg_zhaobiao_danyilaiyuan_quxian_%s_gg;http://www.ccgp.dl.gov.cn/dlweb/003/003004/003004002/0030040020%s/MoreInfo.aspx?CategoryNum=0030040020%s',
]
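# data1 holds templated URLs for the 16 district/county channels ('01'..'16');
# new_url_list() below expands them into full entries and appends them to data.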
def new_url_list():
    for u in data1:
        for x in ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10',
                  '11', '12', '13', '14', '15', '16']:
            new_url_str = (u % (x, x, x)).split(";")
            new_url_str.extend([["name", "ggstart_time", "href", "info"], f1, f2])
            data.append(new_url_str)
def work(conp, **arg):
est_meta(conp, data=data, diqu="辽宁省大连市", **arg)
est_html(conp, f=f3, **arg)
new_url_list()
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "anbang", "liaoning_dalian"],headless=False,pageloadstrategy='none') | zhulong2 | /liaoning/dalian.py | dalian.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info, est_meta_large
_name_ = "www_sinochemitc_com"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='zbdt-news-module-content']/div[1]//a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//span[@class='i-pager-info-c']")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='zbdt-news-module-content']/div[1]//a").get_attribute('href')[-15:]
        url = re.sub(r"-[0-9]*\.html", "-%d.html" % num, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='zbdt-news-module-content']/div[1]//a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='zbdt-news-module-content')
lis = div.find_all('div', class_='zbdt-news-item')
data = []
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = a.text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.sinochemitc.com/s/' + link
span = li.find('div', class_='zbdt-news-item-date').text.strip()
tmp = [title, span, href]
data.append(tmp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='zbdt-news-module-content']/div[1]//a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//span[@class='i-pager-info-p']")
total = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
num = re.findall(r'(\d+)', total)[0]
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//div[@class='Gnews-detail']")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='Gnews-detail')
return div
data = [
["qy_zhaobiao_gg",
"http://www.sinochemitc.com/l/7239-18882-1.html",
["name", "ggstart_time", "href", "info"], f1, f2],
["qy_gqita_zhong_liu_gg",
"http://www.sinochemitc.com/l/7241-18885-1.html",
["name", "ggstart_time", "href", "info"], f1, f2],
]
def work(conp, **args):
est_meta_large(conp, data=data, diqu="中国中化集团有限公司", **args)
est_html(conp, f=f3, **args)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_sinochemitc_com"])
# driver = webdriver.Chrome()
# url = "http://www.sinochemitc.com/l/7239-18882-1.html"
# driver.get(url)
# df = f2(driver)
# print(df)
#
# driver=webdriver.Chrome()
# url = "http://www.sinochemitc.com/l/7239-18882-1.html"
# driver.get(url)
# for i in range(11, 13):
# df=f1(driver, i)
# print(df.values)
# for i in df[2].values:
# f = f3(driver, i)
# print(f) | zhulong4 | /www_q_z/www_sinochemitc_com.py | www_sinochemitc_com.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info, est_meta_large
_name_ = "www_zmzb_com"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[last()]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[1]")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[1]/a").get_attribute('href')[-20:]
tar = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[last()]/a").get_attribute('href')[-20:]
if "index_" not in url:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index", s, url)
elif num == 1:
url = re.sub("index_[0-9]*", "index", url)
else:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index_[0-9]*", s, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[last()]/a[not(contains(@href, '%s'))]" % tar)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='lb-link')
uls = div.find_all('ul')
data = []
for ul in uls:
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.zmzb.com/' + link
span = li.find("span", class_='bidDate').text.strip()
tmp = [title, span, href]
data.append(tmp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
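    # '无法访问此网站' is Chrome's Chinese "This site can't be reached" page;
    # return 404 as a sentinel instead of parsed HTML for such dead links.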
if '无法访问此网站' in str(driver.page_source):
return 404
locator = (By.XPATH, "//table[@class='StdInputTable'][string-length()>60] | //div[@class='ninfo-con'][string-length()>60]")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='mbox lpInfo')
if not div:
divs = soup.find_all('table', class_='StdInputTable')
div = ''
for d in divs:
            div += str(d)
return div
data = [
["qy_zhaobiao_huowu_gg",
"http://www.zmzb.com/gghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhaobiao_gongcheng_gg",
"http://www.zmzb.com/gggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhaobiao_fuwu_gg",
"http://www.zmzb.com/ggff/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
####
["qy_biangeng_huowu_gg",
"http://www.zmzb.com/bggghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_biangeng_gongcheng_gg",
"http://www.zmzb.com/bggggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_biangeng_fuwu_gg",
"http://www.zmzb.com/bgggfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
####
["qy_zhongbiaohx_huowu_gg",
"http://www.zmzb.com/pbhw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhongbiaohx_gongcheng_gg",
"http://www.zmzb.com/pbgc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhongbiaohx_fuwu_gg",
"http://www.zmzb.com/pbjg/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
####
["qy_zhaobiao_xunjia_huowu_gg",
"http://www.zmzb.com/xjgshw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物','zbfs':'询价'}), f2],
["qy_zhaobiao_xunjia_gongcheng_gg",
"http://www.zmzb.com/xjgsgc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程','zbfs':'询价'}), f2],
["qy_zhaobiao_xunjia_fuwu_gg",
"http://www.zmzb.com/xjgsfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务','zbfs':'询价'}), f2],
["qy_zhaobiao_xunjia_dianshang_gg",
"http://www.zmzb.com/dsgg/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'电商','zbfs':'询价'}), f2],
]
def work(conp, **args):
est_meta_large(conp, data=data, diqu="中煤集团", **args)
est_html(conp, f=f3, **args)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_zmzb_com"],pageloadtimeout=120,pageLoadStrategy="none")
# driver = webdriver.Chrome()
# url = "http://www.zmzb.com/pbhw/index.jhtml"
# driver.get(url)
# f = f3(driver, 'http://cg.chinacoal.com:7002/b2b/web/two/indexinfoAction.do?actionType=showCgxjDetail&xjbm=E612AB351FE306A97E1B9190F55D3D3E')
# print(f)
# df = f2(driver)
# print(df)
#
# driver=webdriver.Chrome()
# url = "http://www.zmzb.com/xjgshw/index.jhtml"
# driver.get(url)
# for i in range(3, 5):
# df=f1(driver, i)
# print(df.values)
# for j in df[2].values:
# f = f3(driver, j)
# print(f) | zhulong4 | /www_q_z/www_zmzb_com.py | www_zmzb_com.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info
_name_ = "www_ykjtzb_com"
def f1(driver, num):
locator = (By.XPATH, "//ul[@id='list1']/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//a[@class='pag-cur']")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//ul[@id='list1']/li[1]/a").get_attribute('href')[-20:]
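        # Pagination is driven by the site's own page(n) JavaScript function.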
driver.execute_script('page({})'.format(num))
locator = (By.XPATH, "//ul[@id='list1']/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
ul = soup.find("ul", id='list1')
data = []
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.ykjtzb.com' + link
span = a.find_all("em")[-1].text.strip()
tmp = [title, span, href]
data.append(tmp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//ul[@id='list1']/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//div[@class='article-content'][string-length()>50]")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.5)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='article-content')
return div
data = [
["qy_zhaobiao_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg1hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhaobiao_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg1gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhaobiao_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg1fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
# ####
["qy_biangeng_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg6hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_biangeng_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg6gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_biangeng_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg6fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
# ####
["qy_zhongbiaohx_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg2hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhongbiaohx_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg2gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhongbiaohx_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg2fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
###
["qy_zhongbiao_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg3hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '货物'}), f2],
["qy_zhongbiao_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg3gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '工程'}), f2],
["qy_zhongbiao_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg3fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '服务'}), f2],
###
["qy_gqita_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg4hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '货物'}), f2],
["qy_gqita_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg4gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '工程'}), f2],
["qy_gqita_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg4fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '服务'}), f2],
###
["qy_zhaobiao_erci_huowu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg5hw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '货物', 'gglx':'二次公告'}), f2],
["qy_zhaobiao_erci_gongcheng_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg5gc/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '工程', 'gglx':'二次公告'}), f2],
["qy_zhaobiao_erci_fuwu_gg",
"http://www.ykjtzb.com/cms/channel/zbywgg5fw/index.htm",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx': '服务', 'gglx':'二次公告'}), f2],
]
def work(conp, **args):
est_meta(conp, data=data, diqu="兖矿集团", **args)
est_html(conp, f=f3, **args)
# Site URL changed
# Date of change: 2019/7/6
# New URL: http://www.ykjtzb.com/cms/index.htm
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_ykjtzb_com"])
# for d in data[4:]:
# driver=webdriver.Chrome()
# url=d[1]
# print(url)
# driver.get(url)
# df = f2(driver)
# print(df)
# driver = webdriver.Chrome()
# driver.get(url)
#
# df=f1(driver, 1)
# print(df.values)
# for f in df[2].values:
# d = f3(driver, f)
# print(d) | zhulong4 | /www_q_z/www_ykjtzb_com.py | www_ykjtzb_com.py |
import time
from zhulong4.www_q_z import www_qhbidding_com
from zhulong4.www_q_z import www_sinochemitc_com
from zhulong4.www_q_z import www_sztc_com
from zhulong4.www_q_z import www_wiscobidding_com_cn
from zhulong4.www_q_z import www_ykjtzb_com
from zhulong4.www_q_z import www_zeec_cn
from zhulong4.www_q_z import www_zmzb_com
from lmf.dbv2 import db_command
from os.path import join, dirname
from zhulong4.util.conf import get_conp, get_conp1
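# One task_* entry point per site module: resolve the DB connection for the
# module's _name_ via get_conp(), then run that module's work().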
#1
def task_www_qhbidding_com(**args):
conp=get_conp(www_qhbidding_com._name_)
www_qhbidding_com.work(conp,**args)
#2
def task_www_sinochemitc_com(**args):
conp=get_conp(www_sinochemitc_com._name_)
www_sinochemitc_com.work(conp,**args)
#3
def task_www_sztc_com(**args):
conp=get_conp(www_sztc_com._name_)
www_sztc_com.work(conp,pageloadtimeout=120,pageLoadStrategy="none",**args)
#4
def task_www_wiscobidding_com_cn(**args):
conp=get_conp(www_wiscobidding_com_cn._name_)
www_wiscobidding_com_cn.work(conp,**args)
#5
def task_www_ykjtzb_com(**args):
conp=get_conp(www_ykjtzb_com._name_)
www_ykjtzb_com.work(conp,**args)
#6
def task_www_zeec_cn(**args):
conp=get_conp(www_zeec_cn._name_)
www_zeec_cn.work(conp,**args)
#7
def task_www_zmzb_com(**args):
conp=get_conp(www_zmzb_com._name_)
www_zmzb_com.work(conp,pageloadtimeout=200,pageLoadStrategy="none",**args)
# def task_all():
# bg=time.time()
#
# try:
# task_www_zmzb_com()
# task_www_qhbidding_com()
# task_www_sinochemitc_com()
# except:
# print("part1 error!")
# try:
# task_www_sztc_com()
# task_www_wiscobidding_com_cn()
# task_www_ykjtzb_com()
# task_www_zeec_cn()
#
# except:
# print("part2 error!")
#
# ed=time.time()
# cos=int((ed-bg)/60)
#
# print("共耗时%d min"%cos)
#
#
#
# def create_schemas():
# conp=get_conp1('qycg')
# arr=['www_qhbidding_com','www_sinochemitc_com','www_sztc_com',
# 'www_wiscobidding_com_cn','www_ykjtzb_com','www_zeec_cn','www_zmzb_com']
# for diqu in arr:
# sql="create schema if not exists %s"%diqu
# db_command(sql,dbtype="postgresql",conp=conp) | zhulong4 | /www_q_z/task.py | task.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info, est_meta_large
_name_ = "www_sztc_com"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[2]")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[1]/a").get_attribute('href')[-15:]
if "index_" not in url:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index", s, url)
elif num == 1:
url = re.sub("index_[0-9]*", "index", url)
else:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index_[0-9]*", s, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='lb-link')
uls = div.find_all('ul')
data = []
for ul in uls:
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.sztc.com/' + link
span = li.find("span", class_='bidDate').text.strip()
tmp = [title, span, href]
data.append(tmp)
# f = f3(driver, href)
# print(f)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//table[@class='StdInputTable'] | //div[@class='mbox lpInfo']")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
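    # Detail pages come in two layouts: a StdInputTable table or an
    # 'mbox lpInfo' div; try the table first and fall back to the div.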
div = soup.find('table', class_='StdInputTable')
    if div is None:
div = soup.find('div', class_='mbox lpInfo')
return div
data = [
["qy_zhaobiao_gg",
"http://www.sztc.com/bidBulletin/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
["qy_zhaobiao_wuzi_gg",
"http://www.sztc.com/bulkmaterialBulletin/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1, {'gglx':'大宗物资公告'}), f2],
["qy_zgys_gg",
"http://www.sztc.com/preBulletin/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
####
["qy_biangeng_gg",
"http://www.sztc.com/changeBulletin/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
["qy_gqita_zhong_liu_gg",
"http://www.sztc.com/preBidBulletin/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
["qy_yucai_gg",
"http://www.sztc.com/purchaseNotice/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
]
def work(conp, **args):
est_meta_large(conp, data=data, diqu="深圳国际", **args)
est_html(conp, f=f3, **args)
# qy_zhaobiao_gg cannot finish in a single run; f1 does not fetch complete data in one pass
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_sztc_com"])
# driver = webdriver.Chrome()
# url = "http://www.sztc.com/bidBulletin/index.jhtml"
# driver.get(url)
# df = f2(driver)
# print(df)
#
# driver=webdriver.Chrome()
# url = "http://www.sztc.com/bidBulletin/index.jhtml"
# driver.get(url)
# for i in range(3, 5):
# df=f1(driver, i)
# print(df.values)
# for i in df[2].values:
# f = f3(driver, i)
# print(f) | zhulong4 | /www_q_z/www_sztc_com.py | www_sztc_com.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info
_name_ = "www_qhbidding_com"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[2]")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[1]/a").get_attribute('href')[-15:]
if "index_" not in url:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index", s, url)
elif num == 1:
url = re.sub("index_[0-9]*", "index", url)
else:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index_[0-9]*", s, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='lb-link')
uls = div.find_all('ul')
data = []
for ul in uls:
        pagination = ul.find('div', class_='pagination')
        if pagination:  # strip the embedded pager so it is not parsed as a row
            pagination.extract()
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.qhbidding.com:80/' + link
span = li.find("span", class_='bidDate').text.strip()
tmp = [title, span, href]
data.append(tmp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//div[@class='conMain']")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='conMain')
return div
data = [
["qy_zhaobiao_huowu_gg",
"http://www.qhbidding.com/hw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx':'货物'}), f2],
["qy_zhaobiao_gongcheng_gg",
"http://www.qhbidding.com/gc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx':'工程'}), f2],
["qy_zhaobiao_fuwu_gg",
"http://www.qhbidding.com/fw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1, {'lx':'服务'}), f2],
["qy_zhongbiaohx_gg",
"http://www.qhbidding.com/gs/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
["qy_zhongbiao_gg",
"http://www.qhbidding.com/zhbgg/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
]
def work(conp, **args):
est_meta(conp, data=data, diqu="青海机电招标", **args)
est_html(conp, f=f3, **args)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_qhbidding_com"])
# driver = webdriver.Chrome()
# url = "http://www.qhbidding.com/zhbgg/index.jhtml"
# driver.get(url)
# df = f2(driver)
# print(df)
# driver=webdriver.Chrome()
# url = "http://www.qhbidding.com/hw/index.jhtml"
# driver.get(url)
# for i in range(13, 15):
# df=f1(driver, i)
# print(df.values)
# for i in df[2].values:
# f = f3(driver, i)
# print(f) | zhulong4 | /www_q_z/www_qhbidding_com.py | www_qhbidding_com.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info
_name_ = "www_wiscobidding_com_cn"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[1]")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[1]/a").get_attribute('href')[-15:]
if "index_" not in url:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index", s, url)
elif num == 1:
url = re.sub("index_[0-9]*", "index", url)
else:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index_[0-9]*", s, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='lb-link')
uls = div.find_all('ul')
data = []
for ul in uls:
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://www.wiscobidding.com.cn/' + link
span = li.find("span", class_='bidDate').text.strip()
tmp = [title, span, href]
data.append(tmp)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//table[@class='StdInputTable'] | //div[@class='mbox lpInfo']")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('table', class_='StdInputTable')
    if div is None:
div = soup.find('div', class_='mbox lpInfo')
return div
data = [
["qy_gqita_zhao_bian_huowu_gg",
"http://www.wiscobidding.com.cn/gghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_gqita_zhao_bian_gongcheng_gg",
"http://www.wiscobidding.com.cn/gggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_gqita_zhao_bian_fuwu_gg",
"http://www.wiscobidding.com.cn/ggff/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
# # ####
["qy_gqita_zhong_liu_huowu_gg",
"http://www.wiscobidding.com.cn/pbhw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_gqita_zhong_liu_gongcheng_gg",
"http://www.wiscobidding.com.cn/pbgc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_gqita_zhong_liu_fuwu_gg",
"http://www.wiscobidding.com.cn/pbjg/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
####
["qy_gqita_xunjia_huowu_gg",
"http://www.wiscobidding.com.cn/zbhw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物','zbfs':'询价'}), f2],
["qy_gqita_xunjia_gongcheng_gg",
"http://www.wiscobidding.com.cn/zbgc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程','zbfs':'询价'}), f2],
["qy_gqita_xunjia_fuwu_gg",
"http://www.wiscobidding.com.cn/zbjg/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务','zbfs':'询价'}), f2],
# # ####
["qy_gqita_jingjia_huowu_gg",
"http://www.wiscobidding.com.cn/bggghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物','zbfs':'竞价'}), f2],
["qy_gqita_jingjia_fuwu_gg",
"http://www.wiscobidding.com.cn/bgggfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务','zbfs':'竞价'}), f2],
####
["qy_gqita_danyilaiyuan_huowu_gg",
"http://www.wiscobidding.com.cn/dylyhw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物','zbfs':'单一来源'}), f2],
["qy_gqita_danyilaiyuan_fuwu_gg",
"http://www.wiscobidding.com.cn/dylyfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务','zbfs':'单一来源'}), f2],
####
["qy_gqita_jinzheng_huowu_gg",
"http://www.wiscobidding.com.cn/jtgghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物','zbfs':'竞争性谈判'}), f2],
["qy_gqita_jinzheng_fuwu_gg",
"http://www.wiscobidding.com.cn/jtggfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务','zbfs':'竞争性谈判'}), f2],
]
def work(conp, **args):
est_meta(conp, data=data, diqu="武钢集团", **args)
est_html(conp, f=f3, **args)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_wiscobidding_com_cn"])
# driver = webdriver.Chrome()
# url = "http://www.wiscobidding.com.cn/gghw/index.jhtml"
# driver.get(url)
# df = f2(driver)
# print(df)
#
# driver=webdriver.Chrome()
# url = "http://www.wiscobidding.com.cn/bggghw/index.jhtml"
# driver.get(url)
# for i in range(3, 5):
# df=f1(driver, i)
# print(df.values)
# for i in df[2].values:
# f = f3(driver, i)
# print(f) | zhulong4 | /www_q_z/www_wiscobidding_com_cn.py | www_wiscobidding_com_cn.py |
import pandas as pd
import re
from selenium import webdriver
from bs4 import BeautifulSoup
from lmf.dbv2 import db_write
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.select import Select
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import json
import time
from zhulong4.util.etl import est_html, est_meta, add_info
_name_ = "www_zeec_cn"
def f1(driver, num):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
url = driver.current_url
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[1]")
cnum = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
cnum = 1
if num != int(cnum):
val = driver.find_element_by_xpath("//div[@class='lb-link']/ul[1]/li[1]/a").get_attribute('href')[-15:]
if "index_" not in url:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index", s, url)
elif num == 1:
url = re.sub("index_[0-9]*", "index", url)
else:
s = "index_%d" % (num) if num > 1 else "index"
url = re.sub("index_[0-9]*", s, url)
driver.get(url)
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a[not(contains(@href, '%s'))]" % val)
WebDriverWait(driver, 20).until(EC.presence_of_element_located(locator))
page = driver.page_source
soup = BeautifulSoup(page, "html.parser")
div = soup.find("div", class_='lb-link')
uls = div.find_all('ul')
data = []
for ul in uls:
lis = ul.find_all('li')
for li in lis:
a = li.find("a")
try:
title = a['title'].strip()
except:
title = li.find('span', class_='bidLink').text.strip()
link = a["href"]
if 'http' in link:
href = link
else:
href = 'http://60.191.4.146:18600/' + link
span = li.find("span", class_='bidDate').text.strip()
tmp = [title, span, href]
data.append(tmp)
# f = f3(driver, href)
# print(f)
df = pd.DataFrame(data=data)
df['info'] = None
return df
def f2(driver):
locator = (By.XPATH, "//div[@class='lb-link']/ul[1]/li[1]/a")
WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator))
try:
locator = (By.XPATH, "//div[@class='pag-txt']/em[last()]")
num = WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text.strip()
except:
num = 1
driver.quit()
return int(num)
def f3(driver, url):
driver.get(url)
locator = (By.XPATH, "//div[@class='m-bd']")
WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located(locator))
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i = 0
while before != after:
before = len(driver.page_source)
time.sleep(0.1)
after = len(driver.page_source)
i += 1
if i > 5: break
page = driver.page_source
soup = BeautifulSoup(page, 'html.parser')
div = soup.find('div', class_='m-bd')
return div
data = [
["qy_zhaobiao_hw_gg",
"http://60.191.4.146:18600/gghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhaobiao_gc_gg",
"http://60.191.4.146:18600/gggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhaobiao_fw_gg",
"http://60.191.4.146:18600/ggff/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
["qy_biangeng_hw_gg",
"http://60.191.4.146:18600/bggghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_biangeng_gc_gg",
"http://60.191.4.146:18600/bggggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_biangeng_fw_gg",
"http://60.191.4.146:18600/bgggfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
["qy_kaibiao_hw_gg",
"http://60.191.4.146:18600/zbggghw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_kaibiao_gc_gg",
"http://60.191.4.146:18600/zbggggc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_kaibiao_fw_gg",
"http://60.191.4.146:18600/zbgggfw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
["qy_zhongbiaohx_hw_gg",
"http://60.191.4.146:18600/pbhw/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'货物'}), f2],
["qy_zhongbiaohx_gc_gg",
"http://60.191.4.146:18600/pbgc/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'工程'}), f2],
["qy_zhongbiaohx_fw_gg",
"http://60.191.4.146:18600/pbjg/index.jhtml",
["name", "ggstart_time", "href", "info"], add_info(f1,{'lx':'服务'}), f2],
["qy_zhongbiao_gg",
"http://60.191.4.146:18600/jggg/index.jhtml",
["name", "ggstart_time", "href", "info"], f1, f2],
]
def work(conp, **args):
est_meta(conp, data=data, diqu="浙江能源", **args)
est_html(conp, f=f3, **args)
if __name__ == '__main__':
work(conp=["postgres", "since2015", "192.168.3.171", "guoziqiang3", "www_zeec_cn"])
# driver = webdriver.Chrome()
# url = "http://60.191.4.146:18600/jggg/index.jhtml"
# driver.get(url)
# df = f2(driver)
# print(df)
#
# driver=webdriver.Chrome()
# url = "http://60.191.4.146:18600/pbgc/index.jhtml"
# driver.get(url)
# for i in range(3, 5):
# df=f1(driver, i)
# print(df.values)
# for i in df[2].values:
# f = f3(driver, i)
# print(f) | zhulong4 | /www_q_z/www_zeec_cn.py | www_zeec_cn.py |
# *z*Hunter
*z*Hunter is a Graphical User Interface (GUI) tool to visualize and perform basic manipulation of 1D and 2D astronomical spectra.
It was originally developed to help find (hunt for) the redshift *z* of transient sources observed spectroscopically, hence its name.
It uses [`Python 3.9`](https://www.python.org/downloads/release/python-3916/) and is based on the [`pyqtgraph`](https://pyqtgraph.readthedocs.io/en/latest/introduction.html) library for speed (as opposed to the more commonly used [`matplotlib`](https://matplotlib.org/)).
| zhunter | /zhunter-0.10.2.tar.gz/zhunter-0.10.2/README_short.md | README_short.md |
# *z*Hunter
*z*Hunter is a Graphical User Interface (GUI) tool to visualize and perform basic manipulation of 1D and 2D astronomical spectra.
It was originally developed to help find (hunt for) the redshift *z* of transient sources observed spectroscopically, hence its name.
It uses [`Python 3.9`](https://www.python.org/downloads/release/python-3916/) and is based on the [`pyqtgraph`](https://pyqtgraph.readthedocs.io/en/latest/introduction.html) library for speed (as opposed to the more commonly used [`matplotlib`](https://matplotlib.org/)).
# Installation
If you use a virtual environment manager for Python (which we recommend), you can create an environment specific for *z*Hunter with:
```
$ conda env create -f zhunter_conda_env.yaml
```
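Then activate it before installing (assuming the environment defined in the YAML is named `zhunter`):
```
$ conda activate zhunter
```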
## Using pip
```
$ pip install zhunter
```
If you want the latest development version, you can switch to the `dev` branch and run:
```
$ pip install .
```
*Note:* sometimes the pip installation of PyQt5 fails; in this case, try installing PyQt5 with conda:
```
$ conda install pyqt
```
## Launching the GUI
If the installation went smoothly, you can launch the GUI by simply typing in your terminal:
```
$ zhunter
```
You can make sure the code works by loading the 2D file `./example_data/example_2D.fits` (*hint*: GRB redshift is around 6.3).
# Usage
For a summary of the various key bindings discussed below, press `h` (for help).
## Loading a file
To load a file, simply use the `1D File` or `2D File` buttons corresponding to your case. The files should be in a standard `FITS` format.
## Hunting for a redshift
Once you've loaded a file, the hunt begins. You can move around the spectrum by left-clicking and dragging, by right-clicking and zooming, or with the keys `a`, `s`, `d` and `w`.
### From 2D to 1D: extracting the trace
The first thing you want to look for is the trace in the 2D; once you have it, place the red rectangle (you can adjust the width) to encase the continuum as shown below.

### Looking for features
Now that you have your 2D with a good corresponding 1D, you can start looking for the easiest features to identify; these include damped Lyman-alpha absorption (DLA), shown circled in blue below, any emission lines, and common absorption lines.

### Using line ratios to find the redshift
If you don't see any obvious DLA, you can scan through the spectrum to find absorption lines. While the wavelength of each absorption line may be redshifted due to the expansion of the Universe, the ratio between the wavelengths of two lines remains constant. Try to look for doublets, or lines close to each other with similar line profiles; this may indicate they originate from the same absorbing gas and therefore are related.
To calculate the ratio between two wavelengths, place your cursor above the first line and press the `e` key, this should fill the `Lambda_1` box, then move your cursor to the second line and press the `q` key for `Lambda_2`.
You can then calculate the ratio by pressing the corresponding button, and by specifying an error margin you can look up the possible line ratios. If you find too many options for the line ratios, try reducing the error margin if you are confident of your lambda placements.
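As a quick illustration (with made-up wavelengths): the Mg II doublet has rest-frame wavelengths of roughly 2796.35 Å and 2803.53 Å, a ratio of about 1.0026. If `Lambda_1` and `Lambda_2` come out at, say, 5592.7 Å and 5607.1 Å, their ratio matches that of Mg II and the implied redshift is z = 5592.7/2796.35 - 1 ≈ 1.0.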

### Adding an absorber
Finally, if you are confident you have identified the correct transitions, you can select them in the list and use the `I'm feeling lucky` button. This will overplot a number of transitions, indicated by vertical lines, at the corresponding redshift on your 1D spectrum; note that not all of these transitions need to be present in your spectrum: some have low transition probabilities but are included for completeness. The list of lines used for adding absorbers will probably evolve as we work out which ones are most relevant.
This process is illustrated below.

Note: you can also add an absorber manually with the button on the top right. | zhunter | /zhunter-0.10.2.tar.gz/zhunter-0.10.2/README.md | README.md |
# 卓学课堂软文写作全套
## Download
### Docker
```
docker pull apachecn0/zhuoxue-ketang-ruanwen-xiezuo-quantao
docker run -tid -p <port>:80 apachecn0/zhuoxue-ketang-ruanwen-xiezuo-quantao
# Visit http://localhost:{port} to view the documentation
```
### PYPI
```
pip install zhuoxue-ketang-ruanwen-xiezuo-quantao
zhuoxue-ketang-ruanwen-xiezuo-quantao <port>
# Visit http://localhost:{port} to view the documentation
```
### NPM
```
npm install -g zhuoxue-ketang-ruanwen-xiezuo-quantao
zhuoxue-ketang-ruanwen-xiezuo-quantao <port>
# 访问 http://localhost:{port} 查看文档
``` | zhuoxue-ketang-ruanwen-xiezuo-quantao | /zhuoxue_ketang_ruanwen_xiezuo_quantao-2022.10.15.0-py3-none-any.whl/ZhuoxueKetangRuanwenXiezuoQuantao/README.md | README.md |
import os.path
import yaml
LOCAL_DATASET_PATH = '/INPUT/datasets/'
LOCAL_DATALOADER_PATH = '/INPUT/dataloaders/'
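# Maps (data frame, OGB task type) -> OGB loader class name; the name is later
# combined with parameters and resolved via eval() in load_graph_ogb_official().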
OGB_LOAD_MAP = {
('pyg', 'node'): 'PygNodePropPredDataset',
('pyg', 'link'): 'PygLinkPropPredDataset',
('pyg', 'graph'): 'PygGraphPropPredDataset',
('dgl', 'node'): 'DglNodePropPredDataset',
('dgl', 'link'): 'DglLinkPropPredDataset',
('dgl', 'graph'): 'DglGraphPropPredDataset',
('none', 'node'): 'NodePropPredDataset',
('none', 'link'): 'LinkPropPredDataset',
('none', 'graph'): 'GraphPropPredDataset'
}
OGB_GS_LOAD_MAP = {
'ogbl_collab': 'load_ogbl_collab',
'ogbl_ddi': 'load_ogbl_ddi',
'ogbn_arxiv': 'load_ogbn_arxiv',
'ogbn_mag_small': 'load_ogbn_mag',
'ogbn_proteins': 'load_ogbn_proteins'
}
NAS_BASE = '/mnt/zhuque_goofys'
k8s_volumes = {
"data": {
"type": "hostPath",
"field": {
"path": NAS_BASE,
"type": "Directory"
},
"mounts": {
"mountPath": "/zhuque_goofys"
}
}
}
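# Illustrative dataloader YAML for the 'csv' storage format. The field names are
# taken from the parsing code in this module; the values are hypothetical:
#
#   storageFormat: csv
#   platform: nmc
#   dataset: my_graph
#   oidType: int64
#   vertices:
#     person:
#       path: person.csv
#       vidField: id
#       features:
#         - {name: age, type: int}
#   edges:
#     knows:
#       path: knows.csv
#       srcLabel: person
#       dstLabel: person
#       features:
#         - {name: weight, type: double}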
def load_graph(args):
data_loader = args.data_loader
if not data_loader:
raise Exception('please check dataloader parameters')
dataloader_path = os.path.join(LOCAL_DATALOADER_PATH, data_loader + '.yml')
    try:
        with open(dataloader_path, "r", encoding="UTF-8") as dl_file:
            dataloader = yaml.load(dl_file, Loader=yaml.FullLoader)
    except Exception:
        raise Exception('read dataloader file error, path=' + dataloader_path)
    # Check that the required dataloader fields exist
    if 'storageFormat' not in dataloader.keys():
        raise Exception('check the dataloader, storageFormat does not exist')
    storage_format = dataloader['storageFormat']
    if storage_format in ['csv', 'ogb', 'bin'] and 'platform' not in dataloader.keys():
        raise Exception('check the dataloader, platform does not exist')
    # CSV dataset in Zhuque platform format
if storage_format == 'csv':
platform = dataloader['platform']
if platform == 'nmc':
return load_graph_csv_gs(dataloader)
elif platform == 'smc':
import sys
sys.path.append('.')
from wg_load.data_load import load_graph_wg
return load_graph_wg(LOCAL_DATASET_PATH, dataloader['dataset'], 'csv')
elif platform == 'dgl':
return load_graph_csv_dgl(dataloader)
elif platform == 'pyg':
return load_graph_csv_pyg(dataloader)
elif storage_format == 'ogb':
platform = dataloader['platform']
if platform == 'ogbofficial':
return load_graph_ogb_official(dataloader, args.frame_type)
elif platform == 'smc':
import sys
sys.path.append('.')
from wg_load.data_load import load_graph_wg
return load_graph_wg(LOCAL_DATASET_PATH, dataloader['ogbName'], 'ogb')
elif platform == 'nmc':
return load_graph_ogb_gs(dataloader)
# elif storage_format in ('npy', 'npz'):
# return load_graph_numpy(dataloader)
elif storage_format == 'bin':
return load_graph_bin_dgl(dataloader)
elif storage_format == 'other':
return load_graph_other(dataloader)
raise Exception('load data fail')
# Resolve the dataset path
def get_dataset_path(args):
data_loader = args.data_loader
if not data_loader:
raise Exception('please check dataloader parameters')
dataloader_path = os.path.join(LOCAL_DATALOADER_PATH, data_loader + '.yml')
    try:
        with open(dataloader_path, "r", encoding="UTF-8") as dl_file:
            dataloader = yaml.load(dl_file, Loader=yaml.FullLoader)
    except Exception:
        raise Exception('read dataloader file error, path=' + dataloader_path)
    # Check that the required dataloader fields exist
    if 'storageFormat' not in dataloader.keys():
        raise Exception('check the dataloader, storageFormat does not exist')
    storage_format = dataloader['storageFormat']
    if storage_format in ['csv', 'ogb', 'bin'] and 'platform' not in dataloader.keys():
        raise Exception('check the dataloader, platform does not exist')
    # CSV dataset in Zhuque platform format
if storage_format == 'csv':
return os.path.join(LOCAL_DATASET_PATH, dataloader['dataset'])
elif storage_format == 'ogb':
return os.path.join(LOCAL_DATASET_PATH, dataloader['ogbRoot'])
elif storage_format == 'bin':
return os.path.join(LOCAL_DATASET_PATH, dataloader['dataPath'])
elif storage_format == 'other':
return os.path.join(LOCAL_DATASET_PATH, dataloader['dataPath'])
raise Exception('load data fail')
# Load an OGB-format dataset via the official OGB loaders
def load_graph_ogb_official(dataloader, frame_type):
from ogb.graphproppred import PygGraphPropPredDataset, DglGraphPropPredDataset, GraphPropPredDataset
from ogb.linkproppred import PygLinkPropPredDataset, DglLinkPropPredDataset, LinkPropPredDataset
from ogb.nodeproppred import PygNodePropPredDataset, DglNodePropPredDataset, NodePropPredDataset
if not frame_type:
raise Exception('please check frame info')
ogb_name = dataloader['ogbName']
ogb_root = dataloader['ogbRoot']
task = get_ogb_task(ogb_name)
params = []
normal_ogb_name = ogb_name.replace("_", "-")
params.append('name=\'' + normal_ogb_name + '\'')
params.append('root=\'' + ogb_root + '\'')
    # obj_str e.g.: PygLinkPropPredDataset(name='ogbl-ddi', root='...')
data_frame = get_data_frame(frame_type)
obj_str = OGB_LOAD_MAP.get((data_frame, task)) + construct_param(params)
print(obj_str)
return eval(obj_str)
# Load an OGB-format dataset in the GraphScope framework
def load_graph_ogb_gs(dataloader):
import graphscope as gs
from graphscope.dataset.ogbl_collab import load_ogbl_collab
from graphscope.dataset.ogbl_ddi import load_ogbl_ddi
from graphscope.dataset.ogbn_arxiv import load_ogbn_arxiv
from graphscope.dataset.ogbn_mag import load_ogbn_mag
from graphscope.dataset.ogbn_proteins import load_ogbn_proteins
sess = None
ogb_name = dataloader['ogbName']
ogb_root = dataloader['ogbRoot']
normal_ogb_name = ogb_name.replace("-", "_")
if normal_ogb_name not in OGB_GS_LOAD_MAP.keys():
raise Exception(normal_ogb_name + ' is not supported in GraphScope,\n'
'supported list contains: \nogbl_collab\nogbl_ddi \nogbn_arxiv\nogbn_mag\nogbn_proteins\n')
try:
sess = gs.session(addr='127.0.0.1:59001', mount_dataset='/dataset')
print(sess)
params = []
params.append('sess')
params.append('\'' + ogb_root + '\'')
obj_str = OGB_GS_LOAD_MAP.get(normal_ogb_name) + construct_param(params)
# graph = load_ogbn_mag(sess, '/FILES/INPUT/ogbn_mag_small')
print(obj_str)
return eval(obj_str)
except:
print('load ogb graph in GraphScope error')
raise Exception('load ogb graph in GraphScope error')
    finally:
        if sess is not None:
            sess.close()
# Load a NumPy-format (.npy/.npz) dataset
def load_graph_numpy(dataloader):
    import numpy as np  # needed for the eval'd np.load call below
    params = dataloader['params']
    obj_str = 'np.load' + construct_param(params)
    print(obj_str)
    return eval(obj_str)
# Load a bin-format dataset under the DGL framework
def load_graph_bin_dgl(dataloader):
import dgl
from dgl.data import DGLDataset
from dgl.dataloading import GraphDataLoader
class LoadDataset(DGLDataset):
def __init__(self):
super().__init__(name='my_data')
def process(self):
pass
def __getitem__(self, idx):
return self.graph[idx]
def __len__(self):
return len(self.graph)
def load(self):
path = dataloader['dataPath']
graph = dgl.load_graphs(path)[0]
self.graph = graph
        def has_cache(self):
            return os.path.exists(dataloader['dataPath'])
    dataset = LoadDataset()
    # distinct name so the 'dataloader' config dict used by the dataset is not shadowed
    loader = GraphDataLoader(dataset, batch_size=32, drop_last=False, shuffle=True, pin_memory=True)
    return dataset, loader
# Load an 'other'-format dataset by exec'ing user-supplied loading code
def load_graph_other(dataloader):
code = dataloader['code']
data_path = dataloader['dataPath']
print(code)
exec_data = {'data_path': data_path}
exec(code, globals(), exec_data)
return exec_data["data"]
# Load a CSV-format dataset in the GraphScope framework
def load_graph_csv_gs(dataloader):
import graphscope as gs
data_name = dataloader['dataset']
data_path = os.path.join(LOCAL_DATASET_PATH, data_name)
# sess = graphscope.session(mount_dataset="/dataset", k8s_volumes=k8s_volumes, k8s_coordinator_cpu=4,
# k8s_coordinator_mem="8Gi")
sess = gs.session(addr='127.0.0.1:59001', mount_dataset='/dataset')
if dataloader["oidType"] == 'string':
graph = sess.g(oid_type=dataloader["oidType"])
else:
graph = sess.g()
for key, value in dataloader['vertices'].items():
pro = []
for feature in value['features']:
pro.append((feature['name'], feature['type']))
graph = graph.add_vertices(os.path.join(data_path, value['path']), label=key, vid_field=value['vidField'],
properties=pro)
for key, value in dataloader['edges'].items():
pro = []
for feature in value['features']:
pro.append((feature['name'], feature['type']))
graph = graph.add_edges(os.path.join(data_path, value['path']), label=key, src_label=value['srcLabel'],
dst_label=value['dstLabel'], src_field=0, dst_field=1, properties=pro)
return graph
# Load a CSV-format dataset in the PyTorch (PyG) framework
def load_graph_csv_pyg(dataloader):
import os.path as osp
import pandas as pd
import torch
from sentence_transformers import SentenceTransformer
from torch_geometric.data import HeteroData, download_url, extract_zip
from torch_geometric.transforms import RandomLinkSplit, ToUndirected
class SequenceEncoder:
# The 'SequenceEncoder' encodes raw column strings into embeddings.
def __init__(self, model_name='all-MiniLM-L6-v2', device=None):
self.device = device
self.model = SentenceTransformer(model_name, device=device)
@torch.no_grad()
def __call__(self, df):
x = self.model.encode(df.values, show_progress_bar=True,
convert_to_tensor=True, device=self.device)
return x.cpu()
class IdentityEncoder:
def __init__(self, dtype=None):
self.dtype = dtype
def __call__(self, df):
return torch.from_numpy(df.values).view(-1, 1).to(self.dtype)
data_name = dataloader['dataset']
data_path = os.path.join(LOCAL_DATASET_PATH, data_name)
data = HeteroData()
node_map_dic = {}
for key, value in dataloader['vertices'].items():
pro = {}
        for feature in value['features']:
            if feature['type'] == 'string':
                pro[feature['name']] = SequenceEncoder()
            elif feature['type'] == 'int':
                pro[feature['name']] = IdentityEncoder()
print(pro)
node_x, node_mapping = load_pyg_node_csv(os.path.join(data_path, value['path']), index_col=value['vidField'],
encoders=pro)
data[key].x = node_x
node_map_dic[key] = node_mapping
for key, value in dataloader['edges'].items():
pro = {}
        for feature in value['features']:
            if feature['type'] == 'string':
                pro[feature['name']] = SequenceEncoder()
            elif feature['type'] == 'int':
                pro[feature['name']] = IdentityEncoder()
print(pro)
edge_index, edge_label = load_pyg_edge_csv(
os.path.join(data_path, value['path']),
src_index_col='src_vid',
src_mapping=node_map_dic[value['srcLabel']],
dst_index_col='dst_vid',
dst_mapping=node_map_dic[value['dstLabel']],
encoders=pro
)
data[value['srcLabel'], value['edgeLabel'], value['dstLabel']].edge_index = edge_index
data[value['srcLabel'], value['edgeLabel'], value['dstLabel']].edge_label = edge_label
print(data)
return data
# Load a csv-format dataset with the DGL framework (homogeneous graph)
def load_graph_csv_dgl(dataloader):
import dgl
import torch as th
import pandas as pd
data_name = dataloader['dataset']
data_path = os.path.join(LOCAL_DATASET_PATH, data_name)
    g = None
    # Homogeneous graph: only the first edge file is used (note the break below).
    for key, value in dataloader['edges'].items():
edges_data = pd.read_csv(os.path.join(data_path, value['path']))
src = edges_data['src_vid'].to_numpy()
dst = edges_data['dst_vid'].to_numpy()
g = dgl.graph((src, dst))
        pro = []
        for feature in value['features']:
            pro.append(feature['name'])
edges_feature = th.tensor(edges_data[pro].to_numpy())
g.edata['edge'] = edges_feature
break
    # Only the first vertex file is used (note the break below).
    for key, value in dataloader['vertices'].items():
nodes_data = pd.read_csv(os.path.join(data_path, value['path']))
        pro = []
        for feature in value['features']:
            pro.append(feature['name'])
nodes_feature = th.tensor(nodes_data[pro].to_numpy())
g.ndata['node'] = nodes_feature
break
return g
# Determine the OGB task type (node/link/graph) from the dataset-name prefix
def get_ogb_task(data_name):
if 'ogbn' in data_name:
return 'node'
elif 'ogbl' in data_name:
return 'link'
elif 'ogbg' in data_name:
return 'graph'
else:
raise Exception('dataset name is invalid')
# Assemble parameters into a call-argument string, e.g. ['a', 'b'] -> '(a,b)'
def construct_param(params):
    return '(' + ','.join(params) + ')'
def get_data_frame(frame_type):
if frame_type == 'pytorch':
return 'pyg'
elif frame_type == 'dgl':
return 'dgl'
else:
return 'none'
# Load a node csv file for PyG
def load_pyg_node_csv(path, index_col, encoders=None, **kwargs):
import pandas as pd
import torch
df = pd.read_csv(path, index_col=index_col, **kwargs)
mapping = {index: i for i, index in enumerate(df.index.unique())}
x = None
if encoders is not None and len(encoders) != 0:
xs = [encoder(df[col]) for col, encoder in encoders.items()]
x = torch.cat(xs, dim=-1)
return x, mapping
# Load an edge csv file for PyG
def load_pyg_edge_csv(path, src_index_col, src_mapping, dst_index_col, dst_mapping,
encoders=None, **kwargs):
import pandas as pd
import torch
df = pd.read_csv(path, **kwargs)
src = [src_mapping[index] for index in df[src_index_col]]
dst = [dst_mapping[index] for index in df[dst_index_col]]
edge_index = torch.tensor([src, dst])
edge_attr = None
if encoders is not None and len(encoders) != 0:
edge_attrs = [encoder(df[col]) for col, encoder in encoders.items()]
edge_attr = torch.cat(edge_attrs, dim=-1)
return edge_index, edge_attr
if __name__ == '__main__':
    from munch import DefaultMunch
    param = {'dataPath': '/mnt/zhuque_goofys/datasets/test.bin'}
    dataloader = DefaultMunch.fromDict(param)
    dataset, dataloader = load_graph_bin_dgl(dataloader)
    print(dataset)
print(dataloader) | zhuque-dataloader | /zhuque_dataloader-0.3.13-py3-none-any.whl/loader.py | loader.py |
import os
import torch
from mpi4py import MPI
from wholegraph.torch import wholegraph_pytorch as wg
from wg_torch import graph_ops as graph_ops
from wg_torch.wm_tensor import *
from .load_ops import (
load_data_split,
load_node_pro,
load_edge_pro
)
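# Initialize WholeGraph and torch.distributed from the MPI environment, then
# load the converted homogeneous graph plus its train/valid/test split and any
# node/edge properties into shared (optionally NCCL-backed) memory.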
def load_graph_wg(root_dir, graph_name, source, use_nccl=False):
wg.init_lib()
torch.set_num_threads(1)
comma = MPI.COMM_WORLD
shared_comma = comma.Split_type(MPI.COMM_TYPE_SHARED)
os.environ["RANK"] = str(comma.Get_rank())
os.environ["WORLD_SIZE"] = str(comma.Get_size())
# slurm in Selene has MASTER_ADDR env
if "MASTER_ADDR" not in os.environ:
os.environ["MASTER_ADDR"] = "localhost"
if "MASTER_PORT" not in os.environ:
os.environ["MASTER_PORT"] = "12335"
local_rank = shared_comma.Get_rank()
local_size = shared_comma.Get_size()
print("Rank=%d, local_rank=%d" % (local_rank, comma.Get_rank()))
dev_count = torch.cuda.device_count()
assert dev_count > 0
assert local_size <= dev_count
torch.cuda.set_device(local_rank)
torch.distributed.init_process_group(backend="nccl", init_method="env://")
wm_comm = create_intra_node_communicator(
comma.Get_rank(), comma.Get_size(), local_size
)
wm_embedding_comm = None
if use_nccl:
if comma.Get_rank() == 0:
print("Using nccl embeddings.")
wm_embedding_comm = create_global_communicator(
comma.Get_rank(), comma.Get_size()
)
# train set, valid set, test set
data_split = load_data_split(root_dir, graph_name, True)
print("data_split", data_split)
normalized_graph_name = graph_ops.graph_name_normalize(graph_name)
save_dir = os.path.join(root_dir, normalized_graph_name, "converted")
meta_data = graph_ops.load_meta_file(save_dir, normalized_graph_name)
# node property
node_pro = None
if meta_data["nodes"][0]["has_pro"]:
node_pro = load_node_pro(root_dir, graph_name, True)
if node_pro is not None:
print("node_pro_shape", node_pro.shape)
# edge property
edge_pro = None
if meta_data["edges"][0]["has_pro"]:
edge_pro = load_edge_pro(root_dir, graph_name, True)
if edge_pro is not None:
print("edge_pro_shape", edge_pro.shape)
dist_homo_graph = graph_ops.HomoGraph()
use_chunked = True
use_host_memory = False
dist_homo_graph.load(
root_dir,
graph_name,
wm_comm,
use_chunked,
use_host_memory,
wm_embedding_comm,
)
return data_split, dist_homo_graph | zhuque-dataloader | /zhuque_dataloader-0.3.13-py3-none-any.whl/wg_load/data_load.py | data_load.py |
import datetime
import os
import time
import torch
import torch.nn.functional as F
import torchmetrics.functional as MF
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from wg_torch import comm as comm
from wg_torch import embedding_ops as embedding_ops
from wg_torch import graph_ops as graph_ops
from wg_torch.wm_tensor import *
from wholegraph.torch import wholegraph_pytorch as wg
def parse_max_neighbors(num_layer, neighbor_str):
neighbor_str_vec = neighbor_str.split(",")
max_neighbors = []
for ns in neighbor_str_vec:
max_neighbors.append(int(ns))
assert len(max_neighbors) == 1 or len(max_neighbors) == num_layer
if len(max_neighbors) != num_layer:
for i in range(1, num_layer):
max_neighbors.append(max_neighbors[0])
# max_neighbors.reverse()
return max_neighbors
def create_gnn_layers(in_feat_dim, hidden_feat_dim, num_layer, num_head, args):
if args.framework == "dgl":
import dgl
from dgl.nn.pytorch.conv import SAGEConv, GATConv
elif args.framework == "pyg":
from torch_sparse import SparseTensor
from torch_geometric.nn import SAGEConv, GATConv
elif args.framework == "wg":
from wg_torch.gnn.SAGEConv import SAGEConv
from wg_torch.gnn.GATConv import GATConv
gnn_layers = torch.nn.ModuleList()
for i in range(num_layer):
layer_output_dim = hidden_feat_dim // num_head
layer_input_dim = in_feat_dim if i == 0 else hidden_feat_dim
        mean_output = (i == num_layer - 1)
if args.framework == "pyg":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim))
elif args.model == "gat":
concat = not mean_output
gnn_layers.append(
GATConv(
layer_input_dim, layer_output_dim, heads=num_head, concat=concat
)
)
else:
assert args.model == "gcn"
gnn_layers.append(
SAGEConv(layer_input_dim, layer_output_dim, root_weight=False)
)
elif args.framework == "dgl":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim, "mean"))
elif args.model == "gat":
gnn_layers.append(
GATConv(
layer_input_dim,
layer_output_dim,
num_heads=num_head,
allow_zero_in_degree=True,
)
)
else:
assert args.model == "gcn"
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim, "gcn"))
elif args.framework == "wg":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim))
elif args.model == "gat":
gnn_layers.append(
GATConv(
layer_input_dim,
layer_output_dim,
num_heads=num_head,
mean_output=mean_output,
)
)
else:
assert args.model == "gcn"
gnn_layers.append(
SAGEConv(layer_input_dim, layer_output_dim, aggregator="gcn")
)
return gnn_layers
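# Build the framework-specific subgraph for one sampled hop: a
# torch_sparse.SparseTensor adjacency for PyG, a DGL block for DGL, or the raw
# (csr_row_ptr, csr_col_ind, sample_dup_count) triple for WholeGraph layers.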
def create_sub_graph(
target_gid,
target_gid_1,
edge_data,
csr_row_ptr,
csr_col_ind,
sample_dup_count,
add_self_loop: bool,
args
):
if args.framework == "pyg":
neighboor_dst_unique_ids = csr_col_ind
neighboor_src_unique_ids = edge_data[1]
target_neighbor_count = target_gid.size()[0]
if add_self_loop:
self_loop_ids = torch.arange(
0,
target_gid_1.size()[0],
dtype=neighboor_dst_unique_ids.dtype,
device=target_gid.device,
)
edge_index = SparseTensor(
row=torch.cat([neighboor_src_unique_ids, self_loop_ids]).long(),
col=torch.cat([neighboor_dst_unique_ids, self_loop_ids]).long(),
sparse_sizes=(target_gid_1.size()[0], target_neighbor_count),
)
else:
edge_index = SparseTensor(
row=neighboor_src_unique_ids.long(),
col=neighboor_dst_unique_ids.long(),
sparse_sizes=(target_gid_1.size()[0], target_neighbor_count),
)
return edge_index
elif args.framework == "dgl":
if add_self_loop:
self_loop_ids = torch.arange(
0,
target_gid_1.numel(),
dtype=edge_data[0].dtype,
device=target_gid.device,
)
block = dgl.create_block(
(
torch.cat([edge_data[0], self_loop_ids]),
torch.cat([edge_data[1], self_loop_ids]),
),
num_src_nodes=target_gid.size(0),
num_dst_nodes=target_gid_1.size(0),
)
else:
block = dgl.create_block(
(edge_data[0], edge_data[1]),
num_src_nodes=target_gid.size(0),
num_dst_nodes=target_gid_1.size(0),
)
return block
    else:
        assert args.framework == "wg"
        return [csr_row_ptr, csr_col_ind, sample_dup_count]
def layer_forward(layer, x_feat, x_target_feat, sub_graph, args):
if args.framework == "pyg":
x_feat = layer((x_feat, x_target_feat), sub_graph)
elif args.framework == "dgl":
x_feat = layer(sub_graph, (x_feat, x_target_feat))
elif args.framework == "wg":
x_feat = layer(sub_graph[0], sub_graph[1], sub_graph[2], x_feat, x_target_feat)
return x_feat
class LinkPredictionWgGNNModel(torch.nn.Module):
def __init__(
self,
graph: graph_ops.HomoGraph,
num_layer,
hidden_feat_dim,
max_neighbors: str,
args
):
super().__init__()
self.args = args
self.graph = graph
self.num_layer = num_layer
self.hidden_feat_dim = hidden_feat_dim
self.max_neighbors = parse_max_neighbors(num_layer, max_neighbors)
num_head = args.heads if (args.model == "gat") else 1
assert hidden_feat_dim % num_head == 0
in_feat_dim = self.graph.node_feat_shape()[1]
self.gnn_layers = create_gnn_layers(
in_feat_dim, hidden_feat_dim, num_layer, num_head, args
)
        self.mean_output = (args.model == "gat")
        self.add_self_loop = (args.model == "gat")
self.gather_fn = embedding_ops.EmbeddingLookUpModule(need_backward=False)
self.predictor = torch.nn.Sequential(
torch.nn.Linear(hidden_feat_dim, hidden_feat_dim),
torch.nn.ReLU(),
torch.nn.Linear(hidden_feat_dim, hidden_feat_dim),
torch.nn.ReLU(),
torch.nn.Linear(hidden_feat_dim, 1),
)
def gnn_forward(self, ids, exclude_edge_hashset=None):
ids = ids.to(self.graph.id_type()).cuda()
(
target_gids,
edge_indice,
csr_row_ptrs,
csr_col_inds,
sample_dup_counts,
) = self.graph.unweighted_sample_without_replacement(
ids, self.max_neighbors, exclude_edge_hashset=exclude_edge_hashset
)
x_feat = self.gather_fn(target_gids[0], self.graph.node_feat)
# x_feat = self.graph.gather(target_gids[0])
# num_nodes = [target_gid.shape[0] for target_gid in target_gids]
# print('num_nodes %s' % (num_nodes, ))
for i in range(self.num_layer):
x_target_feat = x_feat[: target_gids[i + 1].numel()]
sub_graph = create_sub_graph(
target_gids[i],
target_gids[i + 1],
edge_indice[i],
csr_row_ptrs[i],
csr_col_inds[i],
sample_dup_counts[i],
self.add_self_loop,
self.args
)
x_feat = layer_forward(self.gnn_layers[i], x_feat, x_target_feat, sub_graph, self.args)
if i != self.num_layer - 1:
if self.args.framework == "dgl":
x_feat = x_feat.flatten(1)
x_feat = F.relu(x_feat)
# x_feat = F.dropout(x_feat, self.args.dropout, training=self.training)
if self.args.framework == "dgl" and self.mean_output:
out_feat = x_feat.mean(1)
else:
out_feat = x_feat
return out_feat
def predict(self, h_src, h_dst):
return self.predictor(h_src * h_dst)
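    # Full-batch single-layer inference: each rank processes its contiguous
    # slice of node ids in mini-batches and writes the resulting features
    # into the shared output tensor.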
def fullbatch_single_layer_forward(
self, dist_homo_graph, i, input_feat, output_feat, batch_size
):
start_node_id = (
dist_homo_graph.node_count
* wg.get_rank(dist_homo_graph.wm_comm)
// wg.get_size(dist_homo_graph.wm_comm)
)
end_node_id = (
dist_homo_graph.node_count
* (wg.get_rank(dist_homo_graph.wm_comm) + 1)
// wg.get_size(dist_homo_graph.wm_comm)
)
min_node_count = dist_homo_graph.node_count // wg.get_size(
dist_homo_graph.wm_comm
)
total_node_count = end_node_id - start_node_id
batch_count = max((min_node_count + batch_size - 1) // batch_size, 1)
last_batchsize = total_node_count - (batch_count - 1) * batch_size
embedding_lookup_fn = embedding_ops.EmbeddingLookupFn.apply
for batch_id in range(batch_count):
current_batchsize = (
last_batchsize if batch_id == batch_count - 1 else batch_size
)
batch_start_node_id = start_node_id + batch_id * batch_size
target_ids = torch.arange(
batch_start_node_id,
batch_start_node_id + current_batchsize,
dtype=dist_homo_graph.edges_csr_col.dtype,
device="cuda",
)
(
neighboor_gids_offset,
neighboor_gids_vdata,
neighboor_src_lids,
) = graph_ops.unweighted_sample_without_replacement_single_layer(
target_ids,
dist_homo_graph.edges_csr_row,
dist_homo_graph.edges_csr_col,
-1,
)
(
unique_gids,
neighbor_raw_to_unique_mapping,
unique_output_neighbor_count,
) = torch.ops.wholegraph.append_unique(target_ids, neighboor_gids_vdata)
csr_row_ptr = neighboor_gids_offset
csr_col_ind = neighbor_raw_to_unique_mapping
sample_dup_count = unique_output_neighbor_count
neighboor_count = neighboor_gids_vdata.size()[0]
edge_indice_i = torch.cat(
[
torch.reshape(neighbor_raw_to_unique_mapping, (1, neighboor_count)),
torch.reshape(neighboor_src_lids, (1, neighboor_count)),
]
)
target_ids_i = unique_gids
x_feat = embedding_lookup_fn(target_ids_i, input_feat)
sub_graph = create_sub_graph(
target_ids_i,
target_ids,
edge_indice_i,
csr_row_ptr,
csr_col_ind,
sample_dup_count,
self.add_self_loop,
self.args
)
x_target_feat = x_feat[: target_ids.numel()]
x_feat = layer_forward(self.gnn_layers[i], x_feat, x_target_feat, sub_graph, self.args)
if i != self.num_layer - 1:
if self.args.framework == "dgl":
x_feat = x_feat.flatten(1)
x_feat = F.relu(x_feat)
else:
if self.args.framework == "dgl" and self.mean_output:
x_feat = x_feat.mean(1)
embedding_ops.embedding_2d_sub_tensor_assign(
x_feat, output_feat, batch_start_node_id
)
def forward(self, src_ids, pos_dst_ids, neg_dst_ids):
assert src_ids.shape == pos_dst_ids.shape and src_ids.shape == neg_dst_ids.shape
id_count = src_ids.size(0)
ids = torch.cat([src_ids, pos_dst_ids, neg_dst_ids])
# add both forward and reverse edge into hashset
exclude_edge_hashset = torch.ops.wholegraph.create_edge_hashset(
torch.cat([src_ids, pos_dst_ids]), torch.cat([pos_dst_ids, src_ids])
)
ids_unique, reverse_map = torch.unique(ids, return_inverse=True)
out_feat_unique = self.gnn_forward(
ids_unique, exclude_edge_hashset=exclude_edge_hashset
)
out_feat = torch.nn.functional.embedding(reverse_map, out_feat_unique)
src_feat, pos_dst_feat, neg_dst_feat = torch.split(out_feat, id_count)
scores = self.predict(
torch.cat([src_feat, src_feat]), torch.cat([pos_dst_feat, neg_dst_feat])
)
return scores[:id_count], scores[id_count:] | zhuque | /zhuque-1.0.2-py3-none-any.whl/zhuque_graph/nn/pytorch/model/LinkPredictionWgGNNModel.py | LinkPredictionWgGNNModel.py |
import datetime
import os
import time
import torch
import torch.nn.functional as F
import torchmetrics.functional as MF
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from wg_torch import comm as comm
from wg_torch import embedding_ops as embedding_ops
from wg_torch import graph_ops as graph_ops
from wg_torch.wm_tensor import *
from wholegraph.torch import wholegraph_pytorch as wg
def parse_max_neighbors(num_layer, neighbor_str):
neighbor_str_vec = neighbor_str.split(",")
max_neighbors = []
for ns in neighbor_str_vec:
max_neighbors.append(int(ns))
assert len(max_neighbors) == 1 or len(max_neighbors) == num_layer
if len(max_neighbors) != num_layer:
for i in range(1, num_layer):
max_neighbors.append(max_neighbors[0])
# max_neighbors.reverse()
return max_neighbors
def create_gnn_layers(in_feat_dim, hidden_feat_dim, num_layer, num_head, args):
if args.framework == "dgl":
import dgl
from dgl.nn.pytorch.conv import SAGEConv, GATConv
elif args.framework == "pyg":
from torch_sparse import SparseTensor
from torch_geometric.nn import SAGEConv, GATConv
elif args.framework == "wg":
from wg_torch.gnn.SAGEConv import SAGEConv
from wg_torch.gnn.GATConv import GATConv
gnn_layers = torch.nn.ModuleList()
for i in range(num_layer):
layer_output_dim = hidden_feat_dim // num_head if i != num_layer - 1 else args.classnum
layer_input_dim = in_feat_dim if i == 0 else hidden_feat_dim
        mean_output = (i == num_layer - 1)
if args.framework == "pyg":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim))
elif args.model == "gat":
concat = not mean_output
gnn_layers.append(
GATConv(
layer_input_dim, layer_output_dim, heads=num_head, concat=concat
)
)
else:
assert args.model == "gcn"
gnn_layers.append(
SAGEConv(layer_input_dim, layer_output_dim, root_weight=False)
)
elif args.framework == "dgl":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim, "mean"))
elif args.model == "gat":
gnn_layers.append(
GATConv(
layer_input_dim,
layer_output_dim,
num_heads=num_head,
allow_zero_in_degree=True,
)
)
else:
assert args.model == "gcn"
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim, "gcn"))
elif args.framework == "wg":
if args.model == "sage":
gnn_layers.append(SAGEConv(layer_input_dim, layer_output_dim))
elif args.model == "gat":
gnn_layers.append(
GATConv(
layer_input_dim,
layer_output_dim,
num_heads=num_head,
mean_output=mean_output,
)
)
else:
assert args.model == "gcn"
gnn_layers.append(
SAGEConv(layer_input_dim, layer_output_dim, aggregator="gcn")
)
return gnn_layers
def create_sub_graph(
target_gid,
target_gid_1,
edge_data,
csr_row_ptr,
csr_col_ind,
sample_dup_count,
add_self_loop: bool,
args
):
if args.framework == "pyg":
neighboor_dst_unique_ids = csr_col_ind
neighboor_src_unique_ids = edge_data[1]
target_neighbor_count = target_gid.size()[0]
if add_self_loop:
self_loop_ids = torch.arange(
0,
target_gid_1.size()[0],
dtype=neighboor_dst_unique_ids.dtype,
device=target_gid.device,
)
edge_index = SparseTensor(
row=torch.cat([neighboor_src_unique_ids, self_loop_ids]).long(),
col=torch.cat([neighboor_dst_unique_ids, self_loop_ids]).long(),
sparse_sizes=(target_gid_1.size()[0], target_neighbor_count),
)
else:
edge_index = SparseTensor(
row=neighboor_src_unique_ids.long(),
col=neighboor_dst_unique_ids.long(),
sparse_sizes=(target_gid_1.size()[0], target_neighbor_count),
)
return edge_index
elif args.framework == "dgl":
if add_self_loop:
self_loop_ids = torch.arange(
0,
target_gid_1.numel(),
dtype=edge_data[0].dtype,
device=target_gid.device,
)
block = dgl.create_block(
(
torch.cat([edge_data[0], self_loop_ids]),
torch.cat([edge_data[1], self_loop_ids]),
),
num_src_nodes=target_gid.size(0),
num_dst_nodes=target_gid_1.size(0),
)
else:
block = dgl.create_block(
(edge_data[0], edge_data[1]),
num_src_nodes=target_gid.size(0),
num_dst_nodes=target_gid_1.size(0),
)
return block
    else:
        assert args.framework == "wg"
        return [csr_row_ptr, csr_col_ind, sample_dup_count]
def layer_forward(layer, x_feat, x_target_feat, sub_graph, args):
if args.framework == "pyg":
x_feat = layer((x_feat, x_target_feat), sub_graph)
elif args.framework == "dgl":
x_feat = layer(sub_graph, (x_feat, x_target_feat))
elif args.framework == "wg":
x_feat = layer(sub_graph[0], sub_graph[1], sub_graph[2], x_feat, x_target_feat)
return x_feat
class NodeClassificationWgGNNModel(torch.nn.Module):
def __init__(
self,
graph: graph_ops.HomoGraph,
num_layer,
hidden_feat_dim,
max_neighbors: str,
args
):
super().__init__()
self.args = args
self.graph = graph
self.num_layer = num_layer
self.hidden_feat_dim = hidden_feat_dim
self.max_neighbors = parse_max_neighbors(num_layer, max_neighbors)
num_head = args.heads if (args.model == "gat") else 1
assert hidden_feat_dim % num_head == 0
in_feat_dim = self.graph.node_feat_shape()[1]
self.gnn_layers = create_gnn_layers(
in_feat_dim, hidden_feat_dim, num_layer, num_head, args
)
        self.mean_output = (args.model == "gat")
        self.add_self_loop = (args.model == "gat")
self.gather_fn = embedding_ops.EmbeddingLookUpModule(need_backward=False)
def forward(self, ids):
ids = ids.to(self.graph.id_type()).cuda()
(
target_gids,
edge_indice,
csr_row_ptrs,
csr_col_inds,
sample_dup_counts,
) = self.graph.unweighted_sample_without_replacement(ids, self.max_neighbors)
x_feat = self.gather_fn(target_gids[0], self.graph.node_feat)
# x_feat = self.graph.gather(target_gids[0])
for i in range(self.num_layer):
x_target_feat = x_feat[: target_gids[i + 1].numel()]
sub_graph = create_sub_graph(
target_gids[i],
target_gids[i + 1],
edge_indice[i],
csr_row_ptrs[i],
csr_col_inds[i],
sample_dup_counts[i],
self.add_self_loop,
self.args
)
x_feat = layer_forward(self.gnn_layers[i], x_feat, x_target_feat, sub_graph, self.args)
if i != self.num_layer - 1:
if self.args.framework == "dgl":
x_feat = x_feat.flatten(1)
x_feat = F.relu(x_feat)
x_feat = F.dropout(x_feat, self.args.dropout, training=self.training)
if self.args.framework == "dgl" and self.mean_output:
out_feat = x_feat.mean(1)
else:
out_feat = x_feat
return out_feat | zhuque | /zhuque-1.0.2-py3-none-any.whl/zhuque_graph/nn/pytorch/model/NodeClassificationWgGNNModel.py | NodeClassificationWgGNNModel.py |
import logging
import zmq
import os
from util import *
logger = logging.getLogger(__name__)
class TabooAttacker:
def __init__(self, host="127.0.0.1", port=10086, name="Attacker"):
self.name = name
self.context = zmq.Context()
self.connection = self.context.socket(zmq.REP)
self.connection.bind("tcp://%s:%d" % (host, port))
data = self.connection.recv_json()
if data["code"] == INIT:
logger.info("Connection to server established.")
self.task_setting = data["data"]
self.connection.send_json({
"code": ATTACKER_FEEDBACK,
"data": self.name
})
else:
logger.error("Unknown codes from server, raise error")
raise NotImplementedError
data = self.connection.recv_json()
if data["code"] == WORD_SELECT:
logger.info("Receive word list from judger.")
self.word_list = data["data"]
else:
logger.error("Unknown codes from server, raise error")
raise NotImplementedError
def get_task_setting(self):
return self.task_setting
def select_word(self, idx):
logger.info("Selects the word: %s" % self.word_list[idx])
self.connection.send_json({
"code": WORD_SELECT,
"data": idx
})
self.connection.recv_json()
def attack(self, sent):
self.connection.send_json({
"code": ATTACK,
"data": sent
})
data = self.connection.recv_json()
if data["code"] == DEFENDER_CORRUPT:
logger.info("Defender corrupts")
logging.info("Defender says: %s" % data["data"])
if data["code"] in END_CODE_SET:
if data["code"] == ATTACKER_WIN:
logger.info("Attacker wins")
elif data["code"] == DEFENDER_WIN:
logger.info("Defender wins")
elif data["code"] == DRAW:
logger.info("Draw")
return data
if __name__ == "__main__":
os.system("clear")
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
attacker = TabooAttacker(port=23333)
attacker.select_word(0)
while True:
print("Type your sentence: ", end='')
sent = input().strip()
data = attacker.attack(sent)
if data["code"] in END_CODE_SET:
break | zhx | /zhx-0.0.1.tar.gz/zhx-0.0.1/taboo/attacker.py | attacker.py |
import logging
import zmq
import os
from util import *
logger = logging.getLogger(__name__)
class TabooDefender:
def __init__(self, host="127.0.0.1", port=10087, name="Defender"):
self.name = name
self.context = zmq.Context()
self.connection = self.context.socket(zmq.REP)
self.connection.bind("tcp://%s:%d" % (host, port))
data = self.connection.recv_json()
if data["code"] == INIT:
logger.info("Connection to server established.")
self.task_setting = data["data"]
self.connection.send_json({
"code": DEFENDER_FEEDBACK,
"data": self.name
})
else:
logger.error("Unknown codes from server, raise error")
raise NotImplementedError
def get_task_setting(self):
return self.task_setting
def receive_msg(self):
data = self.connection.recv_json()
if data["code"] == ATTACKER_CORRUPT:
logger.info("Attacker corrupts")
logging.info("Attacker says: %s" % data["data"])
if data["code"] in END_CODE_SET:
if data["code"] == ATTACKER_WIN:
logger.info("Attacker wins")
elif data["code"] == DEFENDER_WIN:
logger.info("Defender wins")
elif data["code"] == DRAW:
logger.info("Draw")
return data
def defend(self, sent):
self.connection.send_json({
"code": DEFEND,
"data": sent
})
def begin_guessing(self):
self.connection.send_json({
"code": ACTIVELY_GUESS,
"data": ACTIVELY_GUESS
})
data = self.connection.recv_json()
return data["data"]
    def guess_word(self, p):
        self.connection.send_json({
            "code": GUESS,
            "data": p
        })
        data = self.connection.recv_json()
        if data["code"] in END_CODE_SET:
            if data["code"] == ATTACKER_WIN:
                logger.info("Attacker wins")
            elif data["code"] == DEFENDER_WIN:
                logger.info("Defender wins")
            elif data["code"] == DRAW:
                logger.info("Draw")
        return data
if __name__ == "__main__":
os.system("clear")
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
defender = TabooDefender()
while True:
data = defender.receive_msg()
if data["code"] in END_CODE_SET:
break
print("Type your sentence: ", end='')
sent = input().strip()
defender.defend(sent) | zhx | /zhx-0.0.1.tar.gz/zhx-0.0.1/taboo/defender.py | defender.py |
import logging
import json
import zmq
import os
from util import *
logger = logging.getLogger(__name__)
class TabooJudger:
def __init__(self, task_setting, attacker_host="127.0.0.1", attacker_port=10086, defender_host="127.0.0.1",
defender_port=10087):
self.task_setting = task_setting
self.history = {
"task_setting": task_setting
}
self.attacker_context = zmq.Context()
self.attacker = self.attacker_context.socket(zmq.REQ)
self.attacker.connect("tcp://%s:%d" % (attacker_host, attacker_port))
self.attacker.send_json({
"code": INIT,
"data": self.task_setting
})
data = self.attacker.recv_json()
if data["code"] != ATTACKER_FEEDBACK:
logger.error("Unknown code, bad attacker.")
raise NotImplementedError
else:
self.attacker_name = data["data"]
logger.info("Connected to attacker %s:%d" % (attacker_host, attacker_port))
self.defender_context = zmq.Context()
self.defender = self.defender_context.socket(zmq.REQ)
self.defender.connect("tcp://%s:%d" % (defender_host, defender_port))
self.defender.send_json({
"code": INIT,
"data": self.task_setting
})
data = self.defender.recv_json()
if data["code"] != DEFENDER_FEEDBACK:
logger.error("Unknown code, bad defender.")
raise NotImplementedError
else:
self.defender_name = data["data"]
logger.info("Connected to defender %s:%d" % (defender_host, defender_port))
def select_word(self, word_list):
self.history["word_list"] = word_list
logger.info("Word candidate set: %s" % json.dumps(word_list, ensure_ascii=False))
self.attacker.send_json({
"code": WORD_SELECT,
"data": word_list
})
data = self.attacker.recv_json()
if data["code"] != WORD_SELECT:
logger.error("Unknown code, bad attacker.")
raise NotImplementedError
self.word = word_list[data["data"]]
self.history["select_word"] = self.word
logger.info("Attacker selects the word: %s" % self.word)
self.attacker.send_json({
"code": RECEIVED,
"data": RECEIVED
})
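    # Referee loop: relay each attacker utterance to the defender and vice
    # versa, validate every sentence with the formatter/ruler, and end the
    # game when the defender says the taboo word or actively guesses it.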
def play_game(self, checker, formatter, ruler):
self.history["sentences"] = []
while True:
attack_msg = self.attacker.recv_json()
if attack_msg["code"] != ATTACK:
logger.error("Unknown code, bad attacker.")
raise NotImplementedError
attack_msg = formatter.format(attack_msg["data"])
self.history["sentences"].append(attack_msg)
logger.info("Attacker says: %s" % attack_msg)
if not ruler.check_available(attack_msg) or not ruler.check_relevance(attack_msg,
self.history["sentences"]):
logger.error("Bad quality of attacker")
self.history["result"] = "Bad quality of attacker."
self.attacker.send_json({
"code": BAD_ATTACKER,
"data": BAD_ATTACKER
})
self.defender.send_json({
"code": BAD_ATTACKER,
"data": BAD_ATTACKER
})
break
self.defender.send_json({
"code": DEFEND,
"data": attack_msg
})
defend_msg = self.defender.recv_json()
if defend_msg["code"] == ACTIVELY_GUESS:
guess_list = self.history["word_list"]
self.history["guess_word_list"] = guess_list
self.defender.send_json({
"code": GUESS_LIST,
"data": self.history["word_list"]
})
data = self.defender.recv_json()
if data["code"] != GUESS:
logger.error("Unknown code, bad defender.")
raise NotImplementedError
self.history["guess_id"] = data["data"]
if guess_list[data["data"]] == self.history["word"]:
self.attacker.send_json({
"code": DEFENDER_WIN,
"data": guess_list[data["data"]]
})
self.defender.send_json({
"code": DEFENDER_WIN,
"data": DEFENDER_WIN
})
self.history["result"] = "Defender wins."
logger.info("Defender wins")
break
else:
self.attacker.send_json({
"code": ATTACKER_WIN,
"data": {
"guess_list": guess_list,
"idx": data["data"]
}
})
self.defender.send_json({
"code": ATTACKER_WIN,
"data": self.word
})
self.history["result"] = "Attacker wins."
logger.info("Attacker wins")
break
elif defend_msg["code"] == DEFEND:
defend_msg = formatter.format(defend_msg["data"])
self.history["sentences"].append(defend_msg)
logger.info("Defender says: %s" % defend_msg)
if not ruler.check_available(defend_msg) or not ruler.check_relevance(defend_msg,
self.history["sentences"]):
logger.error("Bad quality of defender")
self.history["result"] = "Bad quality of defender."
self.attacker.send_json({
"code": BAD_DEFENDER,
"data": BAD_DEFENDER
})
self.defender.send_json({
"code": BAD_DEFENDER,
"data": BAD_DEFENDER
})
break
if checker.check(defend_msg, self.word):
self.attacker.send_json({
"code": ATTACKER_WIN,
"data": defend_msg
})
self.defender.send_json({
"code": ATTACKER_WIN,
"data": self.word
})
self.history["result"] = "Attacker wins."
logger.info("Attacker wins")
break
else:
self.attacker.send_json({
"code": ATTACK,
"data": defend_msg
})
else:
logger.error("Unknown code, bad defender.")
raise NotImplementedError
if __name__ == "__main__":
os.system("clear")
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
judger = TabooJudger({}, attacker_port=23333)
judger.select_word(["apple", "peach", "ubuntu", "windows", "hello", "what", "fuck"])
from taboo import formatter, ruler, checker
judger.play_game(checker.TabooEnglishChecker(), formatter.TabooStupidFormatter(), ruler.TabooStupidRuler()) | zhx | /zhx-0.0.1.tar.gz/zhx-0.0.1/taboo/judger.py | judger.py |
# My Dash Component
My Dash Component is a Dash component library.
Get started with:
1. Install Dash and its dependencies: https://dash.plot.ly/installation
2. Run `python usage.py`
3. Visit http://localhost:8050 in your web browser
## Contributing
See [CONTRIBUTING.md](./CONTRIBUTING.md)
### Install dependencies
If you have selected install_dependencies during the prompt, you can skip this part.
1. Install npm packages
```
$ npm install
```
2. Create a virtual env and activate.
```
$ virtualenv venv
$ . venv/bin/activate
```
_Note: venv\Scripts\activate for windows_
3. Install python packages required to build components.
```
$ pip install -r requirements.txt
```
4. Install the python packages for testing (optional)
```
$ pip install -r tests/requirements.txt
```
### Write your component code in `src/lib/components/MyTree.react.js`.
- The demo app is in `src/demo` and you will import your example component code into your demo app.
- Test your code in a Python environment:
1. Build your code
```
$ npm run build
```
2. Run and modify the `usage.py` sample dash app:
```
$ python usage.py
```
- Write tests for your component.
    - A sample test is available in `tests/test_usage.py`; it loads `usage.py`, and you can then automate interactions with selenium. A minimal sketch is shown after this list.
    - Run the tests with `$ pytest tests`.
    - The Dash team uses these types of integration tests extensively. Browse the Dash component code on GitHub for more examples of testing (e.g. https://github.com/plotly/dash-core-components)
- Add custom styles to your component by putting your custom CSS files into your distribution folder (`mdc`).
- Make sure that they are referenced in `MANIFEST.in` so that they get properly included when you're ready to publish your component.
- Make sure the stylesheets are added to the `_css_dist` dict in `mdc/__init__.py` so dash will serve them automatically when the component suite is requested.
- [Review your code](./review_checklist.md)
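Here is a minimal sketch of such an integration test, assuming the demo app lives in `usage.py` at the repo root and renders an element with id `input` (that id is a placeholder — use whatever id your demo app actually renders). The `dash_duo` fixture comes from the `dash[testing]` extra:
```
from dash.testing.application_runners import import_app

def test_render_component(dash_duo):
    # Start the demo app from usage.py in a local test server.
    app = import_app("usage")
    dash_duo.start_server(app)
    # Wait for the placeholder element to render, then check for JS errors.
    dash_duo.wait_for_element("#input", timeout=4)
    assert dash_duo.get_logs() == []
```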
### Create a production build and publish:
1. Build your code:
```
$ npm run build
```
2. Create a Python tarball
```
$ python setup.py sdist
```
This distribution tarball will get generated in the `dist/` folder
3. Test your tarball by copying it into a new environment and installing it locally:
```
$ pip install mdc-0.0.1.tar.gz
```
4. If it works, then you can publish the component to NPM and PyPI:
1. Publish on PyPI
```
$ twine upload dist/*
```
2. Cleanup the dist folder (optional)
```
$ rm -rf dist
```
3. Publish on NPM (Optional if chosen False in `publish_on_npm`)
```
$ npm publish
```
_Publishing your component to NPM will make the JavaScript bundles available on the unpkg CDN. By default, Dash serves the component library's CSS and JS locally, but if you choose to publish the package to NPM you can set `serve_locally` to `False` and you may see faster load times._
5. Share your component with the community! https://community.plot.ly/c/dash
1. Publish this repository to GitHub
2. Tag your GitHub repository with the plotly-dash tag so that it appears here: https://github.com/topics/plotly-dash
3. Create a post in the Dash community forum: https://community.plot.ly/c/dash
| zhy-dash-comp | /zhy_dash_comp-0.0.2.tar.gz/zhy_dash_comp-0.0.2/README.md | README.md |
from dli.dli_client import DliClient
from dli.download_job import DownloadJob
from dli.logger import logger
import pandas as pd
import time
region = "cn-north-4"
endpoint = "dli.cn-north-4.myhuaweicloud.com"
obs_endpoint = ""
project_id = "0dfc32615b00f3802f12c00408441dc6"
username = ""
ak = "N5VJSZR9AIQXHBFJTLLU"
sk = "4iRGqUc8mGYGnHuYehbtVpBsMLvbtsIlQ91F18ZM"
queue_name = "schedule_normal_sql"
# "electricity" project environment: the settings below override the defaults above
endpoint = "dli.cn-north-4.myhuaweicloud.com"
project_id = "0e837d51b400f3452fadc002163f58d5"
ak = "SCWIFW2DSFAXUI4V3N5P"
sk = "4LYu6ZCjmM8lTDz7hBVHOt2mEKBNf3gG4LIuNaEu"
def print_res(download_job):
    status = download_job.get_download_status()
    while status not in ('FINISHED', 'CANCELLED'):
        status = download_job.get_download_status()
        time.sleep(1)
    obs_reader = download_job.create_reader()
    s = []
    count = 0
    for record in obs_reader:
        count += 1
        s.append(record)
    logger.info("total records: %d" % count)
    return s
def read_sql(sql,target_columns=None):
kwargs = {
'region': region,
'project_id': project_id,
'auth_mode': 'aksk',
'ak': ak,
'sk': sk,
'endpoint': endpoint,
}
dli_client = DliClient(**kwargs)
sql_conf = {'spark.sql.enableToString': 'false' , 'spark.sql.adaptive.join.enabled': 'true', 'spark.sql.adaptive.enabled':'true',
'spark.sql.adaptive.skewedJoin.enabled':'true','spark.sql.adaptive.enableToString':'false','spark.sql.adaptive.skewedPartitionMaxSplits':'10'}
# logger.info("download specify table data")
# print_result(DownloadJob(dli_client, queue_name, "tpch", "nation"))
a = time.time()
logger.info("download query result")
sql_job = dli_client.execute_sql(sql, queue_name=queue_name,options=sql_conf)
    if target_columns is None:
        return 0
    df_copy = print_res(DownloadJob(dli_client, queue_name, "", "", job=sql_job))
    # Each record stringifies into a Python-literal row; eval turns it back into a list/tuple.
    df_copys = [eval(str(x)) for x in df_copy]
    if target_columns:
        df_features = pd.DataFrame(df_copys, columns=target_columns)
    else:
        df_features = pd.DataFrame(df_copys)
    b = time.time()
    print("elapsed: {:.2f} min".format((b - a) / 60))
    return df_features
if __name__ == '__main__':
    # Manual smoke test; the table name below is a placeholder, not a real DLI table.
    target_columns = ['user_id', 'expire_date_start']
    print(read_sql("SELECT user_id, expire_date_start FROM t_user LIMIT 10", target_columns)) | zhy-tools | /zhy_tools-0.0.2.tar.gz/zhy_tools-0.0.2/zhidian_dli.py | zhidian_dli.py
<!-- Improved compatibility of back to top link: See: https://github.com/othneildrew/Best-README-Template/pull/73 -->
<a name="readme-top"></a>
<!-- PROJECT LOGO -->
<br />
<div align="center">
<a href="https://github.com/othneildrew/Best-README-Template">
<img src="images/logo.png" alt="Logo" width="80" height="80">
</a>
<h3 align="center">zhy_tools</h3>
</div>
<!-- TABLE OF CONTENTS -->
<details>
<summary>Table of Contents</summary>
<ol>
<li>
<a href="#about-the-project">About The Project</a>
</li>
<li>
<a href="#getting-started">Getting Started</a>
<ul>
<li><a href="#prerequisites">Prerequisites</a></li>
<li><a href="#installation">Installation</a></li>
</ul>
</li>
<li><a href="#usage">Usage</a></li>
<li><a href="#roadmap">Roadmap</a></li>
<li><a href="#contributing">Contributing</a></li>
<li><a href="#license">License</a></li>
<li><a href="#contact">Contact</a></li>
<li><a href="#acknowledgments">Acknowledgments</a></li>
</ol>
</details>
<!-- ABOUT THE PROJECT -->
## About The Project
This project is built for normal work.
It contains timer, tag and so on sub-project for different kinds of functions which are used for common job.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- GETTING STARTED -->
## Getting Started
To get zhy_tools up and running locally, follow these steps.
### Prerequisites
Install the package from PyPI:
* pip install
```sh
pip install zhy_tools
```
<!-- USAGE EXAMPLES -->
## Usage
Import the sub-tools you need (for example, the timer utilities) from `zhy_tools` in your own scripts; a hypothetical sketch follows.
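The snippet below is an illustration only — the `timer` decorator name and its import path are guesses, not confirmed zhy_tools API:
```python
# Hypothetical usage sketch; the names below are assumptions.
from zhy_tools import timer

@timer  # assumed decorator that reports how long the call took
def slow_job():
    return sum(i * i for i in range(10_000_000))

print(slow_job())
```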
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- LICENSE -->
## License
Distributed under the MIT License. See `LICENSE` for more information.
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- CONTACT -->
## Contact
Your Name - [email protected]
<p align="right">(<a href="#readme-top">back to top</a>)</p>
<!-- ACKNOWLEDGMENTS -->
## Acknowledgments
Helpful resources this project and README draw on:
* [Choose an Open Source License](https://choosealicense.com)
* [GitHub Emoji Cheat Sheet](https://www.webpagefx.com/tools/emoji-cheat-sheet)
* [Malven's Flexbox Cheatsheet](https://flexbox.malven.co/)
* [Malven's Grid Cheatsheet](https://grid.malven.co/)
* [Img Shields](https://shields.io)
* [GitHub Pages](https://pages.github.com)
* [Font Awesome](https://fontawesome.com)
* [React Icons](https://react-icons.github.io/react-icons/search)
<p align="right">(<a href="#readme-top">back to top</a>)</p>
| zhy-tools | /zhy_tools-0.0.2.tar.gz/zhy_tools-0.0.2/README.md | README.md |