metadata | text
---|---|
{
"source": "jmchilton/bdbag",
"score": 2
} |
#### File: bdbag/test/test_cli.py
```python
import atexit
import unittest
import subprocess
from os.path import join as ospj
from test.test_common import BaseTest
ARGS = ['python', 'bdbag/bdbag_cli.py']
logfile = open('test_cli.log', mode='w')
atexit.register(logfile.close)
class TestCli(BaseTest):
def setUp(self):
super(TestCli, self).setUp()
def tearDown(self):
super(TestCli, self).tearDown()
logfile.flush()
def _test_successful_invocation(self, args, expected=None):
output = ''
try:
output = subprocess.check_output(args, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
output = e.output
self.fail(output)
finally:
logfile.writelines(output)
if expected:
self.assertExpectedMessages(expected, output)
def test_create(self):
args = ARGS + [self.test_data_dir]
logfile.writelines(self.getTestHeader('create bag', args))
self._test_successful_invocation(args)
def test_update(self):
args = ARGS + [self.test_bag_dir, '--update']
logfile.writelines(self.getTestHeader('update bag', args))
with open(ospj(self.test_bag_dir, 'data', 'NEWFILE.txt'), 'w') as nf:
nf.write('Additional file added via unit test.')
self._test_successful_invocation(args, ["NEWFILE.txt"])
def test_archive(self):
args = ARGS + [self.test_bag_dir, '--archive', 'zip']
logfile.writelines(self.getTestHeader('archive bag', args))
self._test_successful_invocation(args, ["Created bag archive"])
def test_extract(self):
args = ARGS + [ospj(self.test_archive_dir, 'test-bag.zip')]
logfile.writelines(self.getTestHeader('extract bag', args))
self._test_successful_invocation(args, ["test-bag.zip was successfully extracted to directory"])
def test_resolve_fetch(self):
pass
def test_validate_full(self):
args = ARGS + [self.test_bag_dir, '--validate', 'full']
logfile.writelines(self.getTestHeader('validate bag', args))
self._test_successful_invocation(args, ["test-bag is valid"])
def test_validate_fast(self):
args = ARGS + [self.test_bag_dir, '--validate', 'fast']
logfile.writelines(self.getTestHeader('validate bag', args))
self._test_successful_invocation(args, ["test-bag is valid"])
def test_validate_structure(self):
args = ARGS + [self.test_bag_dir, '--validate', 'structure']
logfile.writelines(self.getTestHeader('validate bag', args))
self._test_successful_invocation(args, ["test-bag is a valid bag structure"])
def test_validate_profile(self):
args = ARGS + [ospj(self.test_archive_dir, 'test-bag.zip'), '--validate-profile']
logfile.writelines(self.getTestHeader('validate-profile', args))
self._test_successful_invocation(
args, ["Bag structure conforms to specified profile", "Bag serialization conforms to specified profile"])
class TestCliArgParsing(BaseTest):
test_type = "Arg parsing test"
def setUp(self):
super(TestCliArgParsing, self).setUp()
def tearDown(self):
super(TestCliArgParsing, self).tearDown()
logfile.flush()
def _test_bad_argument_error_handling(self, args, expected):
output = ''
try:
output = subprocess.check_output(args, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
output = e.output
self.assertEqual(2, e.returncode)
finally:
logfile.writelines(output)
self.assertExpectedMessages(expected, output)
def test_create_bag_already_exists(self):
args = ARGS + [self.test_bag_dir]
logfile.writelines(self.getTestHeader('create bag already exists', args))
output = ''
try:
output = subprocess.check_output(args, stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
output = e.output
self.fail(output)
finally:
logfile.writelines(output)
self.assertExpectedMessages(["is already a bag"], output)
def test_create_bag_bad_path(self):
args = ARGS + ['./not_found']
logfile.writelines(self.getTestHeader('create bag with bad path', args))
self._test_bad_argument_error_handling(args, ["Error: file or directory not found"])
def test_create_bag_archive_from_existing_archive(self):
args = ARGS + [ospj(self.test_archive_dir, 'test-bag.zip'),
'--archive', 'tgz']
logfile.writelines(self.getTestHeader('create bag from existing archive', args))
self._test_bad_argument_error_handling(
args, ["Error: A bag archive cannot be created from an existing bag archive"])
def test_set_checksum_on_existing_archive(self):
args = ARGS + [ospj(self.test_archive_dir, 'test-bag.zip'),
'--checksum', 'md5']
logfile.writelines(self.getTestHeader('--checksum on existing archive', args))
self._test_bad_argument_error_handling(
args, ["Error: A checksum manifest cannot be added to an existing bag archive"])
def test_update_existing_archive(self):
args = ARGS + [ospj(self.test_archive_dir, 'test-bag.zip'),
'--update']
logfile.writelines(self.getTestHeader('--update an existing archive file', args))
self._test_bad_argument_error_handling(
args, ["Error: An existing bag archive cannot be updated in-place"])
def test_update_with_resolve_fetch(self):
args = ARGS + [ospj(self.test_bag_dir),
'--update',
'--resolve-fetch', 'all']
logfile.writelines(self.getTestHeader('--update with --resolve-fetch', args))
self._test_bad_argument_error_handling(args, ["argument is not compatible"])
def test_remote_manifest_with_resolve_fetch(self):
args = ARGS + [ospj(self.test_bag_dir),
'--resolve-fetch', 'all',
'--remote-file-manifest', ospj(self.test_config_dir, 'test-fetch-manifest.json')]
logfile.writelines(self.getTestHeader('--remote-file-manifest with --resolve-fetch', args))
self._test_bad_argument_error_handling(args, ["argument is not compatible"])
def test_checksum_without_update(self):
args = ARGS + [ospj(self.test_bag_dir),
'--checksum', 'md5']
logfile.writelines(self.getTestHeader('--checksum without --update', args))
self._test_bad_argument_error_handling(
args, ["an existing bag requires the", "argument in order to apply any changes"])
def test_remote_file_manifest_without_update(self):
args = ARGS + [ospj(self.test_bag_dir),
'--remote-file-manifest', ospj(self.test_config_dir, 'test-fetch-manifest.json')]
logfile.writelines(self.getTestHeader('--remote-file-manifest without --update', args))
self._test_bad_argument_error_handling(
args, ["an existing bag requires the", "argument in order to apply any changes"])
def test_metadata_file_without_update(self):
args = ARGS + [ospj(self.test_bag_dir),
'--metadata-file', ospj(self.test_config_dir, 'test-metadata.json')]
logfile.writelines(self.getTestHeader('--metadata-file without --update', args))
self._test_bad_argument_error_handling(
args, ["an existing bag requires the", "argument in order to apply any changes"])
def test_prune_manifests_without_update(self):
args = ARGS + [ospj(self.test_bag_dir),
'--prune-manifests']
logfile.writelines(self.getTestHeader('--prune-manifests without --update', args))
self._test_bad_argument_error_handling(
args, ["an existing bag requires the", "argument in order to apply any changes"])
def test_skip_manifests_without_update(self):
args = ARGS + [ospj(self.test_bag_dir),
'--skip-manifests']
logfile.writelines(self.getTestHeader('--skip-manifests without --update', args))
self._test_bad_argument_error_handling(
args, ["Specifying", "requires the", "argument"])
if __name__ == '__main__':
unittest.main()
``` |
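The tests above drive `bdbag_cli.py` exclusively through `subprocess`. As a standalone illustration of that same invocation pattern, here is a minimal sketch; it assumes the relative `bdbag/bdbag_cli.py` layout used by `ARGS`, and `./my_data_dir` is a hypothetical directory to bag.

```python
import subprocess

# Same command prefix the tests use; assumes the bdbag checkout layout.
ARGS = ['python', 'bdbag/bdbag_cli.py']


def run_bdbag(extra_args):
    """Run the bdbag CLI and return its combined stdout/stderr."""
    try:
        return subprocess.check_output(ARGS + extra_args,
                                       stderr=subprocess.STDOUT,
                                       universal_newlines=True)
    except subprocess.CalledProcessError as e:
        # Argument errors exit with code 2 in the tests above; return the output anyway.
        return e.output


if __name__ == '__main__':
    # './my_data_dir' is a hypothetical directory to turn into a bag.
    print(run_bdbag(['./my_data_dir']))
    print(run_bdbag(['./my_data_dir', '--validate', 'fast']))
```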
{
"source": "jmchilton/cloudman",
"score": 2
} |
#### File: services/apps/lwr.py
```python
import os
from cm.util import paths
from cm.util import misc
from cm.services import service_states
from cm.services import ServiceRole
from cm.services import ServiceDependency
from cm.services.apps import ApplicationService
import logging
log = logging.getLogger('cloudman')
INVOKE_SUCCESS = "Successfully invoked LWR."
INVOKE_FAILURE = "Error invoking LWR."
DEFAULT_LWR_PORT = 8913
class LwrService(ApplicationService):
def __init__(self, app):
super(LwrService, self).__init__(app)
self.lwr_home = self.app.path_resolver.lwr_home
self.lwr_port = DEFAULT_LWR_PORT
self.name = ServiceRole.to_string(ServiceRole.LWR)
self.svc_roles = [ServiceRole.LWR]
self.dependencies = [
ServiceDependency(self, ServiceRole.SGE), # Well someday anyway :)
            ServiceDependency(self, ServiceRole.GALAXY_TOOLS)  # Any way to make this depend on where LWR is installed?
]
def __rel_path(self, *args):
return os.path.join(self.lwr_home, *args)
def __ini_path(self):
return self.__rel_path("server.ini")
def _check_lwr_running(self):
return self._port_bound(self.lwr_port)
def start(self):
self.state = service_states.STARTING
self.status()
if not self.state == service_states.RUNNING:
self._setup()
started = self._run("--daemon")
if not started:
log.warn("Failed to setup or run LWR server.")
self.start = service_states.ERROR
def _setup(self):
ini_path = self.__ini_path()
if not os.path.exists(ini_path):
misc.run("cp '%s.sample' '%s'" % (ini_path, ini_path))
# TODO: Configure LWR.
def remove(self, synchronous=False):
log.info("Removing '%s' service" % self.name)
super(LwrService, self).remove(synchronous)
self.state = service_states.SHUTTING_DOWN
log.info("Shutting down LWR service...")
if self._run("--stop-daemon"):
self.state = service_states.SHUT_DOWN
# TODO: Handle log files.
else:
log.info("Failed to shutdown down LWR service...")
self.state = service_states.ERROR
def _run(self, args):
command = '%s - galaxy -c "bash %s/run.sh %s"' % (
paths.P_SU, self.lwr_home, args)
return misc.run(command, INVOKE_FAILURE, INVOKE_SUCCESS)
def status(self):
if self.state == service_states.SHUTTING_DOWN or \
self.state == service_states.SHUT_DOWN or \
self.state == service_states.UNSTARTED or \
self.state == service_states.WAITING_FOR_USER_ACTION:
pass
elif self._check_daemon('lwr'):
if self._check_lwr_running():
self.state = service_states.RUNNING
elif self.state != service_states.STARTING:
log.error("LWR error; LWR not runnnig")
self.state = service_states.ERROR
```
#### File: cm/util/worker.py
```python
import commands
import grp
import logging
import os
import os.path
import pwd
import subprocess
import threading
import datetime as dt
import shutil
import json
from cm.util.bunch import Bunch
from cm.util import misc, comm, paths
from cm.util.manager import BaseConsoleManager
from cm.services import ServiceRole
from cm.services.apps.pss import PSSService
from cm.services.data.filesystem import Filesystem
from cm.services.apps.hadoop import HadoopService
from cm.services.apps.htcondor import HTCondorService
from cm.services.apps import sge
log = logging.getLogger('cloudman')
sge_install_template = """
SGE_ROOT="/opt/sge"
SGE_QMASTER_PORT="6444"
SGE_EXECD_PORT="6445"
SGE_ENABLE_SMF="false"
SGE_CLUSTER_NAME="GalaxyCloudMan"
SGE_JMX_PORT=""
SGE_JMX_SSL="false"
SGE_JMX_SSL_CLIENT="false"
SGE_JMX_SSL_KEYSTORE=""
SGE_JMX_SSL_KEYSTORE_PW=""
SGE_JVM_LIB_PATH=""
SGE_ADDITIONAL_JVM_ARGS=""
CELL_NAME="default"
ADMIN_USER=""
QMASTER_SPOOL_DIR="/opt/sge/default/spool/qmaster"
EXECD_SPOOL_DIR="/opt/sge/default/spool/execd"
GID_RANGE="20000-20100"
SPOOLING_METHOD="classic"
DB_SPOOLING_SERVER="none"
DB_SPOOLING_DIR="/opt/sge/default/spooldb"
PAR_EXECD_INST_COUNT="20"
ADMIN_HOST_LIST="%s"
SUBMIT_HOST_LIST="%s"
EXEC_HOST_LIST="%s"
EXECD_SPOOL_DIR_LOCAL=""
HOSTNAME_RESOLVING="true"
SHELL_NAME="ssh"
COPY_COMMAND="scp"
DEFAULT_DOMAIN="none"
ADMIN_MAIL="none"
ADD_TO_RC="false"
SET_FILE_PERMS="true"
RESCHEDULE_JOBS="wait"
SCHEDD_CONF="1"
SHADOW_HOST=""
EXEC_HOST_LIST_RM=""
REMOVE_RC="false"
WINDOWS_SUPPORT="false"
WIN_ADMIN_NAME="Administrator"
WIN_DOMAIN_ACCESS="false"
CSP_RECREATE="true"
CSP_COPY_CERTS="false"
CSP_COUNTRY_CODE="DE"
CSP_STATE="Germany"
CSP_LOCATION="Building"
CSP_ORGA="Organisation"
CSP_ORGA_UNIT="Organisation_unit"
CSP_MAIL_ADDRESS="<EMAIL>"
"""
# Worker states
worker_states = Bunch(
WAKE='Wake',
INITIAL_STARTUP='Startup',
WAIT_FOR_MASTER_PKEY='Startup',
WAIT_FOR_SGE='Startup',
READY='Ready',
SHUTTING_DOWN="Stopping",
ERROR='Error'
)
class ConsoleManager(BaseConsoleManager):
node_type = "worker"
def __init__(self, app):
self.app = app
self.console_monitor = ConsoleMonitor(self.app)
self.worker_status = worker_states.WAKE
self.worker_instances = [] # Needed because of UI and number of nodes value
# Default to the most comprehensive type
self.cluster_type = self.app.ud.get('cluster_type', 'Galaxy')
self.mount_points = [] # A list of current mount points; each list element must have the
# following structure: (label, local_path, type, server_path)
self.nfs_data = 0
self.nfs_tools = 0
self.nfs_indices = 0
self.nfs_tfs = 0 # transient file system/storage from the master
self.nfs_sge = 0
self.get_cert = 0
self.sge_started = 0
self.custom_hostname = None
self.load = 0
@property
def local_hostname(self):
"""
Returns the local hostname for this instance.
"""
return self.app.cloud_interface.get_local_hostname()
# Not working (SGE cannot resolve master host)
if not self.custom_hostname:
self.custom_hostname = "{0}-{1}".format(self.app.cloud_interface.get_instance_id(),
self.app.cloud_interface.get_private_ip())
return self.custom_hostname
def start(self):
self._handle_prestart_commands()
# self.mount_nfs( self.app.ud['master_ip'] )
misc.add_to_etc_hosts(
self.app.ud['master_hostname'], self.app.ud['master_ip'])
def shutdown(self, delete_cluster=None):
self.worker_status = worker_states.SHUTTING_DOWN
self.console_monitor.send_node_shutting_down()
self.console_monitor.shutdown()
def get_cluster_status(self):
return "This is a worker node, cluster status not available."
def mount_disk(self, fs_type, server, path, mount_options):
        # If a path is not specified for an nfs server, and only its IP is provided, assume that the target path to mount at is
        # the path on the server as well
if fs_type == 'nfs' and ':' not in server:
server = server + ":" + path
# Before mounting, check if the file system is already mounted
mnt_location = commands.getstatusoutput("cat /proc/mounts | grep %s[[:space:]] | cut -d' ' -f1,2"
% path)
if mnt_location[0] == 0 and mnt_location[1] != '':
log.debug("{0} is already mounted".format(path))
return mnt_location[0]
else:
log.debug("Mounting fs of type: %s from: %s to: %s..." % (fs_type, server, path))
if not os.path.exists(path):
os.mkdir(path)
options = "-o {0}".format(mount_options) if mount_options else ""
ret_code = subprocess.call(
"mount -t %s %s %s %s" % (fs_type, options, server, path), shell=True)
log.debug("Process mounting '%s' returned code '%s'" % (path, ret_code))
return ret_code
def mount_nfs(self, master_ip, mount_json):
if self.app.TESTFLAG is True:
log.debug("Attempted to mount NFS, but TESTFLAG is set.")
return
mount_points = []
try:
# Try to load mount points from json dispatch
try:
mount_points_dict = json.loads(mount_json.strip())
except Exception, e:
log.error("json load exception: %s" % e)
log.debug("mount_points_dict: %s" % mount_points_dict)
if 'mount_points' in mount_points_dict:
for mp in mount_points_dict['mount_points']:
# TODO use the actual filesystem name for accounting/status
# updates
mount_points.append(
(mp['fs_name'], mp['shared_mount_path'], mp['fs_type'], mp['server'], mp.get('mount_options', None)))
else:
raise Exception("Mount point parsing failure.")
except Exception, e:
log.error("Error mounting devices: {0}\n Attempting to continue, but failure likely...".format(e))
# Mount SGE regardless of cluster type
mount_points.append(('nfs_sge', self.app.path_resolver.sge_root, 'nfs', master_ip, ''))
# Mount Hadoop regardless of cluster type
mount_points.append(('nfs_hadoop', paths.P_HADOOP_HOME, 'nfs', master_ip, ''))
for i, extra_mount in enumerate(self._get_extra_nfs_mounts()):
mount_points.append(('extra_mount_%d' % i, extra_mount, 'nfs', master_ip, ''))
# For each main mount point, mount it and set status based on label
for (label, path, fs_type, server, mount_options) in mount_points:
log.debug("Mounting FS w/ label '{0}' to path: {1} from server: {2} of type: {3} with mount_options: {4}".format(
label, path, server, fs_type, mount_options))
do_mount = self.app.ud.get('mount_%s' % label, True)
if not do_mount:
continue
ret_code = self.mount_disk(fs_type, server, path, mount_options)
status = 1 if ret_code == 0 else -1
# Provide a mapping between the mount point labels and the local fields
# Given tools & data file systems have been merged, this mapping does
            # not distinguish between those but simply chooses the data field.
labels_to_fields = {
'galaxy': 'nfs_data',
'galaxyIndices': 'nfs_indices',
'transient_nfs': 'nfs_tfs'
}
setattr(self, labels_to_fields.get(label, label), status)
# Filter out any differences between new and old mount points and unmount
# the extra ones
umount_points = [ump for ump in self.mount_points if ump not in mount_points]
        for (_, old_path, _, _, _) in umount_points:
self._umount(old_path)
# Update the current list of mount points
self.mount_points = mount_points
# If the instance is not ``READY``, it means it's still being configured
# so send a message to continue the handshake
if self.worker_status != worker_states.READY:
self.console_monitor.conn.send("MOUNT_DONE")
def unmount_filesystems(self):
log.info("Unmounting directories: {0}".format(self.mount_points))
for mp in self.mount_points:
self._umount(mp[1])
def _umount(self, path):
ret_code = subprocess.call("umount -lf '%s'" % path, shell=True)
log.debug("Process unmounting '%s' returned code '%s'" % (path, ret_code))
def get_host_cert(self):
if self.app.TESTFLAG is True:
log.debug("Attempted to get host cert, but TESTFLAG is set.")
return "TEST_WORKERHOSTCERT"
w_cert_file = '/tmp/wCert.txt'
cmd = '%s - sgeadmin -c "ssh-keyscan -t rsa %s > %s"' % (
paths.P_SU, self.app.cloud_interface.get_fqdn(), w_cert_file)
log.info("Retrieving worker host certificate; cmd: {0}".format(cmd))
ret_code = subprocess.call(cmd, shell=True)
if ret_code == 0:
f = open(w_cert_file, 'r')
host_cert = f.readline()
f.close()
self.get_cert = 1
self.console_monitor.send_node_status()
return host_cert
else:
log.error(
"Error retrieving host cert. Process returned code '%s'" % ret_code)
self.get_cert = -1
self.console_monitor.send_node_status()
return None
def save_authorized_key(self, m_key):
if self.app.TESTFLAG is True:
log.debug("Attempted to save authorized key, but TESTFLAG is set.")
return
log.info(
"Saving master's (i.e., root) authorized key to ~/.ssh/authorized_keys...")
with open("/root/.ssh/authorized_keys", 'a') as f:
f.write(m_key)
def start_sge(self):
if self.app.TESTFLAG is True:
fakeretcode = 0
log.debug("Attempted to start SGE, but TESTFLAG is set. Returning retcode %s" %
fakeretcode)
return fakeretcode
log.info("Configuring SGE...")
sge.fix_libc()
# Ensure lines starting with 127.0.1. are not included in /etc/hosts
# because SGE fails to install if that's the case. This line is added
# to /etc/hosts by cloud-init
# (http://www.cs.nott.ac.uk/~aas/Software%2520Installation%2520and%2520Development%2520Problems.html)
misc.run(
"sed -i.bak '/^127.0.1./s/^/# (Commented by CloudMan) /' /etc/hosts")
log.debug("Configuring users' SGE profiles...")
f = open(paths.LOGIN_SHELL_SCRIPT, 'a')
f.write("\nexport SGE_ROOT=%s" % self.app.path_resolver.sge_root)
f.write("\n. $SGE_ROOT/default/common/settings.sh\n")
f.close()
SGE_config_file = '/tmp/galaxyEC2_configuration.conf'
f = open(SGE_config_file, 'w')
print >> f, sge_install_template % (self.local_hostname,
self.local_hostname, self.local_hostname)
f.close()
os.chown(SGE_config_file, pwd.getpwnam("sgeadmin")[2],
grp.getgrnam("sgeadmin")[2])
log.info(
"Created SGE install template as file '%s'." % SGE_config_file)
cmd = 'cd %s; ./inst_sge -x -noremote -auto %s' % (
self.app.path_resolver.sge_root, SGE_config_file)
log.info("Setting up SGE; cmd: {0}".format(cmd))
ret_code = subprocess.call(cmd, shell=True)
if ret_code == 0:
self.sge_started = 1
log.debug("Successfully configured SGE.")
else:
self.sge_started = -1
log.error(
"Setting up SGE did not go smoothly, process returned with code '%s'" % ret_code)
self.console_monitor.send_node_status()
return ret_code
# # Configure hadoop necessary environment for further use
    # # by hadoop installation process through SGE
def start_hadoop(self):
# KWS: Optionally add Hadoop service based on config setting
if self.app.config.hadoop_enabled:
self.hadoop = HadoopService(self.app)
self.hadoop.configure_hadoop()
def start_condor(self, host_ip):
"""
Configure and start condor worker node to join the master pool.
"""
if self.app.config.condor_enabled:
self.condor = HTCondorService(self.app, "worker", host_ip)
self.condor.start()
# #<KWS>
    # Update /etc/hosts by fetching the master's /etc/hosts file;
    # # this is necessary for the hadoop ssh component
def sync_etc_host(self, sync_path=paths.P_ETC_TRANSIENT_PATH):
if os.path.exists(sync_path):
log.debug("Synced /etc/hosts with %s" % sync_path)
shutil.copyfile(sync_path, "/etc/hosts")
else:
log.warning("Sync path %s not available; cannot sync /etc/hosts"
% sync_path)
def _get_extra_nfs_mounts(self):
return self.app.ud.get('extra_nfs_mounts', [])
class ConsoleMonitor(object):
def __init__(self, app):
self.app = app
self.waiting = []
self.state = worker_states.INITIAL_STARTUP
self.running = True
# Helper for interruptible sleep
self.sleeper = misc.Sleeper()
self.conn = comm.CMWorkerComm(self.app.cloud_interface.get_instance_id(
), self.app.ud['master_ip'])
if self.app.TESTFLAG is True:
log.debug("Attempted to get host cert, but TESTFLAG is set.")
else:
self.conn.setup()
self.monitor_thread = threading.Thread(target=self.__monitor)
def start(self):
self.app.manager.worker_status = worker_states.WAKE
self.last_state_change_time = dt.datetime.utcnow()
self.monitor_thread.start()
def get_msg(self, m_tag):
msg = self.conn.recv()
if msg:
if msg.body.startswith(m_tag):
log.debug("Got message: %s" % msg.body)
return msg.body
def send_alive_message(self):
# If necessary, do some cloud-specific instance adjustments first
if self.app.cloud_type in ('opennebula',):
if not open('/etc/hostname').readline().startswith('i-'):
# Augment /etc/hostname w/ the custom local hostname
log.debug("Configuring hostname...")
with open("/etc/hostname", 'w') as f:
f.write(self.app.manager.local_hostname)
# Augment /etc/hosts w/ the custom local hostname
misc.add_to_etc_hosts(
self.app.ud['master_hostname'], self.app.ud['master_ip'])
misc.add_to_etc_hosts(self.app.manager.local_hostname,
self.app.cloud_interface.get_private_ip())
misc.add_to_etc_hosts(
self.app.ud['master_ip'], 'ubuntu') # For opennebula
# Restart hostname process or the node process?
# ret_code = subprocess.call( "/etc/init.d/hostname restart",
# shell=True )
ret_code = subprocess.call("sudo telinit 6", shell=True)
if ret_code == 0:
log.debug("Initiated reboot...")
else:
log.debug("Problem initiating reboot!?")
# Compose the ALIVE message
msg = "ALIVE | %s | %s | %s | %s | %s | %s" % (self.app.cloud_interface.get_private_ip(),
self.app.cloud_interface.get_public_ip(),
self.app.cloud_interface.get_zone(
),
self.app.cloud_interface.get_type(
),
self.app.cloud_interface.get_ami(),
self.app.manager.local_hostname)
self.conn.send(msg)
log.debug("Sending message '%s'" % msg)
def send_worker_hostcert(self):
host_cert = self.app.manager.get_host_cert()
if host_cert is not None:
m_response = "WORKER_H_CERT | %s " % host_cert
log.debug("Composing worker host cert message: '%s'" % m_response)
self.conn.send(m_response)
else:
log.error("Sending HostCert failed, HC is None.")
def send_node_ready(self):
num_cpus = commands.getoutput(
"cat /proc/cpuinfo | grep processor | wc -l")
msg_body = "NODE_READY | %s | %s" % (
self.app.cloud_interface.get_instance_id(), num_cpus)
log.debug("Sending message '%s'" % msg_body)
log.info("Instance '%s' done configuring itself, sending NODE_READY." %
self.app.cloud_interface.get_instance_id())
self.conn.send(msg_body)
def send_node_shutting_down(self):
msg_body = "NODE_SHUTTING_DOWN | %s | %s" \
% (self.app.manager.worker_status, self.app.cloud_interface.get_instance_id())
log.debug("Sending message '%s'" % msg_body)
self.conn.send(msg_body)
def send_node_status(self):
        # Get the system load in the following format:
        # "0.00 0.02 0.39" for the past 1, 5, and 15 minutes, respectively
self.app.manager.load = (
commands.getoutput("cat /proc/loadavg | cut -d' ' -f1-3")).strip()
msg_body = "NODE_STATUS | %s | %s | %s | %s | %s | %s | %s | %s | %s" \
% (self.app.manager.nfs_data,
self.app.manager.nfs_tools,
self.app.manager.nfs_indices,
self.app.manager.nfs_sge,
self.app.manager.get_cert,
self.app.manager.sge_started,
self.app.manager.load,
self.app.manager.worker_status,
self.app.manager.nfs_tfs)
# log.debug("Sending message '%s'" % msg_body)
self.conn.send(msg_body)
def handle_message(self, message):
if message.startswith("RESTART"):
m_ip = message.split(' | ')[1]
log.info("Master at %s requesting RESTART" % m_ip)
self.app.ud['master_ip'] = m_ip
self.app.manager.unmount_filesystems()
self.app.manager.mount_nfs(self.app.ud['master_ip'])
self.send_alive_message()
elif message.startswith("MASTER_PUBKEY"):
m_key = message.split(' | ')[1]
log.info(
"Got master public key (%s). Saving root's public key..." % m_key)
self.app.manager.save_authorized_key(m_key)
self.send_worker_hostcert()
log.info("WORKER_H_CERT message sent; changing state to '%s'" %
worker_states.WAIT_FOR_SGE)
self.app.manager.worker_status = worker_states.WAIT_FOR_SGE
self.last_state_change_time = dt.datetime.utcnow()
elif message.startswith("START_SGE"):
ret_code = self.app.manager.start_sge()
if ret_code == 0:
log.info("SGE daemon started successfully.")
# Now that the instance is ready, run the PSS service in a
# separate thread
pss = PSSService(self.app, instance_role='worker')
threading.Thread(target=pss.start).start()
self.send_node_ready()
self.app.manager.worker_status = worker_states.READY
self.last_state_change_time = dt.datetime.utcnow()
else:
log.error(
"Starting SGE daemon did not go smoothly; process returned code: %s" % ret_code)
self.app.manager.worker_status = worker_states.ERROR
self.last_state_change_time = dt.datetime.utcnow()
self.app.manager.start_condor(self.app.ud['master_public_ip'])
self.app.manager.start_hadoop()
elif message.startswith("MOUNT"):
# MOUNT everything in json blob.
self.app.manager.mount_nfs(self.app.ud['master_ip'],
mount_json=message.split(' | ')[1])
elif message.startswith("STATUS_CHECK"):
self.send_node_status()
elif message.startswith("REBOOT"):
log.info("Received reboot command")
ret_code = subprocess.call("sudo telinit 6", shell=True)
elif message.startswith("ADDS3FS"):
bucket_name = message.split(' | ')[1]
svc_roles = message.split(' | ')[2]
log.info("Adding s3fs file system from bucket {0}".format(bucket_name))
fs = Filesystem(self.app, bucket_name, ServiceRole.from_string_array(svc_roles))
fs.add_bucket(bucket_name)
fs.add()
log.debug("Worker done adding FS from bucket {0}".format(bucket_name))
# elif message.startswith("ADD_NFS_FS"):
# nfs_server_info = message.split(' | ')[1]
# # Try to load NFS server info from JSON message body
# try:
# nfs_server_info = json.loads(nfs_server_info.strip())
# except Exception, e:
# log.error("NFS server JSON load exception: %s" % e)
# self.app.manager.add_nfs_fs(nfs_server_info)
elif message.startswith('ALIVE_REQUEST'):
self.send_alive_message()
elif message.startswith('SYNC_ETC_HOSTS'):
# <KWS> syncing etc host using the master one
self.app.manager.sync_etc_host()
else:
log.debug("Unknown message '%s'" % message)
def __monitor(self):
self.app.manager.start()
while self.running:
# In case queue connection was not established, try again (this will happen if
# RabbitMQ does not start in time for CloudMan)
if not self.conn.is_connected():
log.debug(
"Trying to setup AMQP connection; conn = '%s'" % self.conn)
self.conn.setup()
continue
# Make this more robust, trying to reconnect to a lost queue, etc.
# self.app.manager.introspect.check_all_worker_services()
if self.conn:
if self.app.manager.worker_status == worker_states.WAKE:
self.send_alive_message()
self.app.manager.worker_status = worker_states.INITIAL_STARTUP
# elif (dt.datetime.utcnow() - self.last_state_change_time).seconds > 720 and self.app.manager.worker_status != worker_states.ERROR:
# log.info( "Stuck in state '%s' too long, reseting and trying again..." % self.app.manager.worker_status )
# self.app.manager.worker_status = worker_states.INITIAL_STARTUP
# self.last_state_change_time = dt.datetime.utcnow()
try:
m = self.conn.recv()
                except IOError, e:
                    # Avoid referencing an undefined name in the loop below if recv() failed.
                    m = None
                    if self.app.cloud_type == 'opennebula':
                        log.debug("Failed connecting to master: %s" % e)
                        log.debug("Trying to reboot the system")
                        subprocess.call('sudo telinit 6', shell=True)
                    else:
                        log.warning("IO trouble receiving msg: {0}".format(e))
while m is not None:
self.handle_message(m.body)
m = self.conn.recv()
# Regularly send a status update message
self.send_node_status()
else:
self.running = False
log.error("Communication queue not available, terminating.")
self.sleeper.sleep(10)
def shutdown(self):
"""Attempts to gracefully shut down the worker thread"""
log.info("Sending stop signal to worker thread")
self.running = False
self.sleeper.wake()
log.info("Console manager stopped")
``` |
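For reference, a minimal sketch of the JSON payload that `mount_nfs` above parses out of a `MOUNT` message. Only the key names are taken from that method; the concrete paths, server address, and options are hypothetical.

```python
import json

# Hypothetical values; only the key names mirror what mount_nfs() reads above.
mount_json = json.dumps({
    "mount_points": [
        {
            "fs_name": "galaxy",                 # mapped to the nfs_data status field
            "shared_mount_path": "/mnt/galaxy",  # where the worker mounts the filesystem
            "fs_type": "nfs",
            "server": "10.0.0.1",                # master IP; path is appended if no ':' present
            "mount_options": "rw,nfsvers=3",
        }
    ]
})
# The worker's handle_message() would then effectively call:
#   self.app.manager.mount_nfs(master_ip, mount_json=mount_json)
```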
{
"source": "jmchilton/gravity",
"score": 3
} |
#### File: gravity/gravity/supervisor.py
```python
import xmlrpclib
import operator
def get_client(url='http://localhost:9001/'):
if not url.endswith("/"):
url = "%s/" % url
rpc_url = "%sRPC2" % url
print rpc_url
server = xmlrpclib.Server(rpc_url)
return Client(server)
class Client(object):
def __init__(self, server):
self._server = server
@property
def supervisor(self):
return self._server.supervisor
@property
def processes(self):
raw_processes_info = self.supervisor.getAllProcessInfo()
return map(self._build_process, raw_processes_info)
def find_process(self, name):
raw_process_info = lambda: self.supervisor.getProcessInfo(name)
return self._build_process(raw_process_info, name=name)
def _build_process(self, process_info, name=None):
return Process(self, process_info, name=name)
class Process(object):
def __init__(self, client, process_info, name=None):
self.client = client
self._raw_process_info = process_info
self._name = name
def as_dict(self):
return self.raw_process_info
@property
def name(self):
if self._name is None:
self._name = self.raw_process_info["name"]
return self._name
@property
def raw_process_info(self):
raw_process_info = self._raw_process_info
# Allow delayed evaluation...
if operator.isCallable(raw_process_info):
raw_process_info = raw_process_info()
return raw_process_info
@property
def stdout(self):
return self._supervisor.readProcessStdoutLog(self.name, 0, 1024 * 1024)
@property
def stderr(self):
return self._supervisor.readProcessStderrLog(self.name, 0, 1024 * 1024)
def clear_logs(self):
self._supervisor.clearProcessLogs(self.name)
def start(self):
self._supervisor.startProcessGroup(self.name)
def stop(self):
self._supervisor.stopProcessGroup(self.name)
@property
def _supervisor(self):
return self.client.supervisor
``` |
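A minimal usage sketch for the client above, assuming a supervisord instance listening on `http://localhost:9001/` with its XML-RPC interface enabled; the process name `'web'` is hypothetical.

```python
from gravity.supervisor import get_client

client = get_client('http://localhost:9001/')

# List all supervisord-managed processes with their state names.
for process in client.processes:
    info = process.as_dict()
    print("%s: %s" % (process.name, info.get('statename')))

# Look up a single (hypothetical) process lazily and restart it.
web = client.find_process('web')
web.stop()
web.start()
print(web.stdout[-500:])  # tail of the captured stdout log
```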
{
"source": "jmchilton/gxformat2",
"score": 3
} |
#### File: gxformat2/gxformat2/converter.py
```python
import argparse
import copy
import json
import logging
import os
import sys
import uuid
from collections import OrderedDict
from typing import Dict, Optional
from ._labels import Labels
from .model import (
convert_dict_to_id_list_if_needed,
ensure_step_position,
inputs_as_native_steps,
with_step_ids,
)
from .yaml import ordered_load
SCRIPT_DESCRIPTION = """
Convert a Format 2 Galaxy workflow description into a native format.
"""
# source: step#output and $link: step#output instead of outputSource: step/output and $link: step/output
SUPPORT_LEGACY_CONNECTIONS = os.environ.get("GXFORMAT2_SUPPORT_LEGACY_CONNECTIONS") == "1"
STEP_TYPES = [
"subworkflow",
"data_input",
"data_collection_input",
"tool",
"pause",
"parameter_input",
]
STEP_TYPE_ALIASES = {
'input': 'data_input',
'input_collection': 'data_collection_input',
'parameter': 'parameter_input',
}
RUN_ACTIONS_TO_STEPS = {
'GalaxyWorkflow': 'run_workflow_to_step',
'GalaxyTool': 'run_tool_to_step',
}
POST_JOB_ACTIONS = {
'hide': {
'action_class': "HideDatasetAction",
'default': False,
'arguments': lambda x: x,
},
'rename': {
'action_class': 'RenameDatasetAction',
'default': {},
'arguments': lambda x: {'newname': x},
},
'delete_intermediate_datasets': {
'action_class': 'DeleteIntermediatesAction',
'default': False,
'arguments': lambda x: x,
},
'change_datatype': {
'action_class': 'ChangeDatatypeAction',
'default': {},
'arguments': lambda x: {'newtype': x},
},
'set_columns': {
'action_class': 'ColumnSetAction',
'default': {},
'arguments': lambda x: x,
},
'add_tags': {
'action_class': 'TagDatasetAction',
'default': [],
'arguments': lambda x: {'tags': ",".join(x)},
},
'remove_tags': {
'action_class': 'RemoveTagDatasetAction',
'default': [],
'arguments': lambda x: {'tags': ",".join(x)},
},
}
log = logging.getLogger(__name__)
def rename_arg(argument):
return argument
def clean_connection(value):
if value and "#" in value and SUPPORT_LEGACY_CONNECTIONS:
# Hope these are just used by Galaxy testing workflows and such, and not in production workflows.
log.warn(f"Legacy workflow syntax for connections [{value}] will not be supported in the future")
value = value.replace("#", "/", 1)
    return value
class ImportOptions:
def __init__(self):
self.deduplicate_subworkflows = False
def yaml_to_workflow(has_yaml, galaxy_interface, workflow_directory, import_options=None):
"""Convert a Format 2 workflow into standard Galaxy format from supplied stream."""
as_python = ordered_load(has_yaml)
return python_to_workflow(as_python, galaxy_interface, workflow_directory, import_options=import_options)
def python_to_workflow(as_python, galaxy_interface, workflow_directory=None, import_options=None):
"""Convert a Format 2 workflow into standard Galaxy format from supplied dictionary."""
if "yaml_content" in as_python:
as_python = ordered_load(as_python["yaml_content"])
if workflow_directory is None:
workflow_directory = os.path.abspath(".")
conversion_context = ConversionContext(
galaxy_interface,
workflow_directory,
import_options,
)
as_python = _preprocess_graphs(as_python, conversion_context)
subworkflows = None
if conversion_context.import_options.deduplicate_subworkflows:
# TODO: import only required workflows...
# TODO: dag sort these...
subworkflows = OrderedDict()
for graph_id, subworkflow_content in conversion_context.graph_ids.items():
if graph_id == "main":
continue
subworkflow_conversion_context = conversion_context.get_subworkflow_conversion_context_graph("#" + graph_id)
subworkflows[graph_id] = _python_to_workflow(copy.deepcopy(subworkflow_content), subworkflow_conversion_context)
converted = _python_to_workflow(as_python, conversion_context)
if subworkflows is not None:
converted["subworkflows"] = subworkflows
return converted
# move to a utils file?
def steps_as_list(format2_workflow: dict, add_ids: bool = False, inputs_offset: int = 0, mutate: bool = False):
"""Return steps as a list, converting ID map to list representation if needed.
This method does mutate the supplied steps, try to make progress toward not doing this.
Add keys as labels instead of IDs. Why am I doing this?
"""
if "steps" not in format2_workflow:
raise Exception(f"No 'steps' key in dict, keys are {format2_workflow.keys()}")
steps = format2_workflow["steps"]
steps = convert_dict_to_id_list_if_needed(steps, add_label=True, mutate=mutate)
if add_ids:
if mutate:
_append_step_id_to_step_list_elements(steps, inputs_offset=inputs_offset)
else:
steps = with_step_ids(steps, inputs_offset=inputs_offset)
return steps
def _append_step_id_to_step_list_elements(steps: list, inputs_offset: int = 0):
assert isinstance(steps, list)
for i, step in enumerate(steps):
if "id" not in step:
step["id"] = i + inputs_offset
assert step["id"] is not None
def _python_to_workflow(as_python, conversion_context):
if "class" not in as_python:
raise Exception("This is not a not a valid Galaxy workflow definition, must define a class.")
if as_python["class"] != "GalaxyWorkflow":
raise Exception("This is not a not a valid Galaxy workflow definition, 'class' must be 'GalaxyWorkflow'.")
# .ga files don't have this, drop it so it isn't interpreted as a format 2 workflow.
as_python.pop("class")
_ensure_defaults(as_python, {
"a_galaxy_workflow": "true",
"format-version": "0.1",
"name": as_python.pop("label", "Workflow"),
"uuid": str(uuid.uuid4()),
})
_populate_annotation(as_python)
steps = steps_as_list(as_python, mutate=True)
convert_inputs_to_steps(as_python, steps)
if isinstance(steps, list):
_append_step_id_to_step_list_elements(steps)
steps_as_dict = OrderedDict()
for i, step in enumerate(steps):
steps_as_dict[str(i)] = step
if "label" in step:
label = step["label"]
conversion_context.labels[label] = i
# TODO: this really should be optional in Galaxy API.
ensure_step_position(step, i)
as_python["steps"] = steps_as_dict
steps = steps_as_dict
for step in steps.values():
step_type = step.get("type", None)
if "run" in step:
if step_type is not None:
raise Exception("Steps specified as run actions cannot specify a type.")
run_action = step.get("run")
run_action = conversion_context.get_runnable_description(run_action)
if isinstance(run_action, dict):
run_class = run_action["class"]
run_to_step_function = eval(RUN_ACTIONS_TO_STEPS[run_class])
run_to_step_function(conversion_context, step, run_action)
else:
step["content_id"] = run_action
step["type"] = "subworkflow"
del step["run"]
for step in steps.values():
step_type = step.get("type", "tool")
step_type = STEP_TYPE_ALIASES.get(step_type, step_type)
if step_type not in STEP_TYPES:
raise Exception(f"Unknown step type encountered {step_type}")
step["type"] = step_type
eval(f"transform_{step_type}")(conversion_context, step)
outputs = as_python.pop("outputs", [])
outputs = convert_dict_to_id_list_if_needed(outputs)
for output in outputs:
assert isinstance(output, dict), "Output definition must be dictionary"
assert "source" in output or "outputSource" in output, "Output definition must specify source"
if "label" in output and "id" in output:
raise Exception("label and id are aliases for outputs, may only define one")
if "label" not in output and "id" not in output:
label = ""
raw_label = output.pop("label", None)
raw_id = output.pop("id", None)
label = raw_label or raw_id
if Labels.is_anonymous_output_label(label):
label = None
source = clean_connection(output.get("outputSource"))
if source is None and SUPPORT_LEGACY_CONNECTIONS:
source = output.get("source").replace("#", "/", 1)
id, output_name = conversion_context.step_output(source)
step = steps[str(id)]
workflow_output = {
"output_name": output_name,
"label": label,
"uuid": output.get("uuid", None)
}
if "workflow_outputs" not in step:
step["workflow_outputs"] = []
step["workflow_outputs"].append(workflow_output)
return as_python
def _preprocess_graphs(as_python, conversion_context):
if not isinstance(as_python, dict):
raise Exception("This is not a not a valid Galaxy workflow definition.")
format_version = as_python.get("format-version", "v2.0")
assert format_version == "v2.0"
if "class" not in as_python and "$graph" in as_python:
for subworkflow in as_python["$graph"]:
if not isinstance(subworkflow, dict):
raise Exception("Malformed workflow content in $graph")
if "id" not in subworkflow:
raise Exception("No subworkflow ID found for entry in $graph.")
subworkflow_id = subworkflow["id"]
if subworkflow_id == "main":
as_python = subworkflow
conversion_context.register_runnable(subworkflow)
return as_python
def convert_inputs_to_steps(workflow_dict: dict, steps: list):
"""Convert workflow inputs to a steps in array - like in native Galaxy.
workflow_dict is a Format 2 representation of a workflow and steps is a
list of steps. This method will prepend all the inputs as as steps to the
steps list. This method modifies both workflow_dict and steps.
"""
if "inputs" not in workflow_dict:
return
input_steps = inputs_as_native_steps(workflow_dict)
workflow_dict.pop("inputs")
for i, new_step in enumerate(input_steps):
steps.insert(i, new_step)
def run_workflow_to_step(conversion_context, step, run_action):
step["type"] = "subworkflow"
if conversion_context.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
step["content_id"] = run_action
else:
subworkflow_conversion_context = conversion_context.get_subworkflow_conversion_context(step)
step["subworkflow"] = _python_to_workflow(
copy.deepcopy(run_action),
subworkflow_conversion_context,
)
def _is_graph_id_reference(run_action):
return run_action and not isinstance(run_action, dict)
def transform_data_input(context, step):
transform_input(context, step, default_name="Input dataset")
def transform_data_collection_input(context, step):
transform_input(context, step, default_name="Input dataset collection")
def transform_parameter_input(context, step):
transform_input(context, step, default_name="input_parameter")
def transform_input(context, step, default_name):
default_name = step.get("label", default_name)
_populate_annotation(step)
_ensure_inputs_connections(step)
if "inputs" not in step:
step["inputs"] = [{}]
step_inputs = step["inputs"][0]
if "name" in step_inputs:
name = step_inputs["name"]
else:
name = default_name
_ensure_defaults(step_inputs, {
"name": name,
"description": "",
})
tool_state = {
"name": name
}
for attrib in ["collection_type", "parameter_type", "optional", "default", "format", "restrictions", "restrictOnConnections", "suggestions"]:
if attrib in step:
tool_state[attrib] = step[attrib]
_populate_tool_state(step, tool_state)
def transform_pause(context, step, default_name="Pause for dataset review"):
default_name = step.get("label", default_name)
_populate_annotation(step)
_ensure_inputs_connections(step)
if "inputs" not in step:
step["inputs"] = [{}]
step_inputs = step["inputs"][0]
if "name" in step_inputs:
name = step_inputs["name"]
else:
name = default_name
_ensure_defaults(step_inputs, {
"name": name,
})
tool_state = {
"name": name
}
connect = _init_connect_dict(step)
_populate_input_connections(context, step, connect)
_populate_tool_state(step, tool_state)
def transform_subworkflow(context, step):
_populate_annotation(step)
_ensure_inputs_connections(step)
tool_state = {
}
connect = _init_connect_dict(step)
_populate_input_connections(context, step, connect)
_populate_tool_state(step, tool_state)
def _runtime_value():
return {"__class__": "RuntimeValue"}
def transform_tool(context, step):
if "tool_id" not in step:
raise Exception("Tool steps must define a tool_id.")
_ensure_defaults(step, {
"name": step['tool_id'],
"post_job_actions": {},
"tool_version": None,
})
post_job_actions = step["post_job_actions"]
_populate_annotation(step)
tool_state = {
# TODO: Galaxy should not require tool state actually specify a __page__.
"__page__": 0,
}
connect = _init_connect_dict(step)
def append_link(key, value):
if key not in connect:
connect[key] = []
assert "$link" in value
link_value = value["$link"]
connect[key].append(clean_connection(link_value))
def replace_links(value, key=""):
if _is_link(value):
append_link(key, value)
# Filled in by the connection, so to force late
# validation of the field just mark as RuntimeValue.
# It would be better I guess if this were some other
            # value dedicated to this purpose (e.g. a fictitious
# {"__class__": "ConnectedValue"}) that could be further
# validated by Galaxy.
return _runtime_value()
if isinstance(value, dict):
new_values = {}
for k, v in value.items():
new_key = _join_prefix(key, k)
new_values[k] = replace_links(v, new_key)
return new_values
elif isinstance(value, list):
new_values = []
for i, v in enumerate(value):
# If we are a repeat we need to modify the key
# but not if values are actually $links.
if _is_link(v):
append_link(key, v)
new_values.append(None)
else:
new_key = "%s_%d" % (key, i)
new_values.append(replace_links(v, new_key))
return new_values
else:
return value
# TODO: handle runtime inputs and state together.
runtime_inputs = step.get("runtime_inputs", [])
if "state" in step or runtime_inputs:
step_state = step.pop("state", {})
step_state = replace_links(step_state)
for key, value in step_state.items():
tool_state[key] = json.dumps(value)
for runtime_input in runtime_inputs:
tool_state[runtime_input] = json.dumps(_runtime_value())
elif "tool_state" in step:
tool_state.update(step.get("tool_state"))
# Fill in input connections
_populate_input_connections(context, step, connect)
_populate_tool_state(step, tool_state)
# Handle outputs.
out = step.pop("out", None)
if out is None:
# Handle LEGACY 19.XX outputs key.
out = step.pop("outputs", [])
out = convert_dict_to_id_list_if_needed(out)
for output in out:
name = output["id"]
for action_key, action_dict in POST_JOB_ACTIONS.items():
action_argument = output.get(action_key, action_dict['default'])
if action_argument:
action_class = action_dict['action_class']
action_name = action_class + name
action = _action(
action_class,
name,
arguments=action_dict['arguments'](action_argument)
)
post_job_actions[action_name] = action
def run_tool_to_step(conversion_context, step, run_action):
tool_description = conversion_context.galaxy_interface.import_tool(
run_action
)
step["type"] = "tool"
step["tool_id"] = tool_description["tool_id"]
step["tool_version"] = tool_description["tool_version"]
step["tool_hash"] = tool_description.get("tool_hash")
step["tool_uuid"] = tool_description.get("uuid")
class BaseConversionContext:
def __init__(self):
self.labels = {}
self.subworkflow_conversion_contexts = {}
def step_id(self, label_or_id):
if label_or_id in self.labels:
id_ = self.labels[label_or_id]
else:
id_ = label_or_id
return int(id_)
def step_output(self, value):
value_parts = str(value).split("/")
if len(value_parts) == 1:
value_parts.append("output")
id = self.step_id(value_parts[0])
return id, value_parts[1]
def get_subworkflow_conversion_context(self, step):
        # TODO: sometimes this method takes format2 steps and sometimes converted native ones
# (for input connections) - redo this so the type signature is stronger.
step_id = step.get("id")
run_action = step.get("run")
if self.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
subworkflow_conversion_context = self.get_subworkflow_conversion_context_graph(run_action)
return subworkflow_conversion_context
if "content_id" in step:
subworkflow_conversion_context = self.get_subworkflow_conversion_context_graph(step["content_id"])
return subworkflow_conversion_context
if step_id not in self.subworkflow_conversion_contexts:
subworkflow_conversion_context = SubworkflowConversionContext(
self
)
self.subworkflow_conversion_contexts[step_id] = subworkflow_conversion_context
return self.subworkflow_conversion_contexts[step_id]
def get_runnable_description(self, run_action):
if "@import" in run_action:
if len(run_action) > 1:
raise Exception("@import must be only key if present.")
run_action_path = run_action["@import"]
runnable_path = os.path.join(self.workflow_directory, run_action_path)
with open(runnable_path) as f:
runnable_description = ordered_load(f)
run_action = runnable_description
if not self.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
run_action = self.graph_ids[run_action[1:]]
return run_action
class ConversionContext(BaseConversionContext):
def __init__(self, galaxy_interface, workflow_directory, import_options: Optional[ImportOptions] = None):
super().__init__()
self.import_options = import_options or ImportOptions()
self.graph_ids = OrderedDict() # type: Dict
self.graph_id_subworkflow_conversion_contexts = {} # type: Dict
self.workflow_directory = workflow_directory
self.galaxy_interface = galaxy_interface
def register_runnable(self, run_action):
assert "id" in run_action
self.graph_ids[run_action["id"]] = run_action
def get_subworkflow_conversion_context_graph(self, graph_id):
if graph_id not in self.graph_id_subworkflow_conversion_contexts:
subworkflow_conversion_context = SubworkflowConversionContext(
self
)
self.graph_id_subworkflow_conversion_contexts[graph_id] = subworkflow_conversion_context
return self.graph_id_subworkflow_conversion_contexts[graph_id]
class SubworkflowConversionContext(BaseConversionContext):
def __init__(self, parent_context):
super().__init__()
self.parent_context = parent_context
@property
def graph_ids(self):
return self.parent_context.graph_ids
@property
def workflow_directory(self):
return self.parent_context.workflow_directory
@property
def import_options(self):
return self.parent_context.import_options
@property
def galaxy_interface(self):
return self.parent_context.galaxy_interface
def get_subworkflow_conversion_context_graph(self, graph_id):
return self.parent_context.get_subworkflow_conversion_context_graph(graph_id)
def _action(type, name, arguments):
return {
"action_arguments": arguments,
"action_type": type,
"output_name": name,
}
def _is_link(value):
return isinstance(value, dict) and "$link" in value
def _join_prefix(prefix, key):
if prefix:
new_key = f"{prefix}|{key}"
else:
new_key = key
return new_key
def _init_connect_dict(step):
if "connect" not in step:
step["connect"] = {}
connect = step["connect"]
del step["connect"]
# handle CWL-style in dict connections.
if "in" in step:
step_in = step["in"]
assert isinstance(step_in, dict)
connection_keys = set()
for key, value in step_in.items():
# TODO: this can be a list right?
if isinstance(value, dict) and 'source' in value:
value = value["source"]
elif isinstance(value, dict) and 'default' in value:
continue
elif isinstance(value, dict):
raise KeyError(f'step input must define either source or default {value}')
connect[key] = [value]
connection_keys.add(key)
for key in connection_keys:
del step_in[key]
if len(step_in) == 0:
del step['in']
return connect
def _populate_input_connections(context, step, connect):
_ensure_inputs_connections(step)
input_connections = step["input_connections"]
is_subworkflow_step = step.get("type") == "subworkflow"
for key, values in connect.items():
input_connection_value = []
if not isinstance(values, list):
values = [values]
for value in values:
if not isinstance(value, dict):
if key == "$step":
value += "/__NO_INPUT_OUTPUT_NAME__"
id, output_name = context.step_output(value)
value = {"id": id, "output_name": output_name}
if is_subworkflow_step:
subworkflow_conversion_context = context.get_subworkflow_conversion_context(step)
input_subworkflow_step_id = subworkflow_conversion_context.step_id(key)
value["input_subworkflow_step_id"] = input_subworkflow_step_id
input_connection_value.append(value)
if key == "$step":
key = "__NO_INPUT_OUTPUT_NAME__"
input_connections[key] = input_connection_value
def _populate_annotation(step):
if "annotation" not in step and "doc" in step:
annotation = step.pop("doc")
step["annotation"] = annotation
elif "annotation" not in step:
step["annotation"] = ""
def _ensure_inputs_connections(step):
if "input_connections" not in step:
step["input_connections"] = {}
def _ensure_defaults(in_dict, defaults):
for key, value in defaults.items():
if key not in in_dict:
in_dict[key] = value
def _populate_tool_state(step, tool_state):
step["tool_state"] = json.dumps(tool_state)
def main(argv=None):
"""Entry point for script to conversion from Format 2 interface."""
if argv is None:
argv = sys.argv[1:]
args = _parser().parse_args(argv)
format2_path = args.input_path
output_path = args.output_path or (format2_path + ".gxwf.yml")
workflow_directory = os.path.abspath(format2_path)
galaxy_interface = None
with open(format2_path) as f:
has_workflow = ordered_load(f)
output = python_to_workflow(has_workflow, galaxy_interface=galaxy_interface, workflow_directory=workflow_directory)
with open(output_path, "w") as f:
json.dump(output, f, indent=4)
def _parser():
parser = argparse.ArgumentParser(description=SCRIPT_DESCRIPTION)
parser.add_argument('input_path', metavar='INPUT', type=str,
help='input workflow path (.ga)')
parser.add_argument('output_path', metavar='OUTPUT', type=str, nargs="?",
help='output workflow path (.gxfw.yml)')
return parser
if __name__ == "__main__":
    main()
__all__ = (
'main',
'python_to_workflow',
'yaml_to_workflow',
)
```
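A short sketch of driving the converter above programmatically, mirroring what `main()` does; the file name `my-workflow.gxwf.yml` is hypothetical, and `galaxy_interface` can be `None` as long as no step uses a `run:` tool action that needs to be imported.

```python
import json

from gxformat2.converter import python_to_workflow
from gxformat2.yaml import ordered_load

# Hypothetical path to a Format 2 workflow file.
with open("my-workflow.gxwf.yml") as f:
    as_python = ordered_load(f)

native = python_to_workflow(as_python, galaxy_interface=None, workflow_directory=".")

# Write the native (.ga) representation.
with open("my-workflow.ga", "w") as f:
    json.dump(native, f, indent=4)
```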
#### File: gxformat2/gxformat2/markdown_parse.py
```python
import re
BLOCK_FENCE_START = re.compile(r'```.*')
BLOCK_FENCE_END = re.compile(r'```[\s]*')
GALAXY_FLAVORED_MARKDOWN_CONTAINER_LINE_PATTERN = re.compile(
r"```\s*galaxy\s*"
)
VALID_CONTAINER_END_PATTERN = re.compile(r"^```\s*$")
VALID_ARGUMENTS = {
"history_dataset_display": ["input", "output", "history_dataset_id"],
"history_dataset_embedded": ["input", "output", "history_dataset_id"],
"history_dataset_as_image": ["input", "output", "history_dataset_id", "path"],
"history_dataset_peek": ["input", "output", "history_dataset_id"],
"history_dataset_info": ["input", "output", "history_dataset_id"],
"history_dataset_link": ["input", "output", "history_dataset_id", "path", "label"],
"history_dataset_index": ["input", "output", "history_dataset_id", "path"],
"history_dataset_name": ["input", "output", "history_dataset_id"],
"history_dataset_type": ["input", "output", "history_dataset_id"],
"history_dataset_collection_display": ["input", "output", "history_dataset_collection_id"],
"workflow_display": ["workflow_id"],
"job_metrics": ["step", "job_id"],
"job_parameters": ["step", "job_id"],
"tool_stderr": ["step", "job_id"],
"tool_stdout": ["step", "job_id"],
"generate_galaxy_version": [],
"generate_time": [],
"invocation_time": ["invocation_id"],
# Invocation Flavored Markdown
"invocation_outputs": [],
"invocation_inputs": [],
}
GALAXY_FLAVORED_MARKDOWN_CONTAINERS = list(VALID_ARGUMENTS.keys())
GALAXY_FLAVORED_MARKDOWN_CONTAINER_REGEX = r'(?P<container>%s)' % "|".join(GALAXY_FLAVORED_MARKDOWN_CONTAINERS)
ARG_VAL_REGEX = r'''[\w_\-]+|\"[^\"]+\"|\'[^\']+\''''
FUNCTION_ARG = fr'\s*\w+\s*=\s*(?:{ARG_VAL_REGEX})\s*'
# embed commas between arguments
FUNCTION_MULTIPLE_ARGS = fr'(?P<firstargcall>{FUNCTION_ARG})(?P<restargcalls>(?:,{FUNCTION_ARG})*)'
FUNCTION_MULTIPLE_ARGS_PATTERN = re.compile(FUNCTION_MULTIPLE_ARGS)
FUNCTION_CALL_LINE_TEMPLATE = r'\s*%s\s*\((?:' + FUNCTION_MULTIPLE_ARGS + r')?\)\s*'
GALAXY_MARKDOWN_FUNCTION_CALL_LINE = re.compile(FUNCTION_CALL_LINE_TEMPLATE % (GALAXY_FLAVORED_MARKDOWN_CONTAINER_REGEX, ))
WHITE_SPACE_ONLY_PATTERN = re.compile(r"^[\s]+$")
def validate_galaxy_markdown(galaxy_markdown, internal=True):
"""Validate the supplied markdown and throw an ValueError with reason if invalid."""
expecting_container_close_for = None
last_line_no = 0
function_calls = 0
for (line, fenced, open_fence, line_no) in _split_markdown_lines(galaxy_markdown):
last_line_no = line_no
def invalid_line(template, **kwd):
if "line" in kwd:
kwd["line"] = line.rstrip("\r\n")
raise ValueError("Invalid line %d: %s" % (line_no + 1, template.format(**kwd)))
expecting_container_close = expecting_container_close_for is not None
if not fenced and expecting_container_close:
invalid_line("[{line}] is not expected close line for [{expected_for}]", line=line, expected_for=expecting_container_close_for)
continue
elif not fenced:
continue
elif fenced and expecting_container_close and BLOCK_FENCE_END.match(line):
# reset
expecting_container_close_for = None
function_calls = 0
elif open_fence and GALAXY_FLAVORED_MARKDOWN_CONTAINER_LINE_PATTERN.match(line):
if expecting_container_close:
if not VALID_CONTAINER_END_PATTERN.match(line):
invalid_line("Invalid command close line [{line}] for [{expected_for}]", line=line, expected_for=expecting_container_close_for)
# else closing container and we're done
expecting_container_close_for = None
function_calls = 0
continue
expecting_container_close_for = line
continue
elif fenced and line and expecting_container_close_for:
func_call_match = GALAXY_MARKDOWN_FUNCTION_CALL_LINE.match(line)
if func_call_match:
function_calls += 1
if function_calls > 1:
invalid_line("Only one Galaxy directive is allowed per fenced Galaxy block (```galaxy)")
container = func_call_match.group("container")
valid_args = VALID_ARGUMENTS[container]
first_arg_call = func_call_match.group("firstargcall")
def _validate_arg(arg_str):
if arg_str is not None:
arg_name = arg_str.split("=", 1)[0].strip()
if arg_name not in valid_args:
invalid_line("Invalid argument to Galaxy directive [{argument}]", argument=arg_name)
_validate_arg(first_arg_call)
rest = func_call_match.group("restargcalls")
while rest:
rest = rest.strip().split(",", 1)[1]
arg_match = FUNCTION_MULTIPLE_ARGS_PATTERN.match(rest)
if not arg_match:
break
first_arg_call = arg_match.group("firstargcall")
_validate_arg(first_arg_call)
rest = arg_match.group("restargcalls")
continue
else:
invalid_line("Invalid embedded Galaxy markup line [{line}]", line=line)
# Markdown unrelated to Galaxy object containers.
continue
if expecting_container_close_for:
template = "Invalid line %d: %s"
msg = template % (last_line_no, f"close of block for [{expecting_container_close_for}] expected")
raise ValueError(msg)
def _split_markdown_lines(markdown):
"""Yield lines of a markdown document line-by-line keeping track of fencing.
'Fenced' lines are code-like block (e.g. between ```) that shouldn't contain
Markdown markup.
"""
block_fenced = False
indent_fenced = False
for line_number, line in enumerate(markdown.splitlines(True)):
open_fence_this_iteration = False
indent_fenced = line.startswith(" ") or (indent_fenced and WHITE_SPACE_ONLY_PATTERN.match(line))
if not block_fenced:
if BLOCK_FENCE_START.match(line):
open_fence_this_iteration = True
block_fenced = True
yield (line, block_fenced or indent_fenced, open_fence_this_iteration, line_number)
if not open_fence_this_iteration and BLOCK_FENCE_END.match(line):
block_fenced = False
__all__ = (
'validate_galaxy_markdown',
'GALAXY_MARKDOWN_FUNCTION_CALL_LINE',
)
```
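A minimal usage sketch for `validate_galaxy_markdown` (hedged: the directive name and argument below are illustrative placeholders; the allowed containers and arguments come from `VALID_ARGUMENTS` above, and the function is assumed to be in scope):
```python
# Build a small Galaxy-flavored markdown document without embedding literal
# fence characters in this example.
FENCE = "`" * 3
report_markdown = "\n".join([
    "## Workflow Report",
    "",
    FENCE + "galaxy",
    "history_dataset_display(history_dataset_id=1)",  # placeholder directive
    FENCE,
    "",
])

try:
    validate_galaxy_markdown(report_markdown)
    print("markdown is valid")
except ValueError as error:
    print("markdown is invalid: %s" % error)
```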
#### File: gxformat2/tests/test_cytoscape.py
```python
import json
import os
import shutil
import tempfile
from gxformat2.cytoscape import main
from ._helpers import TEST_INTEROP_EXAMPLES, TEST_PATH
from .test_lint import WITH_REPORT
EXAMPLE_PATH = os.path.join(TEST_PATH, "unicycler-extra-annotations.ga")
TEST_CYTOSCAPE_EXAMPLES = os.path.join(TEST_INTEROP_EXAMPLES, "cytoscape")
def test_main_output_json():
out_file = tempfile.NamedTemporaryFile(prefix="cytoscape_elements", suffix=".json")
main([EXAMPLE_PATH, out_file.name])
with open(out_file.name) as f:
elements = json.load(f)
assert isinstance(elements, list)
assert "</body>" not in open(out_file.name).read()
def test_main_output_html():
out_file = tempfile.NamedTemporaryFile(prefix="cytoscape_elements", suffix=".html")
main([EXAMPLE_PATH, out_file.name])
assert "</body>" in open(out_file.name).read()
def test_interop_generation():
# not much of a test case but it will generate a directory of interoperability examples to
# test Java against.
write_cytoscape_elements(EXAMPLE_PATH)
write_cytoscape_elements_for_string(WITH_REPORT)
def write_cytoscape_elements_for_string(workflow_content):
f = tempfile.NamedTemporaryFile(mode="w", suffix=".gxwf.yml")
f.write(workflow_content)
f.flush()
write_cytoscape_elements(f.name)
def write_cytoscape_elements(workflow_path):
if not os.path.exists(TEST_CYTOSCAPE_EXAMPLES):
os.makedirs(TEST_CYTOSCAPE_EXAMPLES)
base_name, ext = os.path.splitext(os.path.basename(workflow_path))
shutil.copyfile(workflow_path, os.path.join(TEST_CYTOSCAPE_EXAMPLES, base_name + ext))
elements_path = os.path.join(TEST_CYTOSCAPE_EXAMPLES, base_name + ".cytoscape.json")
main([workflow_path, elements_path])
``` |
{
"source": "jmchilton/lwr",
"score": 2
} |
#### File: managers/staging/preprocess.py
```python
from lwr.lwr_client.action_mapper import from_dict
def preprocess(job_directory, setup_actions):
for setup_action in setup_actions:
name = setup_action["name"]
input_type = setup_action["type"]
action = from_dict(setup_action["action"])
path = job_directory.calculate_path(name, input_type)
action.write_to_path(path)
__all__ = ['preprocess']
```
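For reference, a hedged sketch of the `setup_actions` structure the loop above expects, based only on the keys it reads; the nested action payload is a placeholder, not a documented format:
```python
# Each entry supplies the three keys read by preprocess(); the "action" dict
# is whatever lwr.lwr_client.action_mapper.from_dict accepts (placeholder here).
setup_actions = [
    {
        "name": "input1.fasta",  # resolved via job_directory.calculate_path
        "type": "input",
        "action": {},            # illustrative only
    },
]
```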
#### File: lwr/managers/status.py
```python
PREPROCESSING = "preprocessing"
QUEUED = "queued"
RUNNING = "running"
COMPLETE = "complete"
CANCELLED = "cancelled"
FAILED = "failed"
POSTPROCESSING = "postprocessing"
def is_job_done(status):
""" Does the supplied status correspond to a finished
job (done processing).
"""
return status in [COMPLETE, CANCELLED, FAILED]
```
#### File: lwr/messaging/__init__.py
```python
from ..messaging import bind_amqp
from six import itervalues
def bind_app(app, queue_id, connect_ssl=None):
connection_string = __id_to_connection_string(app, queue_id)
queue_state = QueueState()
for manager in itervalues(app.managers):
bind_amqp.bind_manager_to_queue(manager, queue_state, connection_string, connect_ssl)
return queue_state
class QueueState(object):
""" Passed through to event loops, should be "non-zero" while queues should
be active.
"""
def __init__(self):
self.active = True
def deactivate(self):
self.active = False
def __nonzero__(self):
return self.active
def __id_to_connection_string(app, queue_id):
return queue_id
```
#### File: lwr/scripts/mesos_executor.py
```python
import sys
import threading
from lwr.mesos import (
Executor,
MesosExecutorDriver,
mesos_pb2,
ensure_mesos_libs,
)
from lwr.lwr_client.util import from_base64_json
from lwr.scripts.lwr_submit import (
manager_from_args,
wait_for_job
)
from lwr.manager_endpoint_util import submit_job
from lwr.daemon import (
ArgumentParser,
LwrManagerConfigBuilder,
)
import logging
log = logging.getLogger(__name__)
DESCRIPTION = "Mesos executor for the LWR"
class LwrExecutor(Executor):
def __task_update(self, driver, task, state, data=None):
try:
log.debug("Sending status update...")
update = mesos_pb2.TaskStatus()
update.task_id.value = task.task_id.value
update.state = state
if data:
update.data = data
driver.sendStatusUpdate(update)
except Exception:
log.exception("Failed to update status of task.")
def launchTask(self, driver, task):
# Create a thread to run the task. Tasks should always be run in new
# threads or processes, rather than inside launchTask itself.
def run_task():
try:
log.info("Running task %s" % task.task_id.value)
task_data = from_base64_json(task.data)
manager_options = task_data["manager"]
config_builder = LwrManagerConfigBuilder(**manager_options)
manager, lwr_app = manager_from_args(config_builder)
job_config = task_data["job"]
submit_job(manager, job_config)
self.__task_update(driver, task, mesos_pb2.TASK_RUNNING)
wait_for_job(manager, job_config)
self.__task_update(driver, task, mesos_pb2.TASK_FINISHED)
lwr_app.shutdown()
except Exception:
log.exception("Failed to run, update, or monitor task %s" % task)
raise
thread = threading.Thread(target=run_task)
thread.start()
def frameworkMessage(self, driver, message):
# Send it back to the scheduler.
driver.sendFrameworkMessage(message)
def run_executor():
arg_parser = ArgumentParser(description=DESCRIPTION)
arg_parser.parse_args()
ensure_mesos_libs()
log.info("Starting LWR executor")
driver = MesosExecutorDriver(LwrExecutor())
exit_code = 0
if not driver.run() == mesos_pb2.DRIVER_STOPPED:
exit_code = 1
return exit_code
if __name__ == "__main__":
sys.exit(run_executor())
```
#### File: lwr/test/amqp_test.py
```python
import threading
from .test_utils import skipUnlessModule
from lwr.lwr_client import amqp_exchange
TEST_CONNECTION = "memory://test_amqp"
@skipUnlessModule("kombu")
def test_amqp():
manager1_exchange = amqp_exchange.LwrExchange(TEST_CONNECTION, "manager_test")
manager3_exchange = amqp_exchange.LwrExchange(TEST_CONNECTION, "manager3_test")
manager2_exchange = amqp_exchange.LwrExchange(TEST_CONNECTION, "manager2_test")
thread1 = TestThread("manager_test", manager1_exchange)
thread2 = TestThread("manager2_test", manager2_exchange)
thread3 = TestThread("manager3_test", manager3_exchange)
thread1.start()
thread2.start()
thread3.start()
manager1_exchange.publish("manager_test", "cow1")
manager2_exchange.publish("manager2_test", "cow2")
manager3_exchange.publish("manager3_test", "cow3")
thread1.join(1)
thread2.join(1)
thread3.join(1)
assert thread1.message == "cow1", thread1.message
assert thread2.message == "cow2", thread2.message
assert thread3.message == "cow3", thread3.message
class TestThread(threading.Thread):
def __init__(self, queue_name, exchange):
super(TestThread, self).__init__()
self.queue_name = queue_name
self.daemon = True
self.exchange = exchange
self.message = None
def __nonzero__(self):
return self.message is None
def run(self):
def callback(body, message):
self.message = body
message.ack()
self.exchange.consume(self.queue_name, callback=callback, check=self)
``` |
{
"source": "jmchilton/pulsar",
"score": 3
} |
#### File: metrics/instrumenters/env.py
```python
import re
from ..instrumenters import InstrumentPlugin
from ...metrics import formatting
import logging
log = logging.getLogger( __name__ )
class EnvFormatter( formatting.JobMetricFormatter ):
def format( self, key, value ):
return ( "%s (runtime environment variable)" % key, value )
class EnvPlugin( InstrumentPlugin ):
""" Instrumentation plugin capable of recording all or specific environment
variables for a job at runtime.
"""
plugin_type = "env"
formatter = EnvFormatter()
def __init__( self, **kwargs ):
variables_str = kwargs.get( "variables", None )
if variables_str:
variables = [ v.strip() for v in variables_str.split(",") ]
else:
variables = None
self.variables = variables
def pre_execute_instrument( self, job_directory ):
""" Use env to dump all environment variables to a file.
"""
return "env > '%s'" % self.__env_file( job_directory )
def post_execute_instrument( self, job_directory ):
return None
def job_properties( self, job_id, job_directory ):
""" Recover environment variables dumped out on compute server and filter
out specific variables if needed.
"""
variables = self.variables
properties = {}
env_string = ''.join( open( self.__env_file( job_directory ) ).readlines() )
while env_string:
# Check if the next lines contain a shell function.
# We use '\n\}\n' as regex termination because shell
# functions can be nested.
# We use the non-greedy '.+?' because of re.DOTALL .
            m = re.match( r'([^=]+)=(\(\) \{.+?\n\})\n', env_string, re.DOTALL )
if m is None:
                m = re.match( r'([^=]+)=(.*)\n', env_string )
if m is None:
# Some problem recording or reading back env output.
message_template = "Problem parsing env metric output for job %s - properties will be incomplete"
message = message_template % job_id
log.debug( message )
break
(var, value) = m.groups()
if not variables or var in variables:
properties[ var ] = value
env_string = env_string[m.end():]
return properties
def __env_file( self, job_directory ):
return self._instrument_file_path( job_directory, "vars" )
__all__ = [ 'EnvPlugin' ]
```
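A small, self-contained check of the two regular expressions used in `job_properties` above, showing why the shell-function pattern is tried first (the sample environment text is illustrative):
```python
import re

# One exported shell function followed by a plain variable, as `env` might print.
env_string = "my_func=() {  echo hi\n}\nHOME=/home/user\n"

m = re.match(r'([^=]+)=(\(\) \{.+?\n\})\n', env_string, re.DOTALL)
print(m.group(1))   # 'my_func' - the whole function body is captured as one value

env_string = env_string[m.end():]
m = re.match(r'([^=]+)=(.*)\n', env_string)
print(m.groups())   # ('HOME', '/home/user')
```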
#### File: pulsar/pulsar/main.py
```python
import logging
from logging.config import fileConfig
import os
import functools
import time
import sys
from six.moves import configparser
try:
import yaml
except ImportError:
yaml = None
try:
from daemonize import Daemonize
except ImportError:
Daemonize = None
# Vaguely Python 2.6 compatible ArgumentParser import
try:
    from argparse import ArgumentParser
except ImportError:
from optparse import OptionParser
class ArgumentParser(OptionParser):
def __init__(self, **kwargs):
self.delegate = OptionParser(**kwargs)
def add_argument(self, *args, **kwargs):
if "required" in kwargs:
del kwargs["required"]
return self.delegate.add_option(*args, **kwargs)
def parse_args(self, args=None):
(options, args) = self.delegate.parse_args(args)
return options
log = logging.getLogger(__name__)
REQUIRES_DAEMONIZE_MESSAGE = "Attempted to use Pulsar in daemon mode, but daemonize is unavailable."
PULSAR_ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if "PULSAR_CONFIG_DIR" in os.environ:
PULSAR_CONFIG_DIR = os.path.abspath(os.environ["PULSAR_CONFIG_DIR"])
else:
PULSAR_CONFIG_DIR = PULSAR_ROOT_DIR
DEFAULT_INI_APP = "main"
DEFAULT_INI = "server.ini"
DEFAULT_APP_YAML = "app.yml"
DEFAULT_MANAGER = "_default_"
DEFAULT_PID = "pulsar.pid"
DEFAULT_VERBOSE = True
DESCRIPTION = "Daemonized entry point for Pulsar services."
def load_pulsar_app(
config_builder,
config_env=False,
log=None,
**kwds
):
# Allow specification of log so daemon can reuse properly configured one.
if log is None:
log = logging.getLogger(__name__)
# If called in daemon mode, set the ROOT directory and ensure Pulsar is on
# sys.path.
if config_env:
try:
os.chdir(PULSAR_ROOT_DIR)
except Exception:
log.exception("Failed to chdir")
raise
try:
sys.path.append(PULSAR_ROOT_DIR)
except Exception:
log.exception("Failed to add Pulsar to sys.path")
raise
config_builder.setup_logging()
config = config_builder.load()
config.update(kwds)
import pulsar.core
pulsar_app = pulsar.core.PulsarApp(**config)
return pulsar_app
def app_loop(args, log):
pulsar_app = _app(args, log)
sleep = True
while sleep:
try:
time.sleep(5)
except KeyboardInterrupt:
sleep = False
except SystemExit:
sleep = False
except Exception:
pass
try:
pulsar_app.shutdown()
except Exception:
log.exception("Failed to shutdown Pulsar application")
raise
def _app(args, log):
try:
config_builder = PulsarConfigBuilder(args)
pulsar_app = load_pulsar_app(
config_builder,
config_env=True,
log=log,
)
except BaseException:
log.exception("Failed to initialize Pulsar application")
raise
return pulsar_app
def absolute_config_path(path, config_dir):
if path and not os.path.isabs(path):
path = os.path.join(config_dir, path)
return path
def _find_default_app_config(*config_dirs):
for config_dir in config_dirs:
app_config_path = os.path.join(config_dir, DEFAULT_APP_YAML)
if os.path.exists(app_config_path):
return app_config_path
return None
def load_app_configuration(ini_path=None, app_conf_path=None, app_name=None, local_conf=None, config_dir=PULSAR_CONFIG_DIR):
"""
"""
if ini_path and local_conf is None:
from galaxy.util.pastescript.loadwsgi import ConfigLoader
local_conf = ConfigLoader(ini_path).app_context(app_name).config()
local_conf = local_conf or {}
if app_conf_path is None and "app_config" in local_conf:
app_conf_path = absolute_config_path(local_conf["app_config"], config_dir)
elif ini_path:
# If not explicit app.yml file found - look next to server.ini -
# be it in pulsar root, some temporary staging directory, or /etc.
app_conf_path = _find_default_app_config(
os.path.dirname(ini_path),
)
if app_conf_path:
if yaml is None:
raise Exception("Cannot load confiuration from file %s, pyyaml is not available." % app_conf_path)
with open(app_conf_path, "r") as f:
app_conf = yaml.load(f) or {}
local_conf.update(app_conf)
return local_conf
def find_ini(supplied_ini, config_dir):
if supplied_ini:
return supplied_ini
# If not explicitly supplied an ini, check server.ini and then
# just resort to sample if that has not been configured.
for guess in ["server.ini", "server.ini.sample"]:
ini_path = os.path.join(config_dir, guess)
if os.path.exists(ini_path):
return ini_path
return guess
class PulsarConfigBuilder(object):
""" Generate paste-like configuration from supplied command-line arguments.
"""
def __init__(self, args=None, **kwds):
config_dir = kwds.get("config_dir", None) or PULSAR_CONFIG_DIR
ini_path = kwds.get("ini_path", None) or (args and args.ini_path)
app_conf_path = kwds.get("app_conf_path", None) or (args and args.app_conf_path)
# If given app_conf_path - use that - else we need to ensure we have an
# ini path.
if not app_conf_path:
ini_path = find_ini(ini_path, config_dir)
ini_path = absolute_config_path(ini_path, config_dir=config_dir)
self.config_dir = config_dir
self.ini_path = ini_path
self.app_conf_path = app_conf_path
self.app_name = kwds.get("app") or (args and args.app) or DEFAULT_INI_APP
@classmethod
def populate_options(cls, arg_parser):
arg_parser.add_argument("-c", "--config_dir", default=None)
arg_parser.add_argument("--ini_path", default=None)
arg_parser.add_argument("--app_conf_path", default=None)
arg_parser.add_argument("--app", default=DEFAULT_INI_APP)
# daemon related options...
arg_parser.add_argument("-d", "--daemonize", default=False, help="Daemonzie process", action="store_true")
arg_parser.add_argument("--daemon-log-file", default=None, help="log file for daemon script ")
arg_parser.add_argument("--pid-file", default=DEFAULT_PID, help="pid file (default is %s)" % DEFAULT_PID)
def load(self):
config = load_app_configuration(
config_dir=self.config_dir,
ini_path=self.ini_path,
app_conf_path=self.app_conf_path,
app_name=self.app_name
)
return config
def setup_logging(self):
if not self.ini_path:
# TODO: should be possible can configure using dict.
return
raw_config = configparser.ConfigParser()
raw_config.read([self.ini_path])
# https://github.com/mozilla-services/chaussette/pull/32/files
if raw_config.has_section('loggers'):
config_file = os.path.abspath(self.ini_path)
fileConfig(
config_file,
dict(__file__=config_file, here=os.path.dirname(config_file))
)
def to_dict(self):
return dict(
config_dir=self.config_dir,
ini_path=self.ini_path,
app_conf_path=self.app_conf_path,
app=self.app_name
)
class PulsarManagerConfigBuilder(PulsarConfigBuilder):
def __init__(self, args=None, **kwds):
super(PulsarManagerConfigBuilder, self).__init__(args=args, **kwds)
self.manager = kwds.get("manager", None) or (args and args.manager) or DEFAULT_MANAGER
def to_dict(self):
as_dict = super(PulsarManagerConfigBuilder, self).to_dict()
as_dict["manager"] = self.manager
return as_dict
@classmethod
def populate_options(cls, arg_parser):
PulsarConfigBuilder.populate_options(arg_parser)
arg_parser.add_argument("--manager", default=DEFAULT_MANAGER)
def main(argv=None):
if argv is None:
        argv = sys.argv[1:]
arg_parser = ArgumentParser(description=DESCRIPTION)
PulsarConfigBuilder.populate_options(arg_parser)
args = arg_parser.parse_args(argv)
pid_file = args.pid_file
log.setLevel(logging.DEBUG)
log.propagate = False
if args.daemonize:
if Daemonize is None:
raise ImportError(REQUIRES_DAEMONIZE_MESSAGE)
keep_fds = []
if args.daemon_log_file:
fh = logging.FileHandler(args.daemon_log_file, "w")
fh.setLevel(logging.DEBUG)
log.addHandler(fh)
keep_fds.append(fh.stream.fileno())
else:
fh = logging.StreamHandler(sys.stderr)
fh.setLevel(logging.DEBUG)
log.addHandler(fh)
daemon = Daemonize(
app="pulsar",
pid=pid_file,
action=functools.partial(app_loop, args, log),
verbose=DEFAULT_VERBOSE,
logger=log,
keep_fds=keep_fds,
)
daemon.start()
else:
app_loop(args, log)
if __name__ == "__main__":
main()
```
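A hedged sketch of driving the same machinery programmatically instead of through `main()`; the import path assumes the package layout in the file header and the ini path is illustrative:
```python
from pulsar.main import PulsarConfigBuilder, load_pulsar_app

# find_ini() falls back to server.ini.sample if server.ini is absent.
config_builder = PulsarConfigBuilder(ini_path="server.ini")
pulsar_app = load_pulsar_app(config_builder)
try:
    pass  # interact with pulsar_app (e.g. its managers) here
finally:
    pulsar_app.shutdown()
```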
#### File: managers/base/directory.py
```python
import logging
import os
import stat
from pulsar.managers.base import BaseManager
from pulsar.managers import PULSAR_UNKNOWN_RETURN_CODE
from ..util.job_script import job_script
from ..util.env import env_to_statement
log = logging.getLogger(__name__)
# TODO: Rename these to abstract out the fact they are files - pulsar
# should be able to replace metadata backing with non-file stuff now that
# the abstractions are fairly well utilized.
JOB_FILE_RETURN_CODE = "return_code"
JOB_FILE_STANDARD_OUTPUT = "stdout"
JOB_FILE_STANDARD_ERROR = "stderr"
JOB_FILE_TOOL_ID = "tool_id"
JOB_FILE_TOOL_VERSION = "tool_version"
JOB_FILE_CANCELLED = "cancelled"
class DirectoryBaseManager(BaseManager):
def _job_file(self, job_id, name):
return self._job_directory(job_id)._job_file(name)
def return_code(self, job_id):
return_code_str = self._read_job_file(job_id, JOB_FILE_RETURN_CODE, default=PULSAR_UNKNOWN_RETURN_CODE)
return int(return_code_str) if return_code_str and return_code_str != PULSAR_UNKNOWN_RETURN_CODE else return_code_str
def stdout_contents(self, job_id):
return self._read_job_file(job_id, JOB_FILE_STANDARD_OUTPUT, default="")
def stderr_contents(self, job_id):
return self._read_job_file(job_id, JOB_FILE_STANDARD_ERROR, default="")
def _stdout_path(self, job_id):
return self._job_file(job_id, JOB_FILE_STANDARD_OUTPUT)
def _stderr_path(self, job_id):
return self._job_file(job_id, JOB_FILE_STANDARD_ERROR)
def _return_code_path(self, job_id):
return self._job_file(job_id, JOB_FILE_RETURN_CODE)
def _setup_job_for_job_id(self, job_id, tool_id, tool_version):
self._setup_job_directory(job_id)
tool_id = str(tool_id) if tool_id else ""
tool_version = str(tool_version) if tool_version else ""
authorization = self._get_authorization(job_id, tool_id)
authorization.authorize_setup()
self._write_tool_info(job_id, tool_id, tool_version)
return job_id
def _read_job_file(self, job_id, name, **kwds):
return self._job_directory(job_id).read_file(name, **kwds)
def _write_job_file(self, job_id, name, contents):
return self._job_directory(job_id).write_file(name, contents)
def _write_return_code(self, job_id, return_code):
self._write_job_file(job_id, JOB_FILE_RETURN_CODE, str(return_code))
def _write_tool_info(self, job_id, tool_id, tool_version):
job_directory = self._job_directory(job_id)
job_directory.store_metadata(JOB_FILE_TOOL_ID, tool_id)
job_directory.store_metadata(JOB_FILE_TOOL_VERSION, tool_version)
def _record_cancel(self, job_id):
try:
self._job_directory(job_id).store_metadata(JOB_FILE_CANCELLED, True)
except Exception:
log.info("Failed to recod job with id %s was cancelled." % job_id)
def _was_cancelled(self, job_id):
try:
return self._job_directory(job_id).load_metadata(JOB_FILE_CANCELLED, None)
except Exception:
log.info("Failed to determine if job with id %s was cancelled, assuming no." % job_id)
return False
def _open_standard_output(self, job_id):
return self._job_directory(job_id).open_file(JOB_FILE_STANDARD_OUTPUT, 'w')
def _open_standard_error(self, job_id):
return self._job_directory(job_id).open_file(JOB_FILE_STANDARD_ERROR, 'w')
def _check_execution_with_tool_file(self, job_id, command_line):
tool_id = self._tool_id(job_id)
self._check_execution(job_id, tool_id, command_line)
def _tool_id(self, job_id):
tool_id = None
job_directory = self._job_directory(job_id)
if job_directory.has_metadata(JOB_FILE_TOOL_ID):
tool_id = job_directory.load_metadata(JOB_FILE_TOOL_ID)
return tool_id
# Helpers methods related to setting up job script files.
def _setup_job_file(self, job_id, command_line, dependencies_description=None, env=[]):
command_line = self._expand_command_line(command_line, dependencies_description)
script_env = self._job_template_env(job_id, command_line=command_line, env=env)
script = job_script(**script_env)
return self._write_job_script(job_id, script)
def _job_template_env(self, job_id, command_line=None, env=[]):
return_code_path = self._return_code_path(job_id)
# TODO: Add option to ignore remote env.
env = env + self.env_vars
env_setup_commands = map(env_to_statement, env)
job_template_env = {
'job_instrumenter': self.job_metrics.default_job_instrumenter,
'galaxy_lib': self._galaxy_lib(),
'env_setup_commands': env_setup_commands,
'exit_code_path': return_code_path,
'working_directory': self.job_directory(job_id).working_directory(),
'job_id': job_id,
}
if command_line:
job_template_env['command'] = command_line
return job_template_env
def _write_job_script(self, job_id, contents):
self._write_job_file(job_id, "command.sh", contents)
script_path = self._job_file(job_id, "command.sh")
os.chmod(script_path, stat.S_IEXEC | stat.S_IWRITE | stat.S_IREAD)
return script_path
```
#### File: pulsar/managers/status.py
```python
PREPROCESSING = "preprocessing"
# Job manager has queued this job for execution.
QUEUED = "queued"
# Job manager believes the job is currently running.
RUNNING = "running"
# Job manager has finished and postprocessing ran successfully.
COMPLETE = "complete"
# Job was cancelled
CANCELLED = "cancelled"
# Problem submitting the job, interfacing with the job manager,
# or postprocessing the job.
FAILED = "failed"
# DRM marked job as complete and job is being unstaged.
POSTPROCESSING = "postprocessing"
# Pulsar believed this job to be active but the job manager
# cannot determine a state for it.
LOST = "lost"
def is_job_done(status):
""" Does the supplied status correspond to a finished
job (done processing).
"""
return status in [COMPLETE, CANCELLED, FAILED, LOST]
```
#### File: pulsar/test/manager_drmaa_test.py
```python
from .test_utils import (
BaseManagerTestCase,
skip_unless_module
)
from pulsar.managers.queued_drmaa import DrmaaQueueManager
class DrmaaManagerTest(BaseManagerTestCase):
def setUp(self):
super(DrmaaManagerTest, self).setUp()
self._set_manager()
def tearDown(self):
        super(DrmaaManagerTest, self).tearDown()
self.manager.shutdown()
def _set_manager(self, **kwds):
self.manager = DrmaaQueueManager('_default_', self.app, **kwds)
@skip_unless_module("drmaa")
def test_simple_execution(self):
self._test_simple_execution(self.manager)
@skip_unless_module("drmaa")
def test_cancel(self):
self._test_cancelling(self.manager)
``` |
{
"source": "jmchilton/shedclient-beta",
"score": 2
} |
#### File: shedclient-beta/shedclient/views.py
```python
def install(request):
    _check_install_request(request)
    for repository_request in request:
        _handle_install(repository_request)
def _check_install_request(request):
if not isinstance(request, list):
raise ValueError("Install request requires list of one or more repositories to install.")
```
#### File: shedclient-beta/tests/shed_app_test_utils.py
```python
from collections import namedtuple
import contextlib
import shutil
import socket
from time import time as now
from tempfile import mkdtemp
import threading
from requests import post
from werkzeug.serving import run_simple
from .shed_app import (
app,
InMemoryShedDataModel,
)
from galaxy.util.sockets import unused_port
DEFAULT_OP_TIMEOUT = 2
def mock_model(directory):
return InMemoryShedDataModel(
directory
).add_category(
"c1", "Text Manipulation"
).add_category(
"c2", "Sequence Analysis"
).add_category(
"c3", "Tool Dependency Packages"
)
def setup_mock_shed():
port = unused_port()
directory = mkdtemp()
model = mock_model(directory)
def run():
app.debug = True
app.config["model"] = model
run_simple(
'localhost',
port,
app,
use_reloader=False,
use_debugger=True
)
t = threading.Thread(target=run)
t.start()
wait_net_service("localhost", port, DEFAULT_OP_TIMEOUT)
return MockShed("http://localhost:%d" % port, directory, t, model)
# code.activestate.com/recipes/576655-wait-for-network-service-to-appear
def wait_net_service(server, port, timeout=None):
""" Wait for network service to appear
@param timeout: in seconds, if None or 0 wait forever
    @return: True or False; if timeout is None may return only True or
throw unhandled network exception
"""
s = socket.socket()
# Following line prevents this method from interfering with process
# it is waiting for on localhost.
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if timeout:
end = now() + timeout
while True:
try:
if timeout:
next_timeout = end - now()
if next_timeout < 0:
return False
else:
s.settimeout(next_timeout)
s.connect((server, port))
except socket.timeout:
# this exception occurs only if timeout is set
if timeout:
return False
except socket.error:
pass
else:
s.close()
return True
@contextlib.contextmanager
def mock_shed():
mock_shed_obj = None
try:
mock_shed_obj = setup_mock_shed()
yield mock_shed_obj
finally:
if mock_shed_obj is not None:
mock_shed_obj.shutdown()
def _shutdown(self):
post("%s/shutdown" % self.url)
self.thread.join(DEFAULT_OP_TIMEOUT)
shutil.rmtree(self.directory)
MockShed = namedtuple("MockShed", ["url", "directory", "thread", "model"])
MockShed.shutdown = _shutdown
__all__ = ["setup_mock_shed", "mock_shed"]
```
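A brief usage sketch for the `mock_shed` context manager defined above; the import path is illustrative:
```python
from tests.shed_app_test_utils import mock_shed

with mock_shed() as shed:
    print(shed.url)        # e.g. http://localhost:<port> of the in-process shed
    print(shed.directory)  # temporary directory backing the in-memory model
    print(shed.model)      # InMemoryShedDataModel seeded with three categories
# exiting the block posts to /shutdown, joins the server thread, and removes
# the temporary directory
```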
#### File: shedclient-beta/tests/test_task_tracker.py
```python
from test_utils import TempDirectoryContext
from shedclient import task_tracker
def test_task_tracker():
with TempDirectoryContext() as context:
config = dict(
task_tracking_directory=context.temp_directory
)
tracker = task_tracker.build_task_tracker(config)
assert len(tracker.list_active_tasks()) == 0
task0_id = tracker.register_task({"state": "new"})
assert len(tracker.list_active_tasks()) == 1
task0_state0 = tracker.read_task(task0_id)
assert task0_state0["state"] == "new"
tracker.delete_task(task0_id)
assert len(tracker.list_active_tasks()) == 0
task1_id = tracker.register_task({"state": "new"})
assert len(tracker.list_active_tasks()) == 1
tracker.update_task(task1_id, {"state": "queued", "name": "task 1"})
task1_state0 = tracker.read_task(task1_id)
assert task1_state0["state"] == "queued"
assert task1_state0["name"] == "task 1"
``` |
{
"source": "jmcjacob/Nuclei_Classification",
"score": 3
} |
#### File: jmcjacob/Nuclei_Classification/bootstrap_learner.py
```python
import math
import time
import numpy as np
from active_learner import ActiveLearner
class BootStrap_Learner(ActiveLearner):
def run(self):
""" Runs the active learner with random data updates.
:return: The lists of testing metrics from each iteration.
"""
# Defines the lists to store the metrics.
accuracies, mean_accuracies, recalls, precisions, f1_scores, losses = [], [], [], [], [], []
start_time = time.clock()
# Loops until all the data has been read.
while len(self.data.data_y) != 0 and len(accuracies) < self.config.max_updates:
self.log("\nCycle " + str(len(accuracies) + 1))
# Trains a model with the training data.
accuracy, mean_accuracy, recall, precision, f1_score, loss = self.model.train(self.data)
# Adds the metrics to the lists.
accuracies.append(accuracy)
mean_accuracies.append(mean_accuracy)
recalls.append(recall)
precisions.append(precision)
f1_scores.append(f1_score)
losses.append(loss)
predictions, labels = self.model.predict(self.data, np.average)
if self.config.pseudo_labels and len(self.data.data_y) != 0:
self.data.add_pesudo_labels(predictions, labels)
else:
self.data.pseudo_indices = []
train_x = np.append(self.data.train_x, self.data.data_x[self.data.pseudo_indices])
train_y = np.append(self.data.train_y, self.data.data_y[self.data.pseudo_indices])
if self.config.shortlist < len(self.data.data_y):
update = self.config.shortlist
else:
update = len(self.data.data_x) // self.config.cell_patches
uncertainties = []
for prediction in predictions:
uncertainty = max(prediction)
for i in range(self.config.num_classes):
if prediction[i] != 0.0:
uncertainty -= prediction[i] * math.log(prediction[i])
uncertainties.append(uncertainty)
indices = [i[1] for i in sorted(((value, index) for index, value in enumerate(uncertainties)),
reverse=True)[:update]]
bootstraps = self.data.get_bootstraps(train_x, train_y, indices)
cell_predictions = []
for i in range(len(bootstraps)):
self.log("\nBootstrap " + str(i + 1))
predictions, _ = self.model.train(bootstraps[i], test=False)
cell_predictions.append(predictions)
cell_predictions = np.average(cell_predictions, axis=0)
update_size = int(np.around(len(self.data.data_y) * self.config.update_size))
if update_size * self.config.cell_patches < len(self.data.data_y):
update = update_size
else:
update = len(self.data.data_x) // self.config.cell_patches
uncertainties = []
for prediction in cell_predictions:
uncertainty = max(prediction)
for i in range(self.config.num_classes):
uncertainty -= prediction[i] * math.log(prediction[i])
uncertainties.append(uncertainty)
indices = [i[1] for i in sorted(((value, index) for index, value in enumerate(uncertainties)),
reverse=True)[:update]]
self.data.set_training_data(indices)
self.log("\n\n")
# Trains the model with all the data.
accuracy, mean_accuracy, recall, precision, f1_score, loss = self.model.train(self.data)
# Adds the metrics to the lists.
accuracies.append(accuracy)
mean_accuracies.append(mean_accuracy)
recalls.append(recall)
precisions.append(precision)
f1_scores.append(f1_score)
losses.append(loss)
# Logs the metrics.
self.log("Accuracies: " + str(accuracies) + "\n")
self.log("Mean Class Accuracies: " + str(mean_accuracies) + "\n")
self.log("Recalls: " + str(recalls) + "\n")
self.log("Precisions: " + str(precisions) + "\n")
self.log("F1 Scores: " + str(f1_scores) + "\n")
self.log("Losses: " + str(losses))
self.log("Cycles: " + str(len(accuracies)) + " Time: " + str(time.clock() - start_time))
# Returns the list of metrics.
return accuracies, mean_accuracies, recalls, precisions, f1_scores, losses
```
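The ranking above scores each cell as the maximum class probability plus the Shannon entropy of the averaged prediction (the loop subtracts `p * math.log(p)`, which is negative for probabilities below one). A quick numeric illustration with a made-up prediction:
```python
import math

prediction = [0.7, 0.2, 0.1]            # illustrative averaged softmax output
uncertainty = max(prediction)           # 0.7
for p in prediction:
    if p != 0.0:
        uncertainty -= p * math.log(p)  # adds the entropy contribution
print(round(uncertainty, 3))            # 1.502
```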
#### File: jmcjacob/Nuclei_Classification/data_handler.py
```python
import numpy as np
from collections import Counter
import sklearn.metrics as metrics
class DataHandler:
def __init__(self, config, load_data=True):
""" The initialiser for the DataHandler class.
:param config: A ArgumentParser object.
"""
# Creates the lists to store data.
self.train_x, self.train_y = np.array([]), np.array([])
self.test_x, self.test_y = np.array([]), np.array([])
self.val_x, self.val_y = np.array([]), np.array([])
self.data_x, self.data_y = np.array([]), np.array([])
# Sets the class members.
self.val_per = config.val_per
self.verbose = config.verbose
self.config = config
self.pseudo_indices = []
# Loads the training data into the unannotated data stores.
if load_data:
self.load_training_data(config.data_dir)
self.load_testing_data(config.data_dir)
def log(self, message):
""" Method to handle printing and logging of messages.
:param message: String of message to be printed and logged.
"""
if self.config.verbose:
print(message)
if self.config.log_file != '':
print(message, file=open(self.config.log_file, 'a'))
def load_training_data(self, data_dir):
""" Loads the training data to the unannotated lists.
:param data_dir: The data directory.
"""
values = np.load(data_dir + "Training/values.npy")
self.data_x = np.array(values[:, 0])
self.data_x = np.array(["Training/" + i for i in self.data_x])
self.data_y = values[:, 1].astype(int)
self.log("Loaded " + str(int(len(self.data_y) / self.config.cell_patches)) + " Unannotated Cells")
def load_testing_data(self, data_dir):
""" Loads the testing data to the testing data lists.
:param data_dir: The data directory.
"""
values = np.load(data_dir + "Testing/values.npy")
self.test_x = np.array(values[:, 0])
self.test_x = np.array(["Testing/" + i for i in self.test_x])
self.test_y = values[:,1].astype(int)
self.log("Loaded " + str(int(len(self.test_y) / self.config.cell_patches)) + " Testing Cells")
def balance(self, x_list, y_list):
""" A method to balance a set of data.
:param x_list: A list of data.
:param y_list: A list of labels.
:return: balanced x and y lists.
"""
# TODO - make this work with cell patches
balance = Counter(y_list)
min_values = min(list(balance.values()))
indices = []
for c in range(self.config.num_classes):
class_values = balance[c]
            indices += list(np.random.permutation([j for j, i in enumerate(y_list) if i == c])
                            [:class_values - min_values])
x_list = np.array([i for j, i in enumerate(x_list) if j not in indices])
y_list = np.array([i for j, i in enumerate(y_list) if j not in indices])
return x_list, y_list
def set_validation_set(self, x, y):
""" Sets the validation set from the training data.
"""
num_val = int((len(y) / self.config.cell_patches) * self.val_per)
indices = []
cell_indices = np.random.choice(list(range(len(y) // self.config.cell_patches)), num_val, False)
for i in cell_indices:
index = i * self.config.cell_patches
indices += list(range(index, index + self.config.cell_patches))
val_x = np.take(x, indices)
val_y = np.take(y, indices)
x = np.delete(x, indices)
y = np.delete(y, indices)
# val_x = np.array([i for j, i in enumerate(self.train_x) if j in indices])
# val_y = np.array([i for j, i in enumerate(self.train_y) if j in indices])
# self.data_x = np.delete(self.train_x, indices)
# self.data_y = np.delete(self.train_y, indices)
if self.config.combine.lower() == "add":
self.val_x = np.append(self.val_x, val_x)
self.val_y = np.append(self.val_y, val_y)#, axis=0) if len(self.val_y) != 0 else val_y
elif self.config.combine.lower() == "replace":
self.val_x = val_x
self.val_y = val_y
return x, y
def all_data(self):
""" Sets all data from the unlabelled data to the training set.
"""
# Sets the unlabelled data to the training set.
self.train_x = self.data_x
self.train_y = self.data_y
self.data_x = np.array([])
self.data_y = np.array([])
# Sets the validation set.
self.train_x, self.train_y = self.set_validation_set(self.train_x, self.train_y)
# Balances the training data.
if self.config.balance:
self.train_x, self.train_y = self.balance(self.train_x, self.train_y)
# Logs the number of patches.
self.log("Training Patches: " + str(len(self.train_y)))
self.log("Validation Patches: " + str(len(self.val_y)))
def set_training_data(self, indices):
""" Sets data from the unlabelled data to the training set.
:param indices: A list of indices to be moved from unlabelled to training.
"""
# Sets the full list of indices
full_indices = []
for index in indices:
index *= self.config.cell_patches
full_indices += list(range(index, index + self.config.cell_patches))
        # Sets temporary lists to the data to be added.
temp_x = np.take(self.data_x, full_indices)
temp_y = np.take(self.data_y, full_indices)#, axis=0)
# Removes the data from the unannotated list.
self.data_x = np.delete(self.data_x, full_indices)
self.data_y = np.delete(self.data_y, full_indices)#, axis=0)
# Sets the validation data.
temp_x, temp_y = self.set_validation_set(temp_x, temp_y)
# Balances the data.
if self.config.balance:
temp_x, temp_y = self.balance(temp_x, temp_y)
# Adds the data depending on specified method.
if self.config.combine.lower() == "add":
self.train_x = np.append(self.train_x, temp_x)
self.train_y = np.append(self.train_y, temp_y)#, axis=0) if len(self.train_y) != 0 else temp_y
elif self.config.combine.lower() == "replace":
self.train_x = temp_x
self.train_y = temp_y
# Logs the number of patches.
self.log("Training Patches: " + str(len(self.train_y)))
self.log("Validation Patches: " + str(len(self.val_y)))
def sample_data(self, x, y):
""" Method for randomly sampling each cell within the inputted data.
:param x: The x data.
:param y: The y data.
:return: Sampled x and y data.
"""
indices = []
for i in range(0, len(x) - 1, self.config.cell_patches):
cell_indices = list(range(i, i + self.config.cell_patches))
indices += np.random.choice(cell_indices, self.config.sample_size, replace=False).tolist()
return np.take(x, indices), np.take(y, indices)
def add_pesudo_labels(self, predictions, labels):
""" Adds unlabelled cells to be used in training data.
:param cell_indices: The indices of the cells.to be added as pesudo labels
"""
indices = [j for j, i in enumerate(predictions) if max(i) > self.config.pseudo_threshold]
self.pseudo_indices = []
for cell_index in indices:
index = cell_index * self.config.cell_patches
self.pseudo_indices += list(range(index, index + self.config.cell_patches))
self.log("Pesudo Cells: " + str(len(indices)))
self.log("Pesudo Patches: " + str(len(indices) * self.config.cell_patches))
predicted_labels = np.argmax(np.array(predictions)[indices], axis=1)
self.log("Pesudo Accuracy: " + str(float(metrics.accuracy_score(np.array(labels)[indices], predicted_labels))))
def get_training_data(self):
""" Method for getting the data for training including pesudo labels and sampling.
:return: Two lists representing x and y data.
"""
if self.config.mode != "bootstrap":
train_x = np.append(self.train_x, self.data_x[self.pseudo_indices])
train_y = np.append(self.train_y, self.data_y[self.pseudo_indices])
return train_x, train_y
else:
return self.train_x, self.train_y
def get_bootstraps(self, data_x, data_y, shortlist_indices):
""" Method for extracting bootstraped data handelers.
:param data_x: A list of data
:param data_y: A list of labels
        :return: A list of data handlers
"""
bootstraps = []
for _ in range(self.config.bootstrap_number):
            indices = np.random.choice(range(0, len(data_y), self.config.cell_patches),
                                       self.config.bootstrap_size, replace=True)
full_indices = []
for index in indices:
full_indices += list(range(index, index + self.config.cell_patches))
bootstrap_x = data_x[full_indices]
bootstrap_y = data_y[full_indices]
data = DataHandler(self.config, False)
bootstrap_x, bootstrap_y = data.set_validation_set(bootstrap_x, bootstrap_y)
data.train_x = bootstrap_x
data.train_y = bootstrap_y
full_indices = []
for i in shortlist_indices:
full_indices += list(range(i, i + self.config.cell_patches))
data.data_x = self.data_x[full_indices]
data.data_y = self.data_y[full_indices]
bootstraps.append(data)
return bootstraps
``` |
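`DataHandler` and the learners expand cell-level indices into per-patch indices with the same `index * cell_patches` pattern throughout; a small, self-contained illustration (values are made up):
```python
cell_patches = 4             # stands in for config.cell_patches
selected_cells = [0, 2]      # cell indices chosen by the active learner
patch_indices = []
for cell_index in selected_cells:
    start = cell_index * cell_patches
    patch_indices += list(range(start, start + cell_patches))
print(patch_indices)         # [0, 1, 2, 3, 8, 9, 10, 11]
```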
{
"source": "jmckaskill/subversion",
"score": 2
} |
#### File: build/generator/gen_base.py
```python
import os
import sys
import glob
import re
import fileinput
import filecmp
try:
# Python >=3.0
import configparser
except ImportError:
# Python <3.0
import ConfigParser as configparser
import generator.swig
import getversion
def _warning(msg):
sys.stderr.write("WARNING: %s\n" % msg)
def _error(msg):
sys.stderr.write("ERROR: %s\n" % msg)
sys.exit(1)
class GeneratorBase:
#
# Derived classes should define a class attribute named _extension_map.
# This attribute should be a dictionary of the form:
# { (target-type, file-type): file-extension ...}
#
# where: target-type is 'exe', 'lib', ...
# file-type is 'target', 'object', ...
#
def __init__(self, fname, verfname, options=None):
# Retrieve major version from the C header, to avoid duplicating it in
# build.conf - it is required because some file names include it.
try:
vsn_parser = getversion.Parser()
vsn_parser.search('SVN_VER_MAJOR', 'libver')
self.version = vsn_parser.parse(verfname).libver
except:
raise GenError('Unable to extract version.')
# Read options
self.release_mode = None
for opt, val in options:
if opt == '--release':
self.release_mode = 1
# Now read and parse build.conf
parser = configparser.ConfigParser()
parser.read(fname)
self.conf = build_path(os.path.abspath(fname))
self.sections = { }
self.graph = DependencyGraph()
# Allow derived classes to suppress certain configuration sections
if not hasattr(self, 'skip_sections'):
self.skip_sections = { }
# The 'options' section does not represent a build target,
# it simply contains global options
self.skip_sections['options'] = None
# Read in the global options
self.includes = \
_collect_paths(parser.get('options', 'includes'))
self.private_includes = \
_collect_paths(parser.get('options', 'private-includes'))
self.private_built_includes = \
parser.get('options', 'private-built-includes').split()
self.scripts = \
_collect_paths(parser.get('options', 'test-scripts'))
self.bdb_scripts = \
_collect_paths(parser.get('options', 'bdb-test-scripts'))
self.include_wildcards = \
parser.get('options', 'include-wildcards').split()
self.swig_lang = parser.get('options', 'swig-languages').split()
self.swig_dirs = parser.get('options', 'swig-dirs').split()
# SWIG Generator
self.swig = generator.swig.Generator(self.conf, "swig")
# Visual C++ projects - contents are either TargetProject instances,
# or other targets with an external-project attribute.
self.projects = []
# Lists of pathnames of various kinds
self.test_deps = [] # Non-BDB dependent items to build for the tests
self.test_progs = [] # Subset of the above to actually execute
self.test_helpers = [] # $ {test_deps} \setminus {test_progs} $
self.bdb_test_deps = [] # BDB-dependent items to build for the tests
self.bdb_test_progs = [] # Subset of the above to actually execute
self.target_dirs = [] # Directories in which files are built
self.manpages = [] # Manpages
# Collect the build targets and have a reproducible ordering
parser_sections = sorted(parser.sections())
for section_name in parser_sections:
if section_name in self.skip_sections:
continue
options = {}
for option in parser.options(section_name):
options[option] = parser.get(section_name, option)
type = options.get('type')
target_class = _build_types.get(type)
if not target_class:
raise GenError('ERROR: unknown build type for ' + section_name)
section = target_class.Section(target_class, section_name, options, self)
self.sections[section_name] = section
section.create_targets()
# Compute intra-library dependencies
for section in self.sections.values():
dependencies = (( DT_LINK, section.options.get('libs', "") ),
( DT_NONLIB, section.options.get('nonlibs', "") ))
for dep_type, dep_names in dependencies:
# Translate string names to Section objects
dep_section_objects = []
for section_name in dep_names.split():
if section_name in self.sections:
dep_section_objects.append(self.sections[section_name])
# For each dep_section that this section declares a dependency on,
# take the targets of this section, and register a dependency on
# any 'matching' targets of the dep_section.
#
# At the moment, the concept of multiple targets per section is
# employed only for the SWIG modules, which have 1 target
# per language. Then, 'matching' means being of the same language.
for dep_section in dep_section_objects:
for target in section.get_targets():
self.graph.bulk_add(dep_type, target.name,
dep_section.get_dep_targets(target))
def compute_hdrs(self):
"""Get a list of the header files"""
all_includes = list(map(native_path, self.includes + self.private_includes))
for d in unique(self.target_dirs):
for wildcard in self.include_wildcards:
hdrs = glob.glob(os.path.join(native_path(d), wildcard))
all_includes.extend(hdrs)
return all_includes
def compute_hdr_deps(self):
"""Compute the dependencies of each header file"""
include_deps = IncludeDependencyInfo(self.compute_hdrs(),
list(map(native_path, self.private_built_includes)))
for objectfile, sources in self.graph.get_deps(DT_OBJECT):
assert len(sources) == 1
source = sources[0]
# Generated .c files must depend on all headers their parent .i file
# includes
if isinstance(objectfile, SWIGObject):
swigsources = self.graph.get_sources(DT_SWIG_C, source)
assert len(swigsources) == 1
ifile = swigsources[0]
assert isinstance(ifile, SWIGSource)
c_includes, swig_includes = \
include_deps.query_swig(native_path(ifile.filename))
for include_file in c_includes:
self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
for include_file in swig_includes:
self.graph.add(DT_SWIG_C, source, build_path(include_file))
# Any non-swig C/C++ object must depend on the headers its parent
# .c or .cpp includes. Note that 'object' includes gettext .mo files,
# Java .class files, and .h files generated from Java classes, so
# we must filter here.
elif isinstance(source, SourceFile) and \
os.path.splitext(source.filename)[1] in ('.c', '.cpp'):
for include_file in include_deps.query(native_path(source.filename)):
self.graph.add(DT_OBJECT, objectfile, build_path(include_file))
def write_sqlite_headers(self):
"Transform sql files into header files"
import transform_sql
for hdrfile, sqlfile in self.graph.get_deps(DT_SQLHDR):
new_hdrfile = hdrfile + ".new"
new_file = open(new_hdrfile, 'w')
transform_sql.main(sqlfile[0], new_file)
new_file.close()
      def identical(file1, file2):
        try:
          return filecmp.cmp(file1, file2)
        except:
          return False
if identical(new_hdrfile, hdrfile):
os.remove(new_hdrfile)
else:
try:
os.remove(hdrfile)
except: pass
os.rename(new_hdrfile, hdrfile)
class DependencyGraph:
"""Record dependencies between build items.
See the DT_* values for the different dependency types. For each type,
the target and source objects recorded will be different. They could
be file names, Target objects, install types, etc.
"""
def __init__(self):
self.deps = { } # type -> { target -> [ source ... ] }
for dt in dep_types:
self.deps[dt] = { }
def add(self, type, target, source):
if target in self.deps[type]:
self.deps[type][target].append(source)
else:
self.deps[type][target] = [ source ]
def bulk_add(self, type, target, sources):
if target in self.deps[type]:
self.deps[type][target].extend(sources)
else:
self.deps[type][target] = sources[:]
def get_sources(self, type, target, cls=None):
sources = self.deps[type].get(target, [ ])
if not cls:
return sources
filtered = [ ]
for src in sources:
if isinstance(src, cls):
filtered.append(src)
return filtered
def get_all_sources(self, type):
sources = [ ]
for group in self.deps[type].values():
sources.extend(group)
return sources
def get_deps(self, type):
return list(self.deps[type].items())
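# A hedged usage sketch for DependencyGraph (target/source names below are
# illustrative, not taken from build.conf; the DT_* constants are created just
# below):
#
#   graph = DependencyGraph()
#   graph.add(DT_OBJECT, 'subversion/libsvn_fs/fs.o', 'subversion/libsvn_fs/fs.c')
#   graph.bulk_add(DT_LINK, 'libsvn_fs', ['subversion/libsvn_fs/fs.o'])
#   graph.get_sources(DT_OBJECT, 'subversion/libsvn_fs/fs.o')
#   #   -> ['subversion/libsvn_fs/fs.c']
#   graph.get_deps(DT_LINK)
#   #   -> [('libsvn_fs', ['subversion/libsvn_fs/fs.o'])]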
# dependency types
dep_types = [
'DT_INSTALL', # install areas. e.g. 'lib', 'base-lib'
'DT_OBJECT', # an object filename, depending upon .c filenames
'DT_SWIG_C', # a swig-generated .c file, depending upon .i filename(s)
'DT_LINK', # a libtool-linked filename, depending upon object fnames
'DT_NONLIB', # filename depends on object fnames, but isn't linked to them
'DT_SQLHDR', # header generated from a .sql file
]
# create some variables for these
for _dt in dep_types:
# e.g. DT_INSTALL = 'DT_INSTALL'
globals()[_dt] = _dt
class DependencyNode:
def __init__(self, filename, when = None):
self.filename = filename
self.when = when
def __str__(self):
return self.filename
class ObjectFile(DependencyNode):
def __init__(self, filename, compile_cmd = None, when = None):
DependencyNode.__init__(self, filename, when)
self.compile_cmd = compile_cmd
self.source_generated = 0
class SWIGObject(ObjectFile):
def __init__(self, filename, lang):
ObjectFile.__init__(self, filename)
self.lang = lang
self.lang_abbrev = lang_abbrev[lang]
self.source_generated = 1
### hmm. this is Makefile-specific
self.compile_cmd = '$(COMPILE_%s_WRAPPER)' % self.lang_abbrev.upper()
class HeaderFile(DependencyNode):
def __init__(self, filename, classname = None, compile_cmd = None):
DependencyNode.__init__(self, filename)
self.classname = classname
self.compile_cmd = compile_cmd
class SourceFile(DependencyNode):
def __init__(self, filename, reldir):
DependencyNode.__init__(self, filename)
self.reldir = reldir
class SWIGSource(SourceFile):
def __init__(self, filename):
SourceFile.__init__(self, filename, build_path_dirname(filename))
lang_abbrev = {
'python' : 'py',
'perl' : 'pl',
'ruby' : 'rb',
}
lang_full_name = {
'python' : 'Python',
'perl' : 'Perl',
'ruby' : 'Ruby',
}
lang_utillib_suffix = {
'python' : 'py',
'perl' : 'perl',
'ruby' : 'ruby',
}
class Target(DependencyNode):
"A build target is a node in our dependency graph."
def __init__(self, name, options, gen_obj):
self.name = name
self.gen_obj = gen_obj
self.desc = options.get('description')
self.when = options.get('when')
self.path = options.get('path', '')
self.add_deps = options.get('add-deps', '')
self.add_install_deps = options.get('add-install-deps', '')
self.msvc_name = options.get('msvc-name') # override project name
def add_dependencies(self):
# subclasses should override to provide behavior, as appropriate
raise NotImplementedError
class Section:
"""Represents an individual section of build.conf
The Section class is sort of a factory class which is responsible for
creating and keeping track of Target instances associated with a section
of the configuration file. By default it only allows one Target per
section, but subclasses may create multiple Targets.
"""
def __init__(self, target_class, name, options, gen_obj):
self.target_class = target_class
self.name = name
self.options = options
self.gen_obj = gen_obj
def create_targets(self):
"""Create target instances"""
self.target = self.target_class(self.name, self.options, self.gen_obj)
self.target.add_dependencies()
def get_targets(self):
"""Return list of target instances associated with this section"""
return [self.target]
def get_dep_targets(self, target):
"""Return list of targets from this section that "target" depends on"""
return [self.target]
class TargetLinked(Target):
"The target is linked (by libtool) against other libraries."
def __init__(self, name, options, gen_obj):
Target.__init__(self, name, options, gen_obj)
self.install = options.get('install')
self.compile_cmd = options.get('compile-cmd')
self.sources = options.get('sources', '*.c *.cpp')
self.link_cmd = options.get('link-cmd', '$(LINK)')
self.external_lib = options.get('external-lib')
self.external_project = options.get('external-project')
self.msvc_libs = options.get('msvc-libs', '').split()
def add_dependencies(self):
if self.external_lib or self.external_project:
if self.external_project:
self.gen_obj.projects.append(self)
return
# the specified install area depends upon this target
self.gen_obj.graph.add(DT_INSTALL, self.install, self)
    sources = sorted(_collect_paths(self.sources or '*.c *.cpp', self.path))
for srcs, reldir in sources:
for src in srcs.split(" "):
if glob.glob(src):
if src[-2:] == '.c':
objname = src[:-2] + self.objext
elif src[-4:] == '.cpp':
objname = src[:-4] + self.objext
else:
raise GenError('ERROR: unknown file extension on ' + src)
ofile = ObjectFile(objname, self.compile_cmd, self.when)
# object depends upon source
self.gen_obj.graph.add(DT_OBJECT, ofile, SourceFile(src, reldir))
# target (a linked item) depends upon object
self.gen_obj.graph.add(DT_LINK, self.name, ofile)
# collect all the paths where stuff might get built
### we should collect this from the dependency nodes rather than
### the sources. "what dir are you going to put yourself into?"
self.gen_obj.target_dirs.append(self.path)
for pattern in self.sources.split():
dirname = build_path_dirname(pattern)
if dirname:
self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
class TargetExe(TargetLinked):
def __init__(self, name, options, gen_obj):
TargetLinked.__init__(self, name, options, gen_obj)
if not (self.external_lib or self.external_project):
extmap = self.gen_obj._extension_map
self.objext = extmap['exe', 'object']
self.filename = build_path_join(self.path, name + extmap['exe', 'target'])
self.manpages = options.get('manpages', '')
self.testing = options.get('testing')
def add_dependencies(self):
TargetLinked.add_dependencies(self)
# collect test programs
if self.install == 'test':
self.gen_obj.test_deps.append(self.filename)
if self.testing != 'skip':
self.gen_obj.test_progs.append(self.filename)
else:
self.gen_obj.test_helpers.append(self.filename)
elif self.install == 'bdb-test':
self.gen_obj.bdb_test_deps.append(self.filename)
if self.testing != 'skip':
self.gen_obj.bdb_test_progs.append(self.filename)
self.gen_obj.manpages.extend(self.manpages.split())
class TargetScript(Target):
def add_dependencies(self):
# we don't need to "compile" the sources, so there are no dependencies
# to add here, except to get the script installed in the proper area.
# note that the script might itself be generated, but that isn't a
# concern here.
self.gen_obj.graph.add(DT_INSTALL, self.install, self)
class TargetLib(TargetLinked):
def __init__(self, name, options, gen_obj):
TargetLinked.__init__(self, name, options, gen_obj)
if not (self.external_lib or self.external_project):
extmap = gen_obj._extension_map
self.objext = extmap['lib', 'object']
# the target file is the name, version, and appropriate extension
tfile = '%s-%s%s' % (name, gen_obj.version, extmap['lib', 'target'])
self.filename = build_path_join(self.path, tfile)
# Is a library referencing symbols which are undefined at link time.
self.undefined_lib_symbols = options.get('undefined-lib-symbols') == 'yes'
self.link_cmd = options.get('link-cmd', '$(LINK_LIB)')
self.msvc_static = options.get('msvc-static') == 'yes' # is a static lib
self.msvc_fake = options.get('msvc-fake') == 'yes' # has fake target
self.msvc_export = options.get('msvc-export', '').split()
class TargetApacheMod(TargetLib):
def __init__(self, name, options, gen_obj):
TargetLib.__init__(self, name, options, gen_obj)
tfile = name + self.gen_obj._extension_map['lib', 'target']
self.filename = build_path_join(self.path, tfile)
# we have a custom linking rule
### hmm. this is Makefile-specific
self.compile_cmd = '$(COMPILE_APACHE_MOD)'
self.link_cmd = '$(LINK_APACHE_MOD)'
class TargetRaModule(TargetLib):
pass
class TargetFsModule(TargetLib):
pass
class TargetDoc(Target):
pass
class TargetI18N(Target):
"The target is a collection of .po files to be compiled by msgfmt."
def __init__(self, name, options, gen_obj):
Target.__init__(self, name, options, gen_obj)
self.install = options.get('install')
self.sources = options.get('sources')
# Let the Makefile determine this via .SUFFIXES
self.compile_cmd = None
self.objext = '.mo'
self.external_project = options.get('external-project')
def add_dependencies(self):
self.gen_obj.graph.add(DT_INSTALL, self.install, self)
sources = sorted(_collect_paths(self.sources or '*.po', self.path))
for src, reldir in sources:
if src[-3:] == '.po':
objname = src[:-3] + self.objext
else:
raise GenError('ERROR: unknown file extension on ' + src)
ofile = ObjectFile(objname, self.compile_cmd, self.when)
# object depends upon source
self.gen_obj.graph.add(DT_OBJECT, ofile, SourceFile(src, reldir))
# target depends upon object
self.gen_obj.graph.add(DT_LINK, self.name, ofile)
# Add us to the list of target dirs, so we're created in mkdir-init.
self.gen_obj.target_dirs.append(self.path)
class TargetSWIG(TargetLib):
def __init__(self, name, options, gen_obj, lang):
TargetLib.__init__(self, name, options, gen_obj)
self.lang = lang
self.desc = self.desc + ' for ' + lang_full_name[lang]
self.include_runtime = options.get('include-runtime') == 'yes'
### hmm. this is Makefile-specific
self.link_cmd = '$(LINK_%s_WRAPPER)' % lang_abbrev[lang].upper()
def add_dependencies(self):
# Look in source directory for dependencies
self.gen_obj.target_dirs.append(self.path)
sources = _collect_paths(self.sources, self.path)
assert len(sources) == 1 ### simple assertions for now
# get path to SWIG .i file
ipath = sources[0][0]
iname = build_path_basename(ipath)
assert iname[-2:] == '.i'
cname = iname[:-2] + '.c'
oname = iname[:-2] + self.gen_obj._extension_map['pyd', 'object']
# Extract SWIG module name from .i file name
module_name = iname[:4] != 'svn_' and iname[:-2] or iname[4:-2]
lib_extension = self.gen_obj._extension_map['lib', 'target']
if self.lang == "ruby":
lib_filename = module_name + lib_extension
elif self.lang == "perl":
lib_filename = '_' + module_name.capitalize() + lib_extension
else:
lib_extension = self.gen_obj._extension_map['pyd', 'target']
lib_filename = '_' + module_name + lib_extension
self.name = self.lang + '_' + module_name
self.path = build_path_join(self.path, self.lang)
if self.lang == "perl":
self.path = build_path_join(self.path, "native")
self.filename = build_path_join(self.path, lib_filename)
ifile = SWIGSource(ipath)
cfile = SWIGObject(build_path_join(self.path, cname), self.lang)
ofile = SWIGObject(build_path_join(self.path, oname), self.lang)
# the .c file depends upon the .i file
self.gen_obj.graph.add(DT_SWIG_C, cfile, ifile)
# the object depends upon the .c file
self.gen_obj.graph.add(DT_OBJECT, ofile, cfile)
# the library depends upon the object
self.gen_obj.graph.add(DT_LINK, self.name, ofile)
# the specified install area depends upon the library
self.gen_obj.graph.add(DT_INSTALL, 'swig-' + lang_abbrev[self.lang], self)
class Section(TargetLib.Section):
def create_targets(self):
self.targets = { }
for lang in self.gen_obj.swig_lang:
target = self.target_class(self.name, self.options, self.gen_obj, lang)
target.add_dependencies()
self.targets[lang] = target
def get_targets(self):
return list(self.targets.values())
def get_dep_targets(self, target):
target = self.targets.get(target.lang, None)
return target and [target] or [ ]
class TargetSWIGLib(TargetLib):
def __init__(self, name, options, gen_obj):
TargetLib.__init__(self, name, options, gen_obj)
self.lang = options.get('lang')
class Section(TargetLib.Section):
def get_dep_targets(self, target):
if target.lang == self.target.lang:
return [ self.target ]
return [ ]
class TargetProject(Target):
def __init__(self, name, options, gen_obj):
Target.__init__(self, name, options, gen_obj)
self.cmd = options.get('cmd')
self.release = options.get('release')
self.debug = options.get('debug')
def add_dependencies(self):
self.gen_obj.projects.append(self)
class TargetSWIGProject(TargetProject):
def __init__(self, name, options, gen_obj):
TargetProject.__init__(self, name, options, gen_obj)
self.lang = options.get('lang')
class TargetJava(TargetLinked):
def __init__(self, name, options, gen_obj):
TargetLinked.__init__(self, name, options, gen_obj)
self.link_cmd = options.get('link-cmd')
self.packages = options.get('package-roots', '').split()
self.jar = options.get('jar')
self.deps = [ ]
class TargetJavaHeaders(TargetJava):
def __init__(self, name, options, gen_obj):
TargetJava.__init__(self, name, options, gen_obj)
self.objext = '.class'
self.javah_objext = '.h'
self.headers = options.get('headers')
self.classes = options.get('classes')
self.package = options.get('package')
self.output_dir = self.headers
def add_dependencies(self):
sources = _collect_paths(self.sources, self.path)
for src, reldir in sources:
if src[-5:] != '.java':
raise GenError('ERROR: unknown file extension on ' + src)
class_name = build_path_basename(src[:-5])
class_header = build_path_join(self.headers, class_name + '.h')
class_header_win = build_path_join(self.headers,
self.package.replace(".", "_")
+ "_" + class_name + '.h')
class_pkg_list = self.package.split('.')
class_pkg = build_path_join(*class_pkg_list)
class_file = ObjectFile(build_path_join(self.classes, class_pkg,
class_name + self.objext),
self.when)
class_file.source_generated = 1
class_file.class_name = class_name
hfile = HeaderFile(class_header, self.package + '.' + class_name,
self.compile_cmd)
hfile.filename_win = class_header_win
hfile.source_generated = 1
self.gen_obj.graph.add(DT_OBJECT, hfile, class_file)
self.deps.append(hfile)
# target (a linked item) depends upon object
self.gen_obj.graph.add(DT_LINK, self.name, hfile)
# collect all the paths where stuff might get built
### we should collect this from the dependency nodes rather than
### the sources. "what dir are you going to put yourself into?"
self.gen_obj.target_dirs.append(self.path)
self.gen_obj.target_dirs.append(self.classes)
self.gen_obj.target_dirs.append(self.headers)
for pattern in self.sources.split():
dirname = build_path_dirname(pattern)
if dirname:
self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
self.gen_obj.graph.add(DT_INSTALL, self.name, self)
class TargetJavaClasses(TargetJava):
def __init__(self, name, options, gen_obj):
TargetJava.__init__(self, name, options, gen_obj)
self.objext = '.class'
self.lang = 'java'
self.classes = options.get('classes')
self.output_dir = self.classes
def add_dependencies(self):
sources = []
for p in self.path.split():
sources.extend(_collect_paths(self.sources, p))
for src, reldir in sources:
if src[-5:] == '.java':
objname = src[:-5] + self.objext
# As .class files are likely not generated into the same
# directory as the source files, the object path may need
# adjustment. To this effect, take "target_ob.classes" into
# account.
dirs = build_path_split(objname)
sourcedirs = dirs[:-1] # Last element is the .class file name.
while sourcedirs:
if sourcedirs.pop() in self.packages:
sourcepath = build_path_join(*sourcedirs)
objname = build_path_join(self.classes, *dirs[len(sourcedirs):])
break
else:
raise GenError('Unable to find Java package root in path "%s"' % objname)
else:
raise GenError('ERROR: unknown file extension on "' + src + '"')
ofile = ObjectFile(objname, self.compile_cmd, self.when)
sfile = SourceFile(src, reldir)
sfile.sourcepath = sourcepath
# object depends upon source
self.gen_obj.graph.add(DT_OBJECT, ofile, sfile)
# target (a linked item) depends upon object
self.gen_obj.graph.add(DT_LINK, self.name, ofile)
# Add the class file to the dependency tree for this target
self.deps.append(ofile)
# collect all the paths where stuff might get built
### we should collect this from the dependency nodes rather than
### the sources. "what dir are you going to put yourself into?"
self.gen_obj.target_dirs.extend(self.path.split())
self.gen_obj.target_dirs.append(self.classes)
for pattern in self.sources.split():
dirname = build_path_dirname(pattern)
if dirname:
self.gen_obj.target_dirs.append(build_path_join(self.path, dirname))
self.gen_obj.graph.add(DT_INSTALL, self.name, self)
class TargetSQLHeader(Target):
def __init__(self, name, options, gen_obj):
Target.__init__(self, name, options, gen_obj)
self.sources = options.get('sources')
_re_sql_include = re.compile('-- *include: *([-a-z]+)')
def add_dependencies(self):
sources = _collect_paths(self.sources, self.path)
assert len(sources) == 1 # support for just one source, for now
source, reldir = sources[0]
assert reldir == '' # no support for reldir right now
assert source.endswith('.sql')
output = source[:-4] + '.h'
self.gen_obj.graph.add(DT_SQLHDR, output, source)
for line in fileinput.input(source):
match = self._re_sql_include.match(line)
if not match:
continue
file = match.group(1)
self.gen_obj.graph.add(DT_SQLHDR, output,
os.path.join(os.path.dirname(source), file + '.sql'))
_build_types = {
'exe' : TargetExe,
'script' : TargetScript,
'lib' : TargetLib,
'doc' : TargetDoc,
'swig' : TargetSWIG,
'project' : TargetProject,
'swig_lib' : TargetSWIGLib,
'swig_project' : TargetSWIGProject,
'ra-module': TargetRaModule,
'fs-module': TargetFsModule,
'apache-mod': TargetApacheMod,
'javah' : TargetJavaHeaders,
'java' : TargetJavaClasses,
'i18n' : TargetI18N,
'sql-header' : TargetSQLHeader,
}
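# Illustrative note (the build.conf syntax itself is not shown here): each
# section in build.conf is presumably instantiated through this table, so a
# section declaring type 'lib' becomes a TargetLib, 'swig' becomes one
# TargetSWIG per language in swig_lang, and so on.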
class GenError(Exception):
pass
# Path Handling Functions
#
# Build paths specified in build.conf are assumed to be always separated
# by forward slashes, regardless of the current running os.
#
# Native paths are paths separated by os.sep.
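# Illustrative examples (the Windows case is the interesting one; on POSIX
# systems both conversions are effectively identity transforms):
#   native_path('subversion/include/svn_io.h') -> 'subversion\include\svn_io.h'
#   build_path('subversion\include\svn_io.h')  -> 'subversion/include/svn_io.h'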
def native_path(path):
"""Convert a build path to a native path"""
return path.replace('/', os.sep)
def build_path(path):
"""Convert a native path to a build path"""
path = path.replace(os.sep, '/')
if os.altsep:
path = path.replace(os.altsep, '/')
return path
def build_path_join(*path_parts):
"""Join path components into a build path"""
return '/'.join(path_parts)
def build_path_split(path):
"""Return list of components in a build path"""
return path.split('/')
def build_path_splitfile(path):
"""Return the filename and directory portions of a file path"""
pos = path.rfind('/')
if pos > 0:
return path[:pos], path[pos+1:]
elif pos == 0:
return path[0], path[1:]
else:
return "", path
def build_path_dirname(path):
"""Return the directory portion of a file path"""
return build_path_splitfile(path)[0]
def build_path_basename(path):
"""Return the filename portion of a file path"""
return build_path_splitfile(path)[1]
def build_path_retreat(path):
"Given a relative directory, return ../ paths to retreat to the origin."
return ".." + "/.." * path.count('/')
def build_path_strip(path, files):
"Strip the given path from each file."
l = len(path)
result = [ ]
for file in files:
if len(file) > l and file[:l] == path and file[l] == '/':
result.append(file[l+1:])
else:
result.append(file)
return result
def _collect_paths(pats, path=None):
"""Find files matching a space separated list of globs
pats (string) is the list of glob patterns
path (string), if specified, is a path that will be prepended to each
glob pattern before it is evaluated
If path is none the return value is a list of filenames, otherwise
the return value is a list of 2-tuples. The first element in each tuple
is a matching filename and the second element is the portion of the
glob pattern which matched the file before its last forward slash (/)
"""
result = [ ]
for base_pat in pats.split():
if path:
pattern = build_path_join(path, base_pat)
else:
pattern = base_pat
files = sorted(glob.glob(native_path(pattern))) or [pattern]
if path is None:
# just append the names to the result list
for file in files:
result.append(build_path(file))
else:
# if we have paths, then we need to record how each source is located
# relative to the specified path
reldir = build_path_dirname(base_pat)
for file in files:
result.append((build_path(file), reldir))
return result
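# Hypothetical example (file names are illustrative only):
#   _collect_paths('*.c', 'subversion/libsvn_subr')
#     -> [('subversion/libsvn_subr/io.c', ''), ...]
# The second tuple element is the directory portion of the glob pattern
# ('' here, since '*.c' contains no '/'); if a pattern matches nothing, the
# pattern itself is returned in place of a file name.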
_re_public_include = re.compile(r'^subversion/include/(\w+)\.h$')
def _is_public_include(fname):
return _re_public_include.match(build_path(fname))
def _swig_include_wrapper(fname):
return native_path(_re_public_include.sub(
r"subversion/bindings/swig/proxy/\1_h.swg", build_path(fname)))
def _path_endswith(path, subpath):
"""Check if SUBPATH is a true path suffix of PATH.
"""
path_len = len(path)
subpath_len = len(subpath)
return (subpath_len > 0 and path_len >= subpath_len
and path[-subpath_len:] == subpath
and (path_len == subpath_len
or (subpath[0] == os.sep and path[-subpath_len] == os.sep)
or path[-subpath_len - 1] == os.sep))
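# For example (assuming a POSIX os.sep of '/'):
#   _path_endswith('a/b/c.h', 'b/c.h')  -> True
#   _path_endswith('a/bb/c.h', 'b/c.h') -> False  (the suffix must start at a
#                                                  path-component boundary)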
class IncludeDependencyInfo:
"""Finds all dependencies between a named set of headers, and computes
closure, so that individual C and SWIG source files can then be scanned, and
the stored dependency data used to return all directly and indirectly
referenced headers.
Note that where SWIG is concerned, there are two different kinds of include:
(1) those that include files in SWIG processing, and so matter to the
generation of .c files. (These are %include, %import).
(2) those that include references to C headers in the generated output,
and so are not required at .c generation, only at .o generation.
(These are %{ #include ... %}).
This class works exclusively in native-style paths."""
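# Illustrative examples of the two kinds of include described above (header
# names are hypothetical):
#   %include svn_types.swg          - matters when generating the .c wrapper
#   %{
#   #include "svn_types.h"
#   %}                              - only matters when compiling that .c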
def __init__(self, filenames, fnames_nonexist):
"""Operation of an IncludeDependencyInfo instance is restricted to a
'domain' - a set of header files which are considered interesting when
following and reporting dependencies. This is done to avoid creating any
dependencies on system header files. The domain is defined by three
factors:
(1) FILENAMES is a list of headers which are in the domain, and should be
scanned to discover how they inter-relate.
(2) FNAMES_NONEXIST is a list of headers which are in the domain, but will
be created by the build process, and so are not available to be
scanned - they will be assumed not to depend on any other interesting
headers.
(3) Files in subversion/bindings/swig/proxy/, which are autogenerated
based on files in subversion/include/, will be added to
the domain when a file in subversion/include/ is processed, and
dependencies will be deduced by special-case logic.
"""
# This defines the domain (i.e. set of files) in which dependencies are
# being located. Its structure is:
# { 'basename.h': [ 'path/to/something/named/basename.h',
# 'path/to/another/named/basename.h', ] }
self._domain = {}
for fname in filenames + fnames_nonexist:
bname = os.path.basename(fname)
self._domain.setdefault(bname, []).append(fname)
if _is_public_include(fname):
swig_fname = _swig_include_wrapper(fname)
swig_bname = os.path.basename(swig_fname)
self._domain.setdefault(swig_bname, []).append(swig_fname)
# This data structure is:
# { 'full/path/to/header.h': { 'full/path/to/dependency.h': TYPECODE, } }
# TYPECODE is '#', denoting a C include, or '%' denoting a SWIG include.
self._deps = {}
for fname in filenames:
self._deps[fname] = self._scan_for_includes(fname)
if _is_public_include(fname):
hdrs = { self._domain["proxy.swg"][0]: '%',
self._domain["apr.swg"][0]: '%',
fname: '%' }
for h in self._deps[fname].keys():
if (_is_public_include(h)
or h == os.path.join('subversion', 'include', 'private',
'svn_debug.h')):
hdrs[_swig_include_wrapper(h)] = '%'
else:
raise RuntimeError("Public include '%s' depends on '%s', " \
"which is not a public include! What's going on?" % (fname, h))
swig_fname = _swig_include_wrapper(fname)
swig_bname = os.path.basename(swig_fname)
self._deps[swig_fname] = hdrs
for fname in fnames_nonexist:
self._deps[fname] = {}
# Keep recomputing closures until we see no more changes
while True:
changes = 0
for fname in self._deps.keys():
changes = self._include_closure(self._deps[fname]) or changes
if not changes:
break
def query_swig(self, fname):
"""Scan the C or SWIG file FNAME, and return the full paths of each
include file that is a direct or indirect dependency, as a 2-tuple:
(C_INCLUDES, SWIG_INCLUDES)."""
if fname in self._deps:
hdrs = self._deps[fname]
else:
hdrs = self._scan_for_includes(fname)
self._include_closure(hdrs)
c_filenames = []
swig_filenames = []
for hdr, hdr_type in hdrs.items():
if hdr_type == '#':
c_filenames.append(hdr)
else: # hdr_type == '%'
swig_filenames.append(hdr)
# Be independent of hash ordering
c_filenames.sort()
swig_filenames.sort()
return (c_filenames, swig_filenames)
def query(self, fname):
"""Same as SELF.QUERY_SWIG(FNAME), but assert that there are no SWIG
includes, and return only C includes as a single list."""
c_includes, swig_includes = self.query_swig(fname)
assert len(swig_includes) == 0
return c_includes
def _include_closure(self, hdrs):
"""Mutate the passed dictionary HDRS, by performing a single pass
through the listed headers, adding the headers on which the first group
of headers depend, if not already present.
HDRS is of the form { 'path/to/header.h': TYPECODE, }
Return a boolean indicating whether any changes were made."""
items = list(hdrs.items())
for this_hdr, this_type in items:
for dependency_hdr, dependency_type in self._deps[this_hdr].items():
self._upd_dep_hash(hdrs, dependency_hdr, dependency_type)
return (len(items) != len(hdrs))
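# Convergence example (hypothetical headers): if a.h includes b.h and b.h
# includes c.h, the first pass over a.h's dictionary adds c.h (pulled in via
# b.h); the next pass adds nothing new, so the loop in __init__ terminates.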
def _upd_dep_hash(self, hash, hdr, type):
"""Mutate HASH (a data structure of the form
{ 'path/to/header.h': TYPECODE, } ) to include additional info of a
dependency of type TYPE on the file HDR."""
# '%' (SWIG, .c: .i) has precedence over '#' (C, .o: .c)
if hash.get(hdr) != '%':
hash[hdr] = type
_re_include = \
re.compile(r'^\s*([#%])\s*(?:include|import)\s*([<"])?([^<">;\s]+)')
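# Lines this pattern is intended to match (illustrative):
#   '#include "svn_pools.h"'  -> groups ('#', '"', 'svn_pools.h')
#   '%import core.i'          -> groups ('%', None, 'core.i')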
def _scan_for_includes(self, fname):
"""Scan C source file FNAME and return the basenames of any headers
which are directly included, and within the set defined when this
IncludeDependencyProcessor was initialized.
Return a dictionary with included full file names as keys and None as
values."""
hdrs = { }
for line in fileinput.input(fname):
match = self._re_include.match(line)
if not match:
continue
include_param = native_path(match.group(3))
type_code = match.group(1)
direct_possibility_fname = os.path.normpath(os.path.join(
os.path.dirname(fname), include_param))
domain_fnames = self._domain.get(os.path.basename(include_param), [])
if direct_possibility_fname in domain_fnames:
self._upd_dep_hash(hdrs, direct_possibility_fname, type_code)
elif (len(domain_fnames) == 1
and (include_param.find(os.sep) == -1
or _path_endswith(domain_fnames[0], include_param))):
self._upd_dep_hash(hdrs, domain_fnames[0], type_code)
else:
# None found
if include_param.find(os.sep) == -1 and len(domain_fnames) > 1:
_error(
"Unable to determine which file is being included\n"
" Include Parameter: '%s'\n"
" Including File: '%s'\n"
" Direct possibility: '%s'\n"
" Other possibilities: %s\n"
% (include_param, fname, direct_possibility_fname,
domain_fnames))
if match.group(2) == '"':
_warning('"%s" header not found, file %s' % (include_param, fname))
continue
if match.group(2) == '<':
_warning('<%s> header *found*, file %s' % (include_param, fname))
# The above warnings help to avoid the following problems:
# - If a header uses the correct <> or "" convention, then the warnings
# reveal if the build generator does/does not make dependencies for it
# when it should not/should - e.g. might reveal changes needed to
# build.conf.
# ...and...
# - If the generator is correct, then the warnings reveal incorrect use
# of <>/"" convention.
return hdrs
class FileInfo:
def __init__(self, filename, when):
self.filename = filename
self.when = when
def _sorted_files(graph, area):
"Given a list of targets, sort them based on their dependencies."
# we're going to just go with a naive algorithm here. these lists are
# going to be so short, that we can use O(n^2) or whatever this is.
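# Hypothetical example: if libsvn_fs links against libsvn_subr and both are
# installed into the same area, libsvn_subr's file is emitted before
# libsvn_fs's, so it is installed (and available for relinking) first.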
inst_targets = graph.get_sources(DT_INSTALL, area)
# first we need our own copy of the target list since we're going to
# munge it.
targets = inst_targets[:]
# the output list of the targets' files
files = [ ]
# loop while we have targets remaining:
while targets:
# find a target that has no dependencies in our current targets list.
for t in targets:
s = graph.get_sources(DT_LINK, t.name, Target) \
+ graph.get_sources(DT_NONLIB, t.name, Target)
for d in s:
if d in targets:
break
else:
# no dependencies found in the targets list. this is a good "base"
# to add to the files list now.
if isinstance(t, TargetJava):
# Java targets have no filename, and we just ignore them.
pass
elif isinstance(t, TargetI18N):
# I18N targets have no filename; we recurse one level deeper, and
# get the filenames of their dependencies.
s = graph.get_sources(DT_LINK, t.name)
for d in s:
if d not in targets:
files.append(FileInfo(d.filename, d.when))
else:
files.append(FileInfo(t.filename, t.when))
# don't consider this target any more
targets.remove(t)
# break out of search through targets
break
else:
# we went through the entire target list and everything had at least
# one dependency on another target. thus, we have a circular dependency
# tree. somebody messed up the .conf file, or the app truly does have
# a loop (and if so, they're screwed; libtool can't relink a lib at
# install time if the dependent libs haven't been installed yet)
raise CircularDependencies()
return files
class CircularDependencies(Exception):
pass
def unique(seq):
"Eliminate duplicates from a sequence"
list = [ ]
dupes = { }
for e in seq:
if e not in dupes:
dupes[e] = None
list.append(e)
return list
### End of file.
```
#### File: contrib/client-side/svnpatch.py
```python
import base64
import sys
import textwrap
import zlib
svnpatch1_block_start = b"========================= SVNPATCH1 BLOCK =========================\n"
def encode(input, output_file = sys.stdout):
output = svnpatch1_block_start.decode()
output += "\n".join(textwrap.wrap(base64.encodestring(zlib.compress(b"".join([x for x in input]).rstrip(b"\n"))).decode(), 76))
output_file.write(output)
def decode(input, output_file = sys.stdout):
svnpatch1_block_start_index = input.index(svnpatch1_block_start)
svnpatch1_block = input[svnpatch1_block_start_index+1:]
output = zlib.decompress(base64.decodestring(b"".join([x.rstrip(b"\n") for x in svnpatch1_block])))
if sys.version_info[0] >= 3:
output_file.buffer.write(output)
else:
output_file.write(output)
def help():
print("svnpatch.py - svnpatch helper script")
print("Usage: svnpatch.py [-e | --encode | -d | --decode] FILE")
print(" svnpatch.py [-e | --encode | -d | --decode] -")
print(" svnpatch.py [-h | --help]")
print("")
print("Author: <NAME>")
print("License: GPL-3")
exit(0)
if len(sys.argv) == 2 and sys.argv[1] in ("-h", "--help"):
help()
elif len(sys.argv) < 3:
sys.stderr.write("svnpatch.py: Missing arguments\n")
exit(1)
elif len(sys.argv) > 3:
sys.stderr.write("svnpatch.py: Excessive argument(s)\n")
exit(1)
if sys.argv[1] in ("-e", "--encode"):
func = encode
elif sys.argv[1] in ("-d", "--decode"):
func = decode
else:
sys.stderr.write("Incorrect option\n")
exit(1)
if sys.argv[2] == "-":
if sys.version_info[0] >= 3:
lines = sys.stdin.buffer.readlines()
else:
lines = sys.stdin.readlines()
else:
lines = open(sys.argv[2], "rb").readlines()
func(lines)
print("")
```
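The script above wraps a zlib-compressed, base64-encoded payload in an SVNPATCH1 block. A minimal, self-contained sketch of the same round trip, using a hypothetical payload and the modern encodebytes/decodebytes names (the encodestring/decodestring aliases used above were removed in Python 3.9):
```python
import base64
import textwrap
import zlib

payload = b"example svnpatch payload\n"  # hypothetical data, not a real patch

# Encode: strip trailing newlines, compress, base64-encode, wrap at 76 columns.
compressed = zlib.compress(payload.rstrip(b"\n"))
encoded = "\n".join(textwrap.wrap(base64.encodebytes(compressed).decode(), 76))

# Decode: undo the wrapping, base64-decode, decompress.
decoded = zlib.decompress(base64.decodebytes(encoded.replace("\n", "").encode()))
assert decoded == payload.rstrip(b"\n")
```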
#### File: tests/cmdline/svnlook_tests.py
```python
import re, os, logging
logger = logging.getLogger()
# Our testing module
import svntest
# (abbreviation)
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
Item = svntest.wc.StateItem
#----------------------------------------------------------------------
# Convenience functions to make writing more tests easier
def run_svnlook(*varargs):
"""Run svnlook with VARARGS, returns stdout as list of lines.
Raises Failure if any stderr messages."""
exit_code, output, dummy_errput = svntest.main.run_command(
svntest.main.svnlook_binary, 0, 0, *varargs)
return output
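# For example, run_svnlook('youngest', repo_dir) returns something like
# ['2\n'] (see its use in test_misc below).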
def expect(tag, expected, got):
if expected != got:
logger.warn("When testing: %s", tag)
logger.warn("Expected: %s", expected)
logger.warn(" Got: %s", got)
raise svntest.Failure
# Tests
def test_misc(sbox):
"test miscellaneous svnlook features"
sbox.build()
wc_dir = sbox.wc_dir
repo_dir = sbox.repo_dir
# Make a couple of local mods to files
mu_path = os.path.join(wc_dir, 'A', 'mu')
rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
svntest.main.file_append(mu_path, 'appended mu text')
svntest.main.file_append(rho_path, 'new appended text for rho')
# Create expected output tree for 'svn ci'
expected_output = svntest.wc.State(wc_dir, {
'A/mu' : Item(verb='Sending'),
'A/D/G/rho' : Item(verb='Sending'),
})
# Create expected status tree; all local revisions should be at 1,
# but mu and rho should be at revision 2.
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2)
svntest.actions.run_and_verify_commit(wc_dir,
expected_output,
expected_status,
None,
wc_dir)
# give the repo a new UUID
uuid = "01234567-89ab-cdef-89ab-cdef01234567"
svntest.main.run_command_stdin(svntest.main.svnadmin_binary, None, 0, 1,
["SVN-fs-dump-format-version: 2\n",
"\n",
"UUID: ", uuid, "\n",
],
'load', '--force-uuid', repo_dir)
expect('youngest', [ '2\n' ], run_svnlook('youngest', repo_dir))
expect('uuid', [ uuid + '\n' ], run_svnlook('uuid', repo_dir))
# it would be nice to test the author too, but the current test framework
# does not pull a username when testing over ra_neon or ra_svn,
# so the commits have an empty author.
expect('log', [ 'log msg\n' ], run_svnlook('log', repo_dir))
# check that the 'svnlook tree' output can be expanded to
# the 'svnlook tree --full-paths' output when requesting the whole repository
treelist = run_svnlook('tree', repo_dir)
treelistfull = run_svnlook('tree', '--full-paths', repo_dir)
path = ''
treelistexpand = []
for entry in treelist:
len1 = len(entry)
len2 = len(entry.lstrip())
path = path[0:2*(len1-len2)-1] + entry.strip() + '\n'
if path == '/\n':
treelistexpand.append(path)
else:
treelistexpand.append(path[1:])
treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
treelistexpand, treelistfull)
# check that the 'svnlook tree' output matches the tail of
# the 'svnlook tree --full-paths' output when requesting
# only part of the repository
treelist = run_svnlook('tree', repo_dir, '/A/B')
treelistfull = run_svnlook('tree', '--full-paths', repo_dir, '/A/B')
path = ''
treelistexpand = []
for entry in treelist:
len1 = len(entry)
len2 = len(entry.lstrip())
path = path[0:2*(len1-len2)] + entry.strip() + '\n'
treelistexpand.append('/A/' + path)
treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
treelistexpand, treelistfull)
treelist = run_svnlook('tree', repo_dir, '/')
if treelist[0] != '/\n':
raise svntest.Failure
expect('propget svn:log', [ 'log msg' ],
run_svnlook('propget', '--revprop', repo_dir, 'svn:log'))
proplist = run_svnlook('proplist', '--revprop', repo_dir)
proplist = sorted([prop.strip() for prop in proplist])
# We cannot rely on svn:author's presence. ra_svn doesn't set it.
if not (proplist == [ 'svn:author', 'svn:date', 'svn:log' ]
or proplist == [ 'svn:date', 'svn:log' ]):
logger.warn("Unexpected result from proplist: %s", proplist)
raise svntest.Failure
prop_name = 'foo:bar-baz-quux'
exit_code, output, errput = svntest.main.run_svnlook('propget',
'--revprop', repo_dir,
prop_name)
expected_err = "Property '%s' not found on revision " % prop_name
for line in errput:
if line.find(expected_err) != -1:
break
else:
raise svntest.main.SVNUnmatchedError
exit_code, output, errput = svntest.main.run_svnlook('propget',
'-r1', repo_dir,
prop_name, '/')
expected_err = "Property '%s' not found on path '/' in revision " % prop_name
for line in errput:
if line.find(expected_err) != -1:
break
else:
raise svntest.main.SVNUnmatchedError
#----------------------------------------------------------------------
# Issue 1089
@Issue(1089)
def delete_file_in_moved_dir(sbox):
"delete file in moved dir"
sbox.build()
wc_dir = sbox.wc_dir
repo_dir = sbox.repo_dir
# move E to E2 and delete E2/alpha
E_path = os.path.join(wc_dir, 'A', 'B', 'E')
E2_path = os.path.join(wc_dir, 'A', 'B', 'E2')
svntest.actions.run_and_verify_svn(None, None, [], 'mv', E_path, E2_path)
alpha_path = os.path.join(E2_path, 'alpha')
svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path)
# commit
expected_output = svntest.wc.State(wc_dir, {
'A/B/E' : Item(verb='Deleting'),
'A/B/E2' : Item(verb='Adding'),
'A/B/E2/alpha' : Item(verb='Deleting'),
})
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta')
expected_status.add({
'A/B/E2' : Item(status=' ', wc_rev=2),
'A/B/E2/beta' : Item(status=' ', wc_rev=2),
})
### this commit fails. the 'alpha' node is marked 'not-present' since it
### is a deleted child of a move/copy. this is all well and proper.
### however, during the commit, the parent node is committed down to just
### the BASE node. at that point, 'alpha' has no parent in WORKING which
### is a schema violation. there is a plan for committing in this kind of
### situation, laid out in wc-ng-design. that needs to be implemented
### in order to get this commit working again.
svntest.actions.run_and_verify_commit(wc_dir,
expected_output,
expected_status,
None,
wc_dir)
exit_code, output, errput = svntest.main.run_svnlook("dirs-changed",
repo_dir)
if errput:
raise svntest.Failure
# Okay. No failure, but did we get the right output?
if len(output) != 2:
raise svntest.Failure
if not ((output[0].strip() == 'A/B/')
and (output[1].strip() == 'A/B/E2/')):
raise svntest.Failure
#----------------------------------------------------------------------
# Issue 1241
@Issue(1241)
def test_print_property_diffs(sbox):
"test the printing of property diffs"
sbox.build()
wc_dir = sbox.wc_dir
repo_dir = sbox.repo_dir
# Add a bogus property to iota
iota_path = os.path.join(wc_dir, 'iota')
svntest.actions.run_and_verify_svn(None, None, [], 'propset',
'bogus_prop', 'bogus_val', iota_path)
# commit the change
svntest.actions.run_and_verify_svn(None, None, [],
'ci', '-m', 'log msg', iota_path)
# Grab the diff
exit_code, expected_output, err = svntest.actions.run_and_verify_svn(
None, None, [], 'diff', '-r', 'PREV', iota_path)
exit_code, output, errput = svntest.main.run_svnlook("diff", repo_dir)
if errput:
raise svntest.Failure
# Okay. No failure, but did we get the right output?
if len(output) != len(expected_output):
raise svntest.Failure
canonical_iota_path = iota_path.replace(os.path.sep, '/')
# replace wcdir/iota with iota in expected_output
for i in range(len(expected_output)):
expected_output[i] = expected_output[i].replace(canonical_iota_path,
'iota')
# Check that the header filenames match.
if expected_output[2].split()[1] != output[2].split()[1]:
raise svntest.Failure
if expected_output[3].split()[1] != output[3].split()[1]:
raise svntest.Failure
svntest.verify.compare_and_display_lines('', '',
expected_output[4:],
output[4:])
#----------------------------------------------------------------------
# Check that svnlook info allows inconsistent line endings in logs.
def info_bad_newlines(sbox):
"svnlook info must allow inconsistent newlines"
dump_str = """SVN-fs-dump-format-version: 2
UUID: dc40867b-38f6-0310-9f5f-f81aa277e06e
Revision-number: 0
Prop-content-length: 56
Content-length: 56
K 8
svn:date
V 27
2005-05-03T19:09:41.129900Z
PROPS-END
Revision-number: 1
Prop-content-length: 99
Content-length: 99
K 7
svn:log
V 3
\n\r\n
K 10
svn:author
V 2
pl
K 8
svn:date
V 27
2005-05-03T19:10:19.975578Z
PROPS-END
Node-path: file
Node-kind: file
Node-action: add
Prop-content-length: 10
Text-content-length: 5
Text-content-md5: e1cbb0c3879af8347246f12c559a86b5
Content-length: 15
PROPS-END
text
"""
# load dumpfile with inconsistent newlines into repos.
svntest.actions.load_repo(sbox, dump_str=dump_str,
bypass_prop_validation=True)
exit_code, output, errput = svntest.main.run_svnlook("info",
sbox.repo_dir, "-r1")
if errput:
raise svntest.Failure
def changed_copy_info(sbox):
"test --copy-info flag on the changed command"
sbox.build()
wc_dir = sbox.wc_dir
repo_dir = sbox.repo_dir
# Copy alpha to /A/alpha2.
E_path = os.path.join(wc_dir, 'A', 'B', 'E')
alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha')
alpha2_path = os.path.join(wc_dir, 'A', 'alpha2')
svntest.actions.run_and_verify_svn(None, None, [], 'cp', alpha_path,
alpha2_path)
# commit
expected_output = svntest.wc.State(wc_dir, {
'A/alpha2' : Item(verb='Adding'),
})
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.add({
'A/alpha2' : Item(status=' ', wc_rev=2),
})
svntest.actions.run_and_verify_commit(wc_dir,
expected_output,
expected_status,
None,
wc_dir)
exit_code, output, errput = svntest.main.run_svnlook("changed", repo_dir)
if errput:
raise svntest.Failure
expect("changed without --copy-info", ["A A/alpha2\n"], output)
exit_code, output, errput = svntest.main.run_svnlook("changed",
repo_dir, "--copy-info")
if errput:
raise svntest.Failure
expect("changed with --copy-info",
["A + A/alpha2\n",
" (from A/B/E/alpha:r1)\n"],
output)
#----------------------------------------------------------------------
# Issue 2663
@Issue(2663)
def tree_non_recursive(sbox):
"test 'svnlook tree --non-recursive'"
sbox.build()
repo_dir = sbox.repo_dir
expected_results_root = ('/', ' iota', ' A/')
expected_results_deep = ('B/', ' lambda', ' E/', ' F/')
# check the output of svnlook --non-recursive on the
# root of the repository
treelist = run_svnlook('tree', '--non-recursive', repo_dir)
for entry in treelist:
if not entry.rstrip() in expected_results_root:
logger.warn("Unexpected result from tree with --non-recursive:")
logger.warn(" entry : %s", entry.rstrip())
raise svntest.Failure
if len(treelist) != len(expected_results_root):
logger.warn("Expected %i output entries, found %i",
len(expected_results_root), len(treelist))
raise svntest.Failure
# check the output of svnlook --non-recursive on a
# subdirectory of the repository
treelist = run_svnlook('tree', '--non-recursive', repo_dir, '/A/B')
for entry in treelist:
if not entry.rstrip() in expected_results_deep:
logger.warn("Unexpected result from tree with --non-recursive:")
logger.warn(" entry : %s", entry.rstrip())
raise svntest.Failure
if len(treelist) != len(expected_results_deep):
logger.warn("Expected %i output entries, found %i",
len(expected_results_deep), len(treelist))
raise svntest.Failure
#----------------------------------------------------------------------
def limit_history(sbox):
"history --limit"
sbox.build(create_wc=False)
repo_url = sbox.repo_url
svntest.actions.run_and_verify_svn(None, None, [],
'mv', '-m', 'log msg',
repo_url + "/iota", repo_url + "/iota2")
svntest.actions.run_and_verify_svn(None, None, [],
'mv', '-m', 'log msg',
repo_url + "/A/mu", repo_url + "/iota")
history = run_svnlook("history", "--limit=1", sbox.repo_dir)
# Ignore the two lines of header, and verify expected number of items.
if len(history[2:]) != 1:
raise svntest.Failure("Output not limited to expected number of items")
#----------------------------------------------------------------------
def diff_ignore_whitespace(sbox):
"test 'svnlook diff -x -b' and 'svnlook diff -x -w'"
sbox.build()
repo_dir = sbox.repo_dir
wc_dir = sbox.wc_dir
# Make whitespace-only changes to mu
mu_path = os.path.join(wc_dir, 'A', 'mu')
svntest.main.file_write(mu_path, "This is the file 'mu'.\n", "wb")
# Create expected output tree for 'svn ci'
expected_output = svntest.wc.State(wc_dir, {
'A/mu' : Item(verb='Sending'),
})
# Create expected status tree; all local revisions should be at 1,
# but mu should be at revision 2.
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.tweak('A/mu', wc_rev=2)
svntest.actions.run_and_verify_commit(wc_dir,
expected_output,
expected_status,
None,
wc_dir)
# Check the output of 'svnlook diff -x --ignore-space-change' on mu.
# It should not print anything.
output = run_svnlook('diff', '-r2', '-x', '--ignore-space-change',
repo_dir)
if output != []:
raise svntest.Failure
# Check the output of 'svnlook diff -x --ignore-all-space' on mu.
# It should not print anything.
output = run_svnlook('diff', '-r2', '-x', '--ignore-all-space',
repo_dir)
if output != []:
raise svntest.Failure
#----------------------------------------------------------------------
def diff_ignore_eolstyle(sbox):
"test 'svnlook diff -x --ignore-eol-style'"
sbox.build()
repo_dir = sbox.repo_dir
wc_dir = sbox.wc_dir
if os.name == 'nt':
crlf = '\n'
else:
crlf = '\r\n'
mu_path = os.path.join(wc_dir, 'A', 'mu')
rev = 1
# do the --ignore-eol-style test for each eol-style
for eol, eolchar in zip(['CRLF', 'CR', 'native', 'LF'],
[crlf, '\015', '\n', '\012']):
# rewrite file mu and set the eol-style property.
svntest.main.file_write(mu_path, "This is the file 'mu'." + eolchar, 'wb')
svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol, mu_path)
# Create expected output tree for 'svn ci'
expected_output = svntest.wc.State(wc_dir, {
'A/mu' : Item(verb='Sending'),
})
# Create expected status tree; all local revisions should be at
# revision 1, but mu should be at revision rev + 1.
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
expected_status.tweak('A/mu', wc_rev=rev + 1)
svntest.actions.run_and_verify_commit(wc_dir,
expected_output,
expected_status,
None,
wc_dir)
# Grab the diff
exit_code, expected_output, err = svntest.actions.run_and_verify_svn(
None, None, [],
'diff', '-r', 'PREV', '-x', '--ignore-eol-style', mu_path)
output = run_svnlook('diff', '-r', str(rev + 1), '-x',
'--ignore-eol-style', repo_dir)
rev += 1
canonical_mu_path = mu_path.replace(os.path.sep, '/')
# replace wcdir/A/mu with A/mu in expected_output
for i in range(len(expected_output)):
expected_output[i] = expected_output[i].replace(canonical_mu_path,
'A/mu')
# Check that the header filenames match.
if expected_output[2].split()[1] != output[2].split()[1]:
raise svntest.Failure
if expected_output[3].split()[1] != output[3].split()[1]:
raise svntest.Failure
svntest.verify.compare_and_display_lines('', '',
expected_output[4:],
output[4:])
#----------------------------------------------------------------------
def diff_binary(sbox):
"test 'svnlook diff' on binary files"
sbox.build()
repo_dir = sbox.repo_dir
wc_dir = sbox.wc_dir
# Set A/mu to a binary mime-type, tweak its text, and commit.
mu_path = os.path.join(wc_dir, 'A', 'mu')
svntest.main.file_append(mu_path, 'new appended text for mu')
svntest.main.run_svn(None, 'propset', 'svn:mime-type',
'application/octet-stream', mu_path)
svntest.main.run_svn(None, 'ci', '-m', 'log msg', mu_path)
# Now run 'svnlook diff' and look for the "Binary files differ" message.
output = run_svnlook('diff', repo_dir)
if not "(Binary files differ)\n" in output:
raise svntest.Failure("No 'Binary files differ' indication in "
"'svnlook diff' output.")
#----------------------------------------------------------------------
def test_filesize(sbox):
"test 'svnlook filesize'"
sbox.build()
repo_dir = sbox.repo_dir
wc_dir = sbox.wc_dir
tree_output = run_svnlook('tree', '--full-paths', repo_dir)
for line in tree_output:
# Drop line endings
line = line.rstrip()
# Skip directories
if line[-1] == '/':
continue
# Run 'svnlook cat' and measure the size of the output.
cat_output = run_svnlook('cat', repo_dir, line)
cat_size = len("".join(cat_output))
# Run 'svnlook filesize' and compare the results with the CAT_SIZE.
filesize_output = run_svnlook('filesize', repo_dir, line)
if len(filesize_output) != 1:
raise svntest.Failure("'svnlook filesize' printed something other than "
"a single line of output.")
filesize = int(filesize_output[0].strip())
if filesize != cat_size:
raise svntest.Failure("'svnlook filesize' and the counted length of "
"'svnlook cat's output differ for the path "
"'%s'." % (line))
#----------------------------------------------------------------------
def verify_logfile(logfilename, expected_data):
if os.path.exists(logfilename):
fp = open(logfilename)
else:
raise svntest.verify.SVNUnexpectedOutput("hook logfile %s not found"\
% logfilename)
actual_data = fp.readlines()
fp.close()
os.unlink(logfilename)
svntest.verify.compare_and_display_lines('wrong hook logfile content',
'STDOUT',
expected_data, actual_data)
def test_txn_flag(sbox):
"test 'svnlook * -t'"
sbox.build()
repo_dir = sbox.repo_dir
wc_dir = sbox.wc_dir
logfilepath = os.path.join(repo_dir, 'hooks.log')
# List changed dirs and files in this transaction
hook_template = """import sys,os,subprocess
svnlook_bin=%s
fp = open(os.path.join(sys.argv[1], 'hooks.log'), 'wb')
def output_command(fp, cmd, opt):
command = [svnlook_bin, cmd, '-t', sys.argv[2], sys.argv[1]] + opt
process = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, bufsize=-1)
(output, errors) = process.communicate()
status = process.returncode
fp.write(output)
fp.write(errors)
return status
for (svnlook_cmd, svnlook_opt) in %s:
output_command(fp, svnlook_cmd, svnlook_opt.split())
fp.close()"""
pre_commit_hook = svntest.main.get_pre_commit_hook_path(repo_dir)
# 1. svnlook 'changed' -t and 'dirs-changed' -t
hook_instance = hook_template % (repr(svntest.main.svnlook_binary),
repr([('changed', ''),
('dirs-changed', '')]))
svntest.main.create_python_hook_script(pre_commit_hook,
hook_instance)
# Change files mu and rho
A_path = os.path.join(wc_dir, 'A')
mu_path = os.path.join(wc_dir, 'A', 'mu')
rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
svntest.main.file_append(mu_path, 'appended mu text')
svntest.main.file_append(rho_path, 'new appended text for rho')
# commit, and check the hook's logfile
svntest.actions.run_and_verify_svn(None, None, [],
'ci', '-m', 'log msg', wc_dir)
svntest.actions.run_and_verify_svn(None, None, [],
'up', wc_dir)
expected_data = [ 'U A/D/G/rho\n', 'U A/mu\n', 'A/\n', 'A/D/G/\n' ]
verify_logfile(logfilepath, expected_data)
# 2. svnlook 'propget' -t, 'proplist' -t
# 2. Change a dir and revision property
hook_instance = hook_template % (repr(svntest.main.svnlook_binary),
repr([('propget', 'bogus_prop /A'),
('propget', '--revprop bogus_rev_prop'),
('proplist', '/A'),
('proplist', '--revprop')]))
svntest.main.create_python_hook_script(pre_commit_hook,
hook_instance)
svntest.actions.run_and_verify_svn(None, None, [], 'propset',
'bogus_prop', 'bogus_val\n', A_path)
svntest.actions.run_and_verify_svn(None, None, [],
'ci', '-m', 'log msg', wc_dir,
'--with-revprop', 'bogus_rev_prop=bogus_rev_val\n')
# Now check the logfile
expected_data = [ 'bogus_val\n',
'bogus_rev_val\n',
' bogus_prop\n',
' svn:log\n', ' svn:author\n',
# internal property, not really expected
' svn:check-locks\n',
' bogus_rev_prop\n', ' svn:date\n']
verify_logfile(logfilepath, svntest.verify.UnorderedOutput(expected_data))
def property_delete(sbox):
"property delete"
sbox.build()
repo_dir = sbox.repo_dir
sbox.simple_propset('foo', 'bar', 'A/mu')
sbox.simple_commit()
sbox.simple_propdel('foo', 'A/mu')
sbox.simple_commit()
# XFail since r1293375, changed and diff produce no output on a
# property delete
svntest.actions.run_and_verify_svnlook(None, ["_U A/mu\n"], [],
'changed', repo_dir)
########################################################################
# Run the tests
# list all tests here, starting with None:
test_list = [ None,
test_misc,
delete_file_in_moved_dir,
test_print_property_diffs,
info_bad_newlines,
changed_copy_info,
tree_non_recursive,
limit_history,
diff_ignore_whitespace,
diff_ignore_eolstyle,
diff_binary,
test_filesize,
test_txn_flag,
property_delete,
]
if __name__ == '__main__':
svntest.main.run_tests(test_list)
# NOTREACHED
### End of file.
```
#### File: cmdline/svntest/entry.py
```python
import xml.parsers.expat # you may need to install this package
### The entries file parser in tools/client-side/change-svn-wc-format.py
### handles the WC format for Subversion 1.4 and 1.5, which is no
### longer in XML.
class svn_entry:
"An object that represents an entry from an 'entries' file."
def __init__(self, attributes): # constructor
self.atts = attributes
def prettyprint(self):
print(" Entryname: %s" % self.atts['name'])
print(" Kind: %s" % self.atts['kind'])
print(" Revision: %s" % self.atts['revision'])
print(" Ancestor: %s" % self.atts['ancestor'])
print(" all atts: %s" % self.atts)
print("")
class svn_entryparser:
"A class to parse an 'entries' file."
def __init__(self): # constructor
self.entry_dict = {}
self.parser = xml.parsers.expat.ParserCreate()
self.parser.StartElementHandler = self.handle_start_tag
def handle_start_tag(self, name, attrs):
"Expat callback that receives a new open-tag."
if 'name' in attrs:
entry = svn_entry(attrs) # create new entry object
# Derive missing values
if 'kind' not in entry.atts:
entry.atts['kind'] = 'file' # default kind if none mentioned
if 'revision' not in entry.atts:
if "" in self.entry_dict:
parent = self.entry_dict[""]
entry.atts['revision'] = parent.atts['revision']
if 'ancestor' not in entry.atts:
if "" in self.entry_dict:
parent = self.entry_dict[""]
entry.atts['ancestor'] = parent.atts['ancestor'] + '/' \
+ entry.atts['name']
self.entry_dict[attrs['name']] = entry # store the new entry
# The main exported routine
def get_entries(path):
"Parse the entries file at PATH and return a list of svn_entry objects."
entryparser = svn_entryparser() # make a parser instance
fp = open(path, 'r')
entryparser.parser.ParseFile(fp)
fp.close()
return entryparser.entry_dict
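# Illustrative use (the path is hypothetical, and this only applies to the
# old pre-1.4 XML-format entries files mentioned above):
#   entries = get_entries('wc/.svn/entries')
#   entries['iota'].prettyprint()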
### End of file.
```
#### File: tests/cmdline/upgrade_tests.py
```python
import os
import re
import shutil
import sys
import tarfile
import tempfile
import logging
logger = logging.getLogger()
import svntest
from svntest import wc
Item = svntest.wc.StateItem
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
wc_is_too_old_regex = (".*Working copy '.*' is too old \(format \d+.*\).*")
def get_current_format():
# Get current format from subversion/libsvn_wc/wc.h
format_file = open(os.path.join(os.path.dirname(__file__), "..", "..", "libsvn_wc", "wc.h")).read()
return int(re.search("\n#define SVN_WC__VERSION (\d+)\n", format_file).group(1))
def replace_sbox_with_tarfile(sbox, tar_filename,
dir=None):
try:
svntest.main.safe_rmtree(sbox.wc_dir)
except OSError, e:
pass
if not dir:
dir = tar_filename.split('.')[0]
tarpath = os.path.join(os.path.dirname(sys.argv[0]), 'upgrade_tests_data',
tar_filename)
t = tarfile.open(tarpath, 'r:bz2')
extract_dir = tempfile.mkdtemp(dir=svntest.main.temp_dir)
for member in t.getmembers():
t.extract(member, extract_dir)
shutil.move(os.path.join(extract_dir, dir), sbox.wc_dir)
def replace_sbox_repo_with_tarfile(sbox, tar_filename, dir=None):
try:
svntest.main.safe_rmtree(sbox.repo_dir)
except OSError, e:
pass
if not dir:
dir = tar_filename.split('.')[0]
tarpath = os.path.join(os.path.dirname(sys.argv[0]), 'upgrade_tests_data',
tar_filename)
t = tarfile.open(tarpath, 'r:bz2')
extract_dir = tempfile.mkdtemp(dir=svntest.main.temp_dir)
for member in t.getmembers():
t.extract(member, extract_dir)
shutil.move(os.path.join(extract_dir, dir), sbox.repo_dir)
def check_format(sbox, expected_format):
dot_svn = svntest.main.get_admin_name()
for root, dirs, files in os.walk(sbox.wc_dir):
db = svntest.sqlite3.connect(os.path.join(root, dot_svn, 'wc.db'))
c = db.cursor()
c.execute('pragma user_version;')
found_format = c.fetchone()[0]
db.close()
if found_format != expected_format:
raise svntest.Failure("found format '%d'; expected '%d'; in wc '%s'" %
(found_format, expected_format, root))
if svntest.main.wc_is_singledb(sbox.wc_dir):
dirs[:] = []
if dot_svn in dirs:
dirs.remove(dot_svn)
def check_pristine(sbox, files):
for file in files:
file_path = sbox.ospath(file)
file_text = open(file_path, 'r').read()
file_pristine = open(svntest.wc.text_base_path(file_path), 'r').read()
if (file_text != file_pristine):
raise svntest.Failure("pristine mismatch for '%s'" % (file))
def check_dav_cache(dir_path, wc_id, expected_dav_caches):
dot_svn = svntest.main.get_admin_name()
db = svntest.sqlite3.connect(os.path.join(dir_path, dot_svn, 'wc.db'))
c = db.cursor()
# Check if python's sqlite can read our db
c.execute('select sqlite_version()')
sqlite_ver = map(int, c.fetchone()[0].split('.'))
# SQLite versions have 3 or 4 number groups
major = sqlite_ver[0]
minor = sqlite_ver[1]
patch = sqlite_ver[2]
if major < 3 or (major == 3 and minor < 6) \
or (major == 3 and minor == 6 and patch < 18):
return # We need a newer SQLite
for local_relpath, expected_dav_cache in expected_dav_caches.items():
# NODES conversion is complete enough that we can use it if it exists
c.execute("""pragma table_info(nodes)""")
if c.fetchone():
c.execute('select dav_cache from nodes ' +
'where wc_id=? and local_relpath=? and op_depth = 0',
(wc_id, local_relpath))
row = c.fetchone()
else:
c.execute('select dav_cache from base_node ' +
'where wc_id=? and local_relpath=?',
(wc_id, local_relpath))
row = c.fetchone()
if row is None:
raise svntest.Failure("no dav cache for '%s'" % (local_relpath))
dav_cache = str(row[0])
if dav_cache != expected_dav_cache:
raise svntest.Failure(
"wrong dav cache for '%s'\n Found: '%s'\n Expected: '%s'" %
(local_relpath, dav_cache, expected_dav_cache))
db.close()
# Very simple working copy property diff handler for single line textual properties
# Should probably be moved to svntest/actions.py after some major refactoring.
def simple_property_verify(dir_path, expected_props):
# Shows all items in dict1 that are not also in dict2
def diff_props(dict1, dict2, name, match):
equal = True
for key in dict1:
node = dict1[key]
node2 = dict2.get(key, None)
if node2:
for prop in node:
v1 = node[prop]
v2 = node2.get(prop, None)
if not v2:
logger.warn('\'%s\' property on \'%s\' not found in %s',
prop, key, name)
equal = False
if match and v1 != v2:
logger.warn('Expected \'%s\' on \'%s\' to be \'%s\', but found \'%s\'',
prop, key, v1, v2)
equal = False
else:
logger.warn('\'%s\': %s not found in %s', key, dict1[key], name)
equal = False
return equal
exit_code, output, errput = svntest.main.run_svn(None, 'proplist', '-R',
'-v', dir_path)
actual_props = {}
target = None
name = None
for i in output:
if i.startswith('Properties on '):
target = i[15+len(dir_path)+1:-3].replace(os.path.sep, '/')
elif not i.startswith(' '):
name = i.strip()
else:
v = actual_props.get(target, {})
v[name] = i.strip()
actual_props[target] = v
v1 = diff_props(expected_props, actual_props, 'actual', True)
v2 = diff_props(actual_props, expected_props, 'expected', False)
if not v1 or not v2:
logger.warn('Actual properties: %s', actual_props)
raise svntest.Failure("Properties unequal")
def simple_checksum_verify(expected_checksums):
for path, checksum in expected_checksums:
exit_code, output, errput = svntest.main.run_svn(None, 'info', path)
if exit_code:
raise svntest.Failure()
if checksum:
if not svntest.verify.RegexOutput('Checksum: ' + checksum,
match_all=False).matches(output):
raise svntest.Failure("did not get expected checksum " + checksum)
if not checksum:
if svntest.verify.RegexOutput('Checksum: ',
match_all=False).matches(output):
raise svntest.Failure("unexpected checksum")
def run_and_verify_status_no_server(wc_dir, expected_status):
"same as svntest.actions.run_and_verify_status(), but without '-u'"
exit_code, output, errput = svntest.main.run_svn(None, 'st', '-q', '-v',
wc_dir)
actual = svntest.tree.build_tree_from_status(output)
try:
svntest.tree.compare_trees("status", actual, expected_status.old_tree())
except svntest.tree.SVNTreeError:
svntest.verify.display_trees(None, 'STATUS OUTPUT TREE',
expected_status.old_tree(), actual)
logger.warn("ACTUAL STATUS TREE:")
svntest.tree.dump_tree_script(actual, wc_dir + os.sep)
raise
def basic_upgrade(sbox):
"basic upgrade behavior"
replace_sbox_with_tarfile(sbox, 'basic_upgrade.tar.bz2')
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'info', sbox.wc_dir)
# Upgrade on something not a versioned dir gives a 'not directory' error.
not_dir = ".*E155019.*%s'.*directory"
os.mkdir(sbox.ospath('X'))
svntest.actions.run_and_verify_svn(None, None, not_dir % 'X',
'upgrade', sbox.ospath('X'))
svntest.actions.run_and_verify_svn(None, None, not_dir % 'Y',
'upgrade', sbox.ospath('Y'))
svntest.actions.run_and_verify_svn(None, None, not_dir %
re.escape(sbox.ospath('A/mu')),
'upgrade', sbox.ospath('A/mu'))
# Upgrade on a versioned subdir gives a 'not root' error.
not_root = ".*E155019.*%s'.*root.*%s'"
svntest.actions.run_and_verify_svn(None, None, not_root %
('A', re.escape(sbox.wc_dir)),
'upgrade', sbox.ospath('A'))
# Now upgrade the working copy
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# Actually check the format number of the upgraded working copy
check_format(sbox, get_current_format())
# Now check the contents of the working copy
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
check_pristine(sbox, ['iota', 'A/mu'])
def upgrade_with_externals(sbox):
"upgrade with externals"
# Create wc from tarfile, uses the same structure of the wc as the tests
# in externals_tests.py.
replace_sbox_with_tarfile(sbox, 'upgrade_with_externals.tar.bz2')
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'info', sbox.wc_dir)
# Now upgrade the working copy
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# Actually check the format number of the upgraded working copy
check_format(sbox, get_current_format())
check_pristine(sbox, ['iota', 'A/mu',
'A/D/x/lambda', 'A/D/x/E/alpha'])
def upgrade_1_5_body(sbox, subcommand):
replace_sbox_with_tarfile(sbox, 'upgrade_1_5.tar.bz2')
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
subcommand, sbox.wc_dir)
# Now upgrade the working copy
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# Check the format of the working copy
check_format(sbox, get_current_format())
# Now check the contents of the working copy
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
check_pristine(sbox, ['iota', 'A/mu'])
def upgrade_1_5(sbox):
"test upgrading from a 1.5-era working copy"
return upgrade_1_5_body(sbox, 'info')
def update_1_5(sbox):
"test updating a 1.5-era working copy"
# The 'update' printed:
# Skipped 'svn-test-work\working_copies\upgrade_tests-3'
# Summary of conflicts:
# Skipped paths: 1
return upgrade_1_5_body(sbox, 'update')
def logs_left_1_5(sbox):
"test upgrading from a 1.5-era wc with stale logs"
replace_sbox_with_tarfile(sbox, 'logs_left_1_5.tar.bz2')
# Try to upgrade, this should give an error
expected_stderr = (".*Cannot upgrade with existing logs; .*")
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'upgrade', sbox.wc_dir)
def upgrade_wcprops(sbox):
"test upgrading a working copy with wcprops"
replace_sbox_with_tarfile(sbox, 'upgrade_wcprops.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# Make sure that .svn/all-wcprops has disappeared
dot_svn = svntest.main.get_admin_name()
if os.path.exists(os.path.join(sbox.wc_dir, dot_svn, 'all-wcprops')):
raise svntest.Failure("all-wcprops file still exists")
# Just for kicks, let's see if the wcprops are what we'd expect them
# to be. (This could be smarter.)
expected_dav_caches = {
'' :
'(svn:wc:ra_dav:version-url 41 /svn-test-work/local_tmp/repos/!svn/ver/1)',
'iota' :
'(svn:wc:ra_dav:version-url 46 /svn-test-work/local_tmp/repos/!svn/ver/1/iota)',
}
check_dav_cache(sbox.wc_dir, 1, expected_dav_caches)
# Poor man's relocate to fix up a 1.0 (xml style) working copy to refer to a
# valid repository, so svn upgrade can do its work on it
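# For example, basic_upgrade_1_0 below calls
#   xml_entries_relocate(sbox.wc_dir, 'file:///1.0.0/repos', sbox.repo_url)
# to rewrite every url="..." attribute in the old XML-format entries files so
# that they point at the freshly created test repository.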
def xml_entries_relocate(path, from_url, to_url):
adm_name = svntest.main.get_admin_name()
entries = os.path.join(path, adm_name, 'entries')
txt = open(entries).read().replace('url="' + from_url, 'url="' + to_url)
os.chmod(entries, 0777)
open(entries, 'w').write(txt)
for dirent in os.listdir(path):
item_path = os.path.join(path, dirent)
if dirent == svntest.main.get_admin_name():
continue
if os.path.isdir(os.path.join(item_path, adm_name)):
xml_entries_relocate(item_path, from_url, to_url)
# Poor man's relocate to fix up a working copy to refer to a
# valid repository, so svn upgrade can do its work on it
def simple_entries_replace(path, from_url, to_url):
adm_name = svntest.main.get_admin_name()
entries = os.path.join(path, adm_name, 'entries')
txt = open(entries).read().replace(from_url, to_url)
os.chmod(entries, 0777)
open(entries, 'wb').write(txt)
for dirent in os.listdir(path):
item_path = os.path.join(path, dirent)
if dirent == svntest.main.get_admin_name():
continue
if os.path.isdir(os.path.join(item_path, adm_name)):
simple_entries_replace(item_path, from_url, to_url)
def basic_upgrade_1_0(sbox):
"test upgrading a working copy created with 1.0.0"
sbox.build(create_wc = False)
replace_sbox_with_tarfile(sbox, 'upgrade_1_0.tar.bz2')
url = sbox.repo_url
xml_entries_relocate(sbox.wc_dir, 'file:///1.0.0/repos', url)
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'info', sbox.wc_dir)
# Now upgrade the working copy
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# And the separate working copy below COPIED or check_format() fails
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade',
os.path.join(sbox.wc_dir, 'COPIED', 'G'))
# Actually check the format number of the upgraded working copy
check_format(sbox, get_current_format())
# Now check the contents of the working copy
# #### This working copy is not just a basic tree,
# fix with the right data once we get here
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev=7),
'B' : Item(status=' ', wc_rev='7'),
'B/mu' : Item(status=' ', wc_rev='7'),
'B/D' : Item(status=' ', wc_rev='7'),
'B/D/H' : Item(status=' ', wc_rev='7'),
'B/D/H/psi' : Item(status=' ', wc_rev='7'),
'B/D/H/omega' : Item(status=' ', wc_rev='7'),
'B/D/H/zeta' : Item(status='MM', wc_rev='7'),
'B/D/H/chi' : Item(status=' ', wc_rev='7'),
'B/D/gamma' : Item(status=' ', wc_rev='9'),
'B/D/G' : Item(status=' ', wc_rev='7'),
'B/D/G/tau' : Item(status=' ', wc_rev='7'),
'B/D/G/rho' : Item(status=' ', wc_rev='7'),
'B/D/G/pi' : Item(status=' ', wc_rev='7'),
'B/B' : Item(status=' ', wc_rev='7'),
'B/B/lambda' : Item(status=' ', wc_rev='7'),
'MKDIR' : Item(status='A ', wc_rev='0'),
'MKDIR/MKDIR' : Item(status='A ', wc_rev='0'),
'A' : Item(status=' ', wc_rev='7'),
'A/B' : Item(status=' ', wc_rev='7'),
'A/B/lambda' : Item(status=' ', wc_rev='7'),
'A/D' : Item(status=' ', wc_rev='7'),
'A/D/G' : Item(status=' ', wc_rev='7'),
'A/D/G/rho' : Item(status=' ', wc_rev='7'),
'A/D/G/pi' : Item(status=' ', wc_rev='7'),
'A/D/G/tau' : Item(status=' ', wc_rev='7'),
'A/D/H' : Item(status=' ', wc_rev='7'),
'A/D/H/psi' : Item(status=' ', wc_rev='7'),
'A/D/H/omega' : Item(status=' ', wc_rev='7'),
'A/D/H/zeta' : Item(status=' ', wc_rev='7'),
'A/D/H/chi' : Item(status=' ', wc_rev='7'),
'A/D/gamma' : Item(status=' ', wc_rev='7'),
'A/mu' : Item(status=' ', wc_rev='7'),
'iota' : Item(status=' ', wc_rev='7'),
'COPIED' : Item(status=' ', wc_rev='10'),
'DELETED' : Item(status='D ', wc_rev='10'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
expected_infos = [ {
'Node Kind': 'directory',
'Schedule': 'normal',
'Revision': '7',
'Last Changed Author' : 'Bert',
'Last Changed Rev' : '7'
} ]
svntest.actions.run_and_verify_info(expected_infos, sbox.wc_dir)
expected_infos = [ {
'Node Kind': 'directory',
'Schedule': 'delete',
'Revision': '10',
'Last Changed Author' : 'Bert',
'Last Changed Rev' : '10'
} ]
svntest.actions.run_and_verify_info(expected_infos,
os.path.join(sbox.wc_dir, 'DELETED'))
check_pristine(sbox, ['iota', 'A/mu', 'A/D/H/zeta'])
# Helper function for the x3 tests.
def do_x3_upgrade(sbox, expected_error=[]):
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'info', sbox.wc_dir)
# Now upgrade the working copy
svntest.actions.run_and_verify_svn(None, None, expected_error,
'upgrade', sbox.wc_dir)
if expected_error != []:
return
# Actually check the format number of the upgraded working copy
check_format(sbox, get_current_format())
# Now check the contents of the working copy
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='2'),
'A' : Item(status=' ', wc_rev='2'),
'A/D' : Item(status=' ', wc_rev='2'),
'A/D/H' : Item(status=' ', wc_rev='2'),
'A/D/H/omega' : Item(status=' ', wc_rev='2'),
'A/D/H/psi' : Item(status='D ', wc_rev='2'),
'A/D/H/new' : Item(status='A ', copied='+', wc_rev='-'),
'A/D/H/chi' : Item(status='R ', copied='+', wc_rev='-'),
'A/D/gamma' : Item(status='D ', wc_rev='2'),
'A/D/G' : Item(status=' ', wc_rev='2'),
'A/B_new' : Item(status='A ', copied='+', wc_rev='-'),
'A/B_new/B' : Item(status='A ', copied='+', wc_rev='-'),
'A/B_new/B/E' : Item(status=' M', copied='+', wc_rev='-'),
'A/B_new/B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'),
'A/B_new/B/E/beta' : Item(status='R ', copied='+', wc_rev='-'),
'A/B_new/B/new' : Item(status='A ', copied='+', wc_rev='-'),
'A/B_new/B/lambda' : Item(status='R ', copied='+', wc_rev='-'),
'A/B_new/B/F' : Item(status=' ', copied='+', wc_rev='-'),
'A/B_new/E' : Item(status=' M', copied='+', wc_rev='-'),
'A/B_new/E/alpha' : Item(status=' M', copied='+', wc_rev='-'),
'A/B_new/E/beta' : Item(status='RM', copied='+', wc_rev='-'),
'A/B_new/lambda' : Item(status='R ', copied='+', wc_rev='-'),
'A/B_new/new' : Item(status='A ', copied='+', wc_rev='-'),
'A/B_new/F' : Item(status=' ', copied='+', wc_rev='-'),
'A/B' : Item(status=' ', wc_rev='2'),
'A/B/E' : Item(status=' ', wc_rev='2'),
'A/B/E/beta' : Item(status='RM', copied='+', wc_rev='-'),
'A/B/E/alpha' : Item(status=' M', wc_rev='2'),
'A/B/F' : Item(status=' ', wc_rev='2'),
'A/B/lambda' : Item(status='R ', copied='+', wc_rev='-'),
'A/B/new' : Item(status='A ', copied='+', wc_rev='-'),
'A/G_new' : Item(status='A ', copied='+', wc_rev='-'),
'A/G_new/rho' : Item(status='R ', copied='+', wc_rev='-'),
'iota' : Item(status=' ', wc_rev='2'),
'A_new' : Item(status='A ', wc_rev='0'),
'A_new/alpha' : Item(status='A ', copied='+', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
simple_property_verify(sbox.wc_dir, {
'A/B_new/E/beta' : {'x3' : '3x',
'svn:eol-style': 'native'},
'A/B/E/beta' : {'s' : 't',
'svn:eol-style': 'native'},
'A/B_new/B/E/alpha' : {'svn:eol-style': 'native'},
'A/B/E/alpha' : {'q': 'r',
'svn:eol-style': 'native'},
'A_new/alpha' : {'svn:eol-style': 'native'},
'A/B_new/B/new' : {'svn:eol-style': 'native'},
'A/B_new/E/alpha' : {'svn:eol-style': 'native',
'u': 'v'},
'A/B_new/B/E' : {'q': 'r'},
'A/B_new/lambda' : {'svn:eol-style': 'native'},
'A/B_new/E' : {'x3': '3x'},
'A/B_new/new' : {'svn:eol-style': 'native'},
'A/B/lambda' : {'svn:eol-style': 'native'},
'A/B_new/B/E/beta' : {'svn:eol-style': 'native'},
'A/B_new/B/lambda' : {'svn:eol-style': 'native'},
'A/B/new' : {'svn:eol-style': 'native'},
'A/G_new/rho' : {'svn:eol-style': 'native'}
})
svntest.actions.run_and_verify_svn(None, 'Reverted.*', [],
'revert', '-R', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='2'),
'A' : Item(status=' ', wc_rev='2'),
'A/D' : Item(status=' ', wc_rev='2'),
'A/D/H' : Item(status=' ', wc_rev='2'),
'A/D/H/omega' : Item(status=' ', wc_rev='2'),
'A/D/H/psi' : Item(status=' ', wc_rev='2'),
'A/D/H/chi' : Item(status=' ', wc_rev='2'),
'A/D/gamma' : Item(status=' ', wc_rev='2'),
'A/D/G' : Item(status=' ', wc_rev='2'),
'A/B' : Item(status=' ', wc_rev='2'),
'A/B/F' : Item(status=' ', wc_rev='2'),
'A/B/E' : Item(status=' ', wc_rev='2'),
'A/B/E/beta' : Item(status=' ', wc_rev='2'),
'A/B/E/alpha' : Item(status=' ', wc_rev='2'),
'A/B/lambda' : Item(status=' ', wc_rev='2'),
'iota' : Item(status=' ', wc_rev='2'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
simple_property_verify(sbox.wc_dir, {
'A/B/E/beta' : {'svn:eol-style': 'native'},
# 'A/B/lambda' : {'svn:eol-style': 'native'},
'A/B/E/alpha' : {'svn:eol-style': 'native'}
})
@Issue(2530)
def x3_1_4_0(sbox):
"3x same wc upgrade 1.4.0 test"
replace_sbox_with_tarfile(sbox, 'wc-3x-1.4.0.tar.bz2', dir='wc-1.4.0')
do_x3_upgrade(sbox, expected_error='.*E155016: The properties of.*are in an '
'indeterminate state and cannot be upgraded. See issue #2530.')
@Issue(3811)
def x3_1_4_6(sbox):
"3x same wc upgrade 1.4.6 test"
replace_sbox_with_tarfile(sbox, 'wc-3x-1.4.6.tar.bz2', dir='wc-1.4.6')
do_x3_upgrade(sbox)
@Issue(3811)
def x3_1_6_12(sbox):
"3x same wc upgrade 1.6.12 test"
replace_sbox_with_tarfile(sbox, 'wc-3x-1.6.12.tar.bz2', dir='wc-1.6.12')
do_x3_upgrade(sbox)
def missing_dirs(sbox):
"missing directories and obstructing files"
# tarball wc looks like:
# svn co URL wc
# svn cp wc/A/B wc/A/B_new
# rm -rf wc/A/B/E wc/A/D wc/A/B_new/E wc/A/B_new/F
# touch wc/A/D wc/A/B_new/F
replace_sbox_with_tarfile(sbox, 'missing-dirs.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='1'),
'A' : Item(status=' ', wc_rev='1'),
'A/mu' : Item(status=' ', wc_rev='1'),
'A/C' : Item(status=' ', wc_rev='1'),
'A/D' : Item(status='! ', wc_rev='1'),
'A/B' : Item(status=' ', wc_rev='1'),
'A/B/F' : Item(status=' ', wc_rev='1'),
'A/B/E' : Item(status='! ', wc_rev='1'),
'A/B/lambda' : Item(status=' ', wc_rev='1'),
'iota' : Item(status=' ', wc_rev='1'),
'A/B_new' : Item(status='A ', wc_rev='-', copied='+'),
'A/B_new/E' : Item(status='! ', wc_rev='-'),
'A/B_new/F' : Item(status='! ', wc_rev='-'),
'A/B_new/lambda' : Item(status=' ', wc_rev='-', copied='+'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
def missing_dirs2(sbox):
"missing directories and obstructing dirs"
replace_sbox_with_tarfile(sbox, 'missing-dirs.tar.bz2')
os.remove(sbox.ospath('A/D'))
os.remove(sbox.ospath('A/B_new/F'))
os.mkdir(sbox.ospath('A/D'))
os.mkdir(sbox.ospath('A/B_new/F'))
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='1'),
'A' : Item(status=' ', wc_rev='1'),
'A/mu' : Item(status=' ', wc_rev='1'),
'A/C' : Item(status=' ', wc_rev='1'),
'A/D' : Item(status='! ', wc_rev='1'),
'A/B' : Item(status=' ', wc_rev='1'),
'A/B/F' : Item(status=' ', wc_rev='1'),
'A/B/E' : Item(status='! ', wc_rev='1'),
'A/B/lambda' : Item(status=' ', wc_rev='1'),
'iota' : Item(status=' ', wc_rev='1'),
'A/B_new' : Item(status='A ', wc_rev='-', copied='+'),
'A/B_new/E' : Item(status='! ', wc_rev='-'),
'A/B_new/F' : Item(status='! ', wc_rev='-'),
'A/B_new/lambda' : Item(status=' ', wc_rev='-', copied='+'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3808)
def delete_and_keep_local(sbox):
"check status delete and delete --keep-local"
replace_sbox_with_tarfile(sbox, 'wc-delete.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='0'),
'Normal' : Item(status=' ', wc_rev='1'),
'Deleted-Keep-Local': Item(status='D ', wc_rev='1'),
'Deleted' : Item(status='D ', wc_rev='1'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
# Deleted-Keep-Local should still exist after the upgrade
if not os.path.exists(os.path.join(sbox.wc_dir, 'Deleted-Keep-Local')):
raise svntest.Failure('wc/Deleted-Keep-Local should exist')
# Deleted should be removed after the upgrade as it was
# schedule delete and doesn't contain unversioned changes.
if os.path.exists(os.path.join(sbox.wc_dir, 'Deleted')):
raise svntest.Failure('wc/Deleted should not exist')
def dirs_only_upgrade(sbox):
"upgrade a wc without files"
replace_sbox_with_tarfile(sbox, 'dirs-only.tar.bz2')
expected_output = ["Upgraded '%s'\n" % (sbox.ospath('').rstrip(os.path.sep)),
"Upgraded '%s'\n" % (sbox.ospath('A'))]
svntest.actions.run_and_verify_svn(None, expected_output, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir, {
'' : Item(status=' ', wc_rev='1'),
'A' : Item(status=' ', wc_rev='1'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
def read_tree_conflict_data(sbox, path):
dot_svn = svntest.main.get_admin_name()
db = svntest.sqlite3.connect(os.path.join(sbox.wc_dir, dot_svn, 'wc.db'))
for row in db.execute("select tree_conflict_data from actual_node "
"where tree_conflict_data is not null "
"and local_relpath = '%s'" % path):
return
raise svntest.Failure("conflict expected for '%s'" % path)
def no_actual_node(sbox, path):
dot_svn = svntest.main.get_admin_name()
db = svntest.sqlite3.connect(os.path.join(sbox.wc_dir, dot_svn, 'wc.db'))
for row in db.execute("select 1 from actual_node "
"where local_relpath = '%s'" % path):
raise svntest.Failure("no actual node expected for '%s'" % path)
def upgrade_tree_conflict_data(sbox):
"upgrade tree conflict data (f20->f21)"
wc_dir = sbox.wc_dir
replace_sbox_with_tarfile(sbox, 'upgrade_tc.tar.bz2')
# Check and see if we can still read our tree conflicts
expected_status = svntest.actions.get_virginal_state(wc_dir, 2)
expected_status.tweak('A/D/G/pi', status='D ', treeconflict='C')
expected_status.tweak('A/D/G/tau', status='! ', treeconflict='C',
wc_rev=None)
expected_status.tweak('A/D/G/rho', status='A ', copied='+',
treeconflict='C', wc_rev='-')
# Look inside pre-upgrade database
read_tree_conflict_data(sbox, 'A/D/G')
no_actual_node(sbox, 'A/D/G/pi')
no_actual_node(sbox, 'A/D/G/rho')
no_actual_node(sbox, 'A/D/G/tau')
  # While the upgrade from f20 to f21 will work, the upgrade from f22
  # to f23 will not, since working nodes are present, so the
  # auto-upgrade will fail. If this happens we cannot use the
  # Subversion libraries to query the working copy.
exit_code, output, errput = svntest.main.run_svn('format 22', 'st', wc_dir)
if not exit_code:
run_and_verify_status_no_server(wc_dir, expected_status)
else:
if not svntest.verify.RegexOutput('.*format 22 with WORKING nodes.*',
match_all=False).matches(errput):
raise svntest.Failure()
  # Look inside post-upgrade database
read_tree_conflict_data(sbox, 'A/D/G/pi')
read_tree_conflict_data(sbox, 'A/D/G/rho')
read_tree_conflict_data(sbox, 'A/D/G/tau')
# no_actual_node(sbox, 'A/D/G') ### not removed but should be?
@Issue(3898)
def delete_in_copy_upgrade(sbox):
"upgrade a delete within a copy"
wc_dir = sbox.wc_dir
replace_sbox_with_tarfile(sbox, 'delete-in-copy.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
expected_status.add({
'A/B-copied' : Item(status='A ', copied='+', wc_rev='-'),
'A/B-copied/lambda' : Item(status=' ', copied='+', wc_rev='-'),
'A/B-copied/E' : Item(status='D ', copied='+', wc_rev='-'),
'A/B-copied/E/alpha' : Item(status='D ', copied='+', wc_rev='-'),
'A/B-copied/E/beta' : Item(status='D ', copied='+', wc_rev='-'),
'A/B-copied/F' : Item(status=' ', copied='+', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert', '-R',
sbox.ospath('A/B-copied/E'))
expected_status.tweak('A/B-copied/E',
'A/B-copied/E/alpha',
'A/B-copied/E/beta',
status=' ')
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
simple_checksum_verify([[sbox.ospath('A/B-copied/E/alpha'),
'b347d1da69df9a6a70433ceeaa0d46c8483e8c03']])
def replaced_files(sbox):
"upgrade with base and working replaced files"
wc_dir = sbox.wc_dir
replace_sbox_with_tarfile(sbox, 'replaced-files.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
# A is a checked-out dir containing A/f and A/g, then
# svn cp wc/A wc/B
# svn rm wc/A/f wc/B/f
# svn cp wc/A/g wc/A/f # A/f replaced by copied A/g
# svn cp wc/A/g wc/B/f # B/f replaced by copied A/g (working-only)
# svn rm wc/A/g wc/B/g
# touch wc/A/g wc/B/g
# svn add wc/A/g wc/B/g # A/g replaced, B/g replaced (working-only)
# svn ps pX vX wc/A/g
# svn ps pY vY wc/B/g
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='5'),
'A' : Item(status=' ', wc_rev='5'),
'A/f' : Item(status='R ', wc_rev='-', copied='+'),
'A/g' : Item(status='RM', wc_rev='5'),
'B' : Item(status='A ', wc_rev='-', copied='+'),
'B/f' : Item(status='R ', wc_rev='-', copied='+'),
'B/g' : Item(status='RM', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
simple_property_verify(sbox.wc_dir, {
'A/f' : {'pAg' : 'vAg' },
'A/g' : {'pX' : 'vX' },
'B/f' : {'pAg' : 'vAg' },
'B/g' : {'pY' : 'vY' },
})
simple_checksum_verify([
[sbox.ospath('A/f'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
[sbox.ospath('A/g'), None],
[sbox.ospath('B/f'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
[sbox.ospath('B/g'), None]])
svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert',
sbox.ospath('A/f'), sbox.ospath('B/f'),
sbox.ospath('A/g'), sbox.ospath('B/g'))
simple_property_verify(sbox.wc_dir, {
'A/f' : {'pAf' : 'vAf' },
'A/g' : {'pAg' : 'vAg' },
'B/f' : {'pAf' : 'vAf' },
'B/g' : {'pAg' : 'vAg' },
})
simple_checksum_verify([
[sbox.ospath('A/f'), '958eb2d755df2d9e0de6f7b835aec16b64d83f6f'],
[sbox.ospath('A/g'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'],
[sbox.ospath('B/f'), '958eb2d755df2d9e0de6f7b835aec16b64d83f6f'],
[sbox.ospath('B/g'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9']])
def upgrade_with_scheduled_change(sbox):
"upgrade 1.6.x wc with a scheduled change"
replace_sbox_with_tarfile(sbox, 'upgrade_with_scheduled_change.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [],
'upgrade', sbox.wc_dir)
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
expected_status.add({
'A/scheduled_file_1' : Item(status='A ', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3777)
def tree_replace1(sbox):
"upgrade 1.6 with tree replaced"
replace_sbox_with_tarfile(sbox, 'tree-replace1.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' M', wc_rev=17),
'B' : Item(status='R ', copied='+', wc_rev='-'),
'B/f' : Item(status='R ', copied='+', wc_rev='-'),
'B/g' : Item(status='D ', wc_rev=17),
'B/h' : Item(status='A ', copied='+', wc_rev='-'),
'B/C' : Item(status='R ', copied='+', wc_rev='-'),
'B/C/f' : Item(status='R ', copied='+', wc_rev='-'),
'B/D' : Item(status='D ', wc_rev=17),
'B/D/f' : Item(status='D ', wc_rev=17),
'B/E' : Item(status='A ', copied='+', wc_rev='-'),
'B/E/f' : Item(status='A ', copied='+', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3777)
def tree_replace2(sbox):
"upgrade 1.6 with tree replaced (2)"
replace_sbox_with_tarfile(sbox, 'tree-replace2.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' M', wc_rev=12),
'B' : Item(status='R ', copied='+', wc_rev='-'),
'B/f' : Item(status='D ', wc_rev=12),
'B/D' : Item(status='D ', wc_rev=12),
'B/g' : Item(status='A ', copied='+', wc_rev='-'),
'B/E' : Item(status='A ', copied='+', wc_rev='-'),
'C' : Item(status='R ', copied='+', wc_rev='-'),
'C/f' : Item(status='A ', copied='+', wc_rev='-'),
'C/D' : Item(status='A ', copied='+', wc_rev='-'),
'C/g' : Item(status='D ', wc_rev=12),
'C/E' : Item(status='D ', wc_rev=12),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
def upgrade_from_format_28(sbox):
"""upgrade from format 28: rename pristines"""
# Start with a format-28 WC that is a clean checkout of the Greek tree.
replace_sbox_with_tarfile(sbox, 'format_28.tar.bz2')
# Get the old and new pristine file paths for file 'iota'.
checksum = '2c0aa9014a0cd07f01795a333d82485ef6d083e2'
old_pristine_path = os.path.join(sbox.wc_dir, svntest.main.get_admin_name(),
'pristine', checksum[0:2], checksum)
new_pristine_path = old_pristine_path + '.svn-base'
assert os.path.exists(old_pristine_path)
assert not os.path.exists(new_pristine_path)
# Touch the WC to auto-upgrade it
svntest.actions.run_and_verify_svn(None, None, [], 'info', sbox.wc_dir)
assert not os.path.exists(old_pristine_path)
assert os.path.exists(new_pristine_path)
@Issue(3901)
def depth_exclude(sbox):
"upgrade 1.6.x wc that has depth=exclude"
replace_sbox_with_tarfile(sbox, 'depth_exclude.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='1'),
'A' : Item(status=' ', wc_rev='1'),
'X' : Item(status='A ', copied='+', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3901)
def depth_exclude_2(sbox):
"1.6.x wc that has depth=exclude inside a delete"
replace_sbox_with_tarfile(sbox, 'depth_exclude_2.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='1'),
'A' : Item(status='D ', wc_rev='1'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3916)
def add_add_del_del_tc(sbox):
"wc with add-add and del-del tree conflicts"
replace_sbox_with_tarfile(sbox, 'add_add_del_del_tc.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='4'),
'A' : Item(status=' ', wc_rev='4'),
'A/B' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
'X' : Item(status=' ', wc_rev='3'),
'X/Y' : Item(status='! ', treeconflict='C')
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3916)
def add_add_x2(sbox):
"wc with 2 tree conflicts in same entry"
replace_sbox_with_tarfile(sbox, 'add_add_x2.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev='3'),
'A' : Item(status=' ', wc_rev='3'),
'A/X' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
'A/Y' : Item(status='A ', treeconflict='C', copied='+', wc_rev='-'),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(3940)
def upgrade_with_missing_subdir(sbox):
"test upgrading a working copy with missing subdir"
sbox.build(create_wc = False)
replace_sbox_with_tarfile(sbox, 'basic_upgrade.tar.bz2')
simple_entries_replace(sbox.wc_dir,
'file:///Users/Hyrum/dev/test/greek-1.6.repo',
sbox.repo_url)
svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
'cafefeed-babe-face-dead-beeff00dfade')
url = sbox.repo_url
wc_dir = sbox.wc_dir
# Attempt to use the working copy, this should give an error
expected_stderr = wc_is_too_old_regex
svntest.actions.run_and_verify_svn(None, None, expected_stderr,
'info', sbox.wc_dir)
# Now remove a subdirectory
svntest.main.safe_rmtree(sbox.ospath('A/B'))
# Now upgrade the working copy and expect a missing subdir
expected_output = svntest.verify.UnorderedOutput([
"Upgraded '%s'\n" % sbox.wc_dir,
"Upgraded '%s'\n" % sbox.ospath('A'),
"Skipped '%s'\n" % sbox.ospath('A/B'),
"Upgraded '%s'\n" % sbox.ospath('A/C'),
"Upgraded '%s'\n" % sbox.ospath('A/D'),
"Upgraded '%s'\n" % sbox.ospath('A/D/G'),
"Upgraded '%s'\n" % sbox.ospath('A/D/H'),
])
svntest.actions.run_and_verify_svn(None, expected_output, [],
'upgrade', sbox.wc_dir)
# And now perform an update. (This used to fail with an assertion)
expected_output = svntest.wc.State(wc_dir, {
'A/B' : Item(verb='Restored'),
'A/B/E' : Item(status='A '),
'A/B/E/alpha' : Item(status='A '),
'A/B/E/beta' : Item(status='A '),
'A/B/lambda' : Item(status='A '),
'A/B/F' : Item(status='A '),
})
expected_disk = svntest.main.greek_state.copy()
expected_status = svntest.actions.get_virginal_state(wc_dir, 1)
# Do the update and check the results in three ways.
svntest.actions.run_and_verify_update(wc_dir,
expected_output,
expected_disk,
expected_status)
@Issue(3994)
def upgrade_locked(sbox):
"upgrade working copy with locked files"
replace_sbox_with_tarfile(sbox, 'upgrade_locked.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
expected_status = svntest.wc.State(sbox.wc_dir,
{
'' : Item(status=' ', wc_rev=1),
'A' : Item(status='D ', wc_rev=2),
'A/third' : Item(status='D ', writelocked='K', wc_rev=2),
'other' : Item(status='D ', writelocked='K', wc_rev=4),
'iota' : Item(status=' ', writelocked='K', wc_rev=3),
})
run_and_verify_status_no_server(sbox.wc_dir, expected_status)
@Issue(4015)
def upgrade_file_externals(sbox):
"upgrade with file externals"
sbox.build()
replace_sbox_with_tarfile(sbox, 'upgrade_file_externals.tar.bz2')
svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
'07146bbd-0b64-4aaf-ab70-cd76a0df2d41')
expected_output = svntest.verify.RegexOutput('r2 committed.*')
svntest.actions.run_and_verify_svnmucc(None, expected_output, [],
'propset', 'svn:externals',
'^/A/B/E EX\n^/A/mu muX',
sbox.repo_url + '/A/B/F')
expected_output = svntest.verify.RegexOutput('r3 committed.*')
svntest.actions.run_and_verify_svnmucc(None, expected_output, [],
'propset', 'svn:externals',
'^/A/B/F FX\n^/A/B/lambda lambdaX',
sbox.repo_url + '/A/C')
expected_output = svntest.verify.RegexOutput('r4 committed.*')
svntest.actions.run_and_verify_svnmucc(None, expected_output, [],
'propset', 'pname1', 'pvalue1',
sbox.repo_url + '/A/mu',
'propset', 'pname2', 'pvalue2',
sbox.repo_url + '/A/B/lambda',
'propset', 'pname3', 'pvalue3',
sbox.repo_url + '/A/B/E/alpha')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
svntest.actions.run_and_verify_svn(None, None, [], 'relocate',
'file:///tmp/repo', sbox.repo_url,
sbox.wc_dir)
expected_output = svntest.wc.State(sbox.wc_dir, {
'A/mu' : Item(status=' U'),
'A/B/lambda' : Item(status=' U'),
'A/B/E/alpha' : Item(status=' U'),
'A/C/FX/EX/alpha' : Item(status=' U'),
'A/C/FX/muX' : Item(status=' U'),
'A/C/lambdaX' : Item(status=' U'),
'A/B/F/EX/alpha' : Item(status=' U'),
'A/B/F/muX' : Item(status=' U'),
})
svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
None, None)
### simple_property_verify only sees last line of multi-line
### property values such as svn:externals
simple_property_verify(sbox.wc_dir, {
'A/mu' : {'pname1' : 'pvalue1' },
'A/B/lambda' : {'pname2' : 'pvalue2' },
'A/B/E/alpha' : {'pname3' : 'pvalue3' },
'A/B/F' : {'svn:externals' : '^/A/mu muX'},
'A/C' : {'svn:externals' : '^/A/B/lambda lambdaX'},
'A/B/F/muX' : {'pname1' : 'pvalue1' },
'A/C/lambdaX' : {'pname2' : 'pvalue2' },
})
simple_property_verify(sbox.ospath('A/C/FX'), {
'' : {'svn:externals' : '^/A/mu muX'},
'muX' : {'pname1' : 'pvalue1' },
})
simple_property_verify(sbox.ospath('A/C/FX/EX'), {
'alpha' : {'pname3' : 'pvalue3' },
})
@Issue(4035)
def upgrade_missing_replaced(sbox):
"upgrade with missing replaced dir"
sbox.build(create_wc=False)
replace_sbox_with_tarfile(sbox, 'upgrade_missing_replaced.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
'd7130b12-92f6-45c9-9217-b9f0472c3fab')
svntest.actions.run_and_verify_svn(None, None, [], 'relocate',
'file:///tmp/repo', sbox.repo_url,
sbox.wc_dir)
expected_output = svntest.wc.State(sbox.wc_dir, {
'A/B/E' : Item(status=' ', treeconflict='C'),
'A/B/E/alpha' : Item(status=' ', treeconflict='A'),
'A/B/E/beta' : Item(status=' ', treeconflict='A'),
})
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
expected_status.tweak('A/B/E', status='! ', treeconflict='C', wc_rev='-')
expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ')
svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
None, expected_status)
svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert', '-R',
sbox.wc_dir)
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status)
@Issue(4033)
def upgrade_not_present_replaced(sbox):
"upgrade with not-present replaced nodes"
sbox.build(create_wc=False)
replace_sbox_with_tarfile(sbox, 'upgrade_not_present_replaced.tar.bz2')
svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir)
svntest.main.run_svnadmin('setuuid', sbox.repo_dir,
'd7130b12-92f6-45c9-9217-b9f0472c3fab')
svntest.actions.run_and_verify_svn(None, None, [], 'relocate',
'file:///tmp/repo', sbox.repo_url,
sbox.wc_dir)
expected_output = svntest.wc.State(sbox.wc_dir, {
'A/B/E' : Item(status='E '),
'A/B/E/alpha' : Item(status='A '),
'A/B/E/beta' : Item(status='A '),
'A/B/lambda' : Item(status='E '),
})
expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output,
None, expected_status)
########################################################################
# Run the tests
# prop states
#
# .base simple checkout
# .base, .revert delete, copy-here
# .working add, propset
# .base, .working checkout, propset
# .base, .revert, .working delete, copy-here, propset
# .revert, .working delete, add, propset
# .revert delete, add
#
# 1.3.x (f4)
# 1.4.0 (f8, buggy)
# 1.4.6 (f8, fixed)
# list all tests here, starting with None:
test_list = [ None,
basic_upgrade,
upgrade_with_externals,
upgrade_1_5,
update_1_5,
logs_left_1_5,
upgrade_wcprops,
basic_upgrade_1_0,
# Upgrading from 1.4.0-1.4.5 with specific states fails
# See issue #2530
x3_1_4_0,
x3_1_4_6,
x3_1_6_12,
missing_dirs,
missing_dirs2,
delete_and_keep_local,
dirs_only_upgrade,
upgrade_tree_conflict_data,
delete_in_copy_upgrade,
replaced_files,
upgrade_with_scheduled_change,
tree_replace1,
tree_replace2,
upgrade_from_format_28,
depth_exclude,
depth_exclude_2,
add_add_del_del_tc,
add_add_x2,
upgrade_with_missing_subdir,
upgrade_locked,
upgrade_file_externals,
upgrade_missing_replaced,
upgrade_not_present_replaced,
]
if __name__ == '__main__':
svntest.main.run_tests(test_list)
# NOTREACHED
```
#### File: tests/cmdline/wc_tests.py
```python
import shutil, stat, re, os, logging
logger = logging.getLogger()
# Our testing module
import svntest
from svntest import wc
# (abbreviation)
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
Item = wc.StateItem
######################################################################
# Tests
#
# Each test must return on success or raise on failure.
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def status_through_unversioned_symlink(sbox):
"""file status through unversioned symlink"""
sbox.build(read_only = True)
state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
os.symlink('A', sbox.ospath('Z'))
svntest.actions.run_and_verify_status(sbox.ospath('Z/mu'), state)
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def status_through_versioned_symlink(sbox):
"""file status through versioned symlink"""
sbox.build(read_only = True)
state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
os.symlink('A', sbox.ospath('Z'))
sbox.simple_add('Z')
state.add({'Z': Item(status='A ')})
svntest.actions.run_and_verify_status(sbox.ospath('Z/mu'), state)
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def status_with_symlink_in_path(sbox):
"""file status with not-parent symlink"""
sbox.build(read_only = True)
state = svntest.actions.get_virginal_state(sbox.wc_dir, 1)
os.symlink('A', sbox.ospath('Z'))
svntest.actions.run_and_verify_status(sbox.ospath('Z/B/lambda'), state)
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def add_through_unversioned_symlink(sbox):
"""add file through unversioned symlink"""
sbox.build(read_only = True)
os.symlink('A', sbox.ospath('Z'))
sbox.simple_append('A/kappa', 'xyz', True)
sbox.simple_add('Z/kappa')
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def add_through_versioned_symlink(sbox):
"""add file through versioned symlink"""
sbox.build(read_only = True)
os.symlink('A', sbox.ospath('Z'))
sbox.simple_add('Z')
sbox.simple_append('A/kappa', 'xyz', True)
sbox.simple_add('Z/kappa')
@XFail()
@Issue(4193)
@SkipUnless(svntest.main.is_posix_os)
def add_with_symlink_in_path(sbox):
"""add file with not-parent symlink"""
sbox.build(read_only = True)
os.symlink('A', sbox.ospath('Z'))
sbox.simple_append('A/B/kappa', 'xyz', True)
sbox.simple_add('Z/B/kappa')
########################################################################
# Run the tests
# list all tests here, starting with None:
test_list = [ None,
status_through_unversioned_symlink,
status_through_versioned_symlink,
status_with_symlink_in_path,
add_through_unversioned_symlink,
add_through_versioned_symlink,
add_with_symlink_in_path,
]
if __name__ == '__main__':
svntest.main.run_tests(test_list)
# NOTREACHED
### End of file.
```
#### File: server-side/svnpubsub/commit-hook.py
```python
SVNLOOK="/usr/local/svn-install/current/bin/svnlook"
#SVNLOOK="/usr/local/bin/svnlook"
import sys
import subprocess
try:
import simplejson as json
except ImportError:
import json
import urllib2
HOST="127.0.0.1"
PORT=2069
def svncmd(cmd):
return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
def svncmd_uuid(repo):
cmd = "%s uuid %s" % (SVNLOOK, repo)
p = svncmd(cmd)
return p.stdout.read().strip()
def svncmd_info(repo, revision):
cmd = "%s info -r %s %s" % (SVNLOOK, revision, repo)
p = svncmd(cmd)
data = p.stdout.read().strip().split("\n")
#print data
return {'author': data[0],
'date': data[1],
'log': "".join(data[3:])}
def svncmd_dirs(repo, revision):
cmd = "%s dirs-changed -r %s %s" % (SVNLOOK, revision, repo)
p = svncmd(cmd)
dirs = []
while True:
line = p.stdout.readline()
if not line:
break
dirs.append(line.strip())
return dirs
def do_put(body):
opener = urllib2.build_opener(urllib2.HTTPHandler)
request = urllib2.Request("http://%s:%d/dirs-changed" %(HOST, PORT), data=body)
request.add_header('Content-Type', 'application/json')
request.get_method = lambda: 'PUT'
url = opener.open(request)
def main(repo, revision):
i = svncmd_info(repo, revision)
data = {'revision': int(revision),
'dirs_changed': [],
'repos': svncmd_uuid(repo),
'author': i['author'],
'log': i['log'],
'date': i['date'],
}
data['dirs_changed'].extend(svncmd_dirs(repo, revision))
body = json.dumps(data)
#print body
do_put(body)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "invalid args"
sys.exit(0)
main(sys.argv[1], sys.argv[2])
```
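In a deployment, Subversion runs a script like this from the repository's post-commit hook, passing the repository path and the new revision number as its two arguments. Purely as an illustration (none of the values below come from the repository), this is the shape of the JSON body that `do_put` ends up PUTting to `http://127.0.0.1:2069/dirs-changed`:
```python
# Illustrative payload only; every field value here is invented.
import json

payload = {
    "revision": 1729,                                    # int(revision) from the hook arguments
    "dirs_changed": ["trunk/src", "trunk/docs"],         # output of `svnlook dirs-changed`
    "repos": "12345678-90ab-cdef-1234-567890abcdef",     # repository UUID from `svnlook uuid`
    "author": "committer",
    "log": "Fix the widget frobnicator.",
    "date": "2012-01-01 12:00:00 +0000 (Sun, 01 Jan 2012)",
}

print(json.dumps(payload))
```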
#### File: svnpubsub/svnpubsub/client.py
```python
import asyncore
import asynchat
import socket
import functools
import time
import xml.sax
# How long the polling loop should wait for activity before returning.
TIMEOUT = 30.0
# Always delay a bit when trying to reconnect. This is not precise, but sets
# a minimum amount of delay. At the moment, there is no further backoff.
RECONNECT_DELAY = 25.0
# If we don't see anything from the server for this amount of time, then we
# will drop and reconnect. The TCP connection may have gone down without
# us noticing it somehow.
STALE_DELAY = 60.0
class Client(asynchat.async_chat):
def __init__(self, host, port, commit_callback, event_callback):
asynchat.async_chat.__init__(self)
self.last_activity = time.time()
self.host = host
self.port = port
self.event_callback = event_callback
handler = XMLStreamHandler(commit_callback, event_callback)
self.parser = xml.sax.make_parser(['xml.sax.expatreader'])
self.parser.setContentHandler(handler)
# Wait for the end of headers. Then we start parsing XML.
self.set_terminator('\r\n\r\n')
self.skipping_headers = True
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.connect((host, port))
except:
self.handle_error()
return
### should we allow for repository restrictions?
self.push('GET /commits/xml HTTP/1.0\r\n\r\n')
def handle_connect(self):
self.event_callback('connected')
def handle_close(self):
self.event_callback('closed')
self.close()
def handle_error(self):
self.event_callback('error')
self.close()
def found_terminator(self):
self.skipping_headers = False
# From here on, collect everything. Never look for a terminator.
self.set_terminator(None)
def collect_incoming_data(self, data):
# Remember the last time we saw activity
self.last_activity = time.time()
if not self.skipping_headers:
# Just shove this into the XML parser. As the elements are processed,
# we'll collect them into an appropriate structure, and then invoke
# the callback when we have fully received a commit.
self.parser.feed(data)
class XMLStreamHandler(xml.sax.handler.ContentHandler):
def __init__(self, commit_callback, event_callback):
self.commit_callback = commit_callback
self.event_callback = event_callback
self.rev = None
self.chars = ''
def startElement(self, name, attrs):
if name == 'commit':
self.rev = Revision(attrs['repository'], int(attrs['revision']))
# No other elements to worry about.
def characters(self, data):
self.chars += data
def endElement(self, name):
if name == 'commit':
self.commit_callback(self.rev)
self.rev = None
elif name == 'stillalive':
self.event_callback('ping')
elif self.chars and self.rev:
value = self.chars.strip()
if name == 'path':
self.rev.dirs_changed.append(value)
elif name == 'author':
self.rev.author = value
elif name == 'date':
self.rev.date = value
elif name == 'log':
self.rev.log = value
# Toss out any accumulated characters for this element.
self.chars = ''
class Revision(object):
def __init__(self, uuid, rev):
self.uuid = uuid
self.rev = rev
self.dirs_changed = [ ]
self.author = None
self.date = None
self.log = None
class MultiClient(object):
def __init__(self, hostports, commit_callback, event_callback):
self.commit_callback = commit_callback
self.event_callback = event_callback
# No target time, as no work to do
self.target_time = 0
self.work_items = [ ]
for host, port in hostports:
self._add_channel(host, port)
def _reconnect(self, host, port, event_name):
if event_name == 'closed' or event_name == 'error':
# Stupid connection closed for some reason. Set up a reconnect. Note
# that it should have been removed from asyncore.socket_map already.
self._reconnect_later(host, port)
# Call the user's callback now.
self.event_callback(host, port, event_name)
def _reconnect_later(self, host, port):
# Set up a work item to reconnect in a little while.
self.work_items.append((host, port))
# Only set a target if one has not been set yet. Otherwise, we could
# create a race condition of continually moving out towards the future
if not self.target_time:
self.target_time = time.time() + RECONNECT_DELAY
def _add_channel(self, host, port):
# Simply instantiating the client will install it into the global map
# for processing in the main event loop.
Client(host, port,
functools.partial(self.commit_callback, host, port),
functools.partial(self._reconnect, host, port))
def _check_stale(self):
now = time.time()
for client in asyncore.socket_map.values():
if client.last_activity + STALE_DELAY < now:
        # Whoops. No activity in a while. Signal this fact, close the
        # Client, then have it reconnected later on.
self.event_callback(client.host, client.port, 'stale')
# This should remove it from .socket_map.
client.close()
self._reconnect_later(client.host, client.port)
def _maybe_work(self):
    # If we haven't reached the target time, or have no work to do,
# then fast-path exit
if time.time() < self.target_time or not self.work_items:
return
# We'll take care of all the work items, so no target for future work
self.target_time = 0
# Play a little dance just in case work gets added while we're
# currently working on stuff
work = self.work_items
self.work_items = [ ]
for host, port in work:
self._add_channel(host, port)
def run_forever(self):
while True:
if asyncore.socket_map:
asyncore.loop(timeout=TIMEOUT, count=1)
else:
time.sleep(TIMEOUT)
self._check_stale()
self._maybe_work()
```
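As a usage sketch (not part of the original module), `MultiClient` can be driven like this; the callback signatures follow from the `functools.partial` wrapping in `_add_channel`, and the host/port pair is an assumption:
```python
# Minimal sketch: watch one svnpubsub server and print commits and events.
def on_commit(host, port, rev):
    # rev is a Revision instance built by XMLStreamHandler
    print('%s:%d r%d by %s' % (host, port, rev.rev, rev.author))
    print('  dirs changed: %s' % ', '.join(rev.dirs_changed))

def on_event(host, port, event_name):
    # event_name is one of 'connected', 'closed', 'error', 'ping' or 'stale'
    print('%s:%d event: %s' % (host, port, event_name))

mc = MultiClient([('127.0.0.1', 2069)], on_commit, on_event)
mc.run_forever()
```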
#### File: svnpubsub/svnpubsub/server.py
```python
try:
import simplejson as json
except ImportError:
import json
import sys
import twisted
from twisted.internet import reactor
from twisted.internet import defer
from twisted.web import server, static
from twisted.web import resource
from twisted.python import log
try:
from xml.etree import cElementTree as ET
except:
from xml.etree import ElementTree as ET
import time
class Revision:
def __init__(self, r):
self.rev = r.get('revision')
self.repos = r.get('repos')
self.dirs_changed = [x.encode('unicode_escape') for x in r.get('dirs_changed')]
self.author = r.get('author').encode('unicode_escape')
self.log = r.get('log').encode('unicode_escape')
self.date = r.get('date').encode('unicode_escape')
def render_commit(self, format):
if format == "json":
return json.dumps({'commit': {'repository': self.repos,
'revision': self.rev,
'dirs_changed': self.dirs_changed,
'author': self.author,
'log': self.log,
'date': self.date}}) +","
elif format == "xml":
c = ET.Element('commit', {'repository': self.repos, 'revision': "%d" % (self.rev)})
ET.SubElement(c, 'author').text = self.author
ET.SubElement(c, 'date').text = self.date
ET.SubElement(c, 'log').text = self.log
d = ET.SubElement(c, 'dirs_changed')
for p in self.dirs_changed:
x = ET.SubElement(d, 'path')
x.text = p
str = ET.tostring(c, 'UTF-8') + "\n"
return str[39:]
else:
raise Exception("Ooops, invalid format")
def render_dirs_changed(self, format):
if format == "json":
return json.dumps({'commit': {'repository': self.repos,
'revision': self.rev,
'dirs_changed': self.dirs_changed}}) +","
elif format == "xml":
c = ET.Element('commit', {'repository': self.repos, 'revision': "%d" % (self.rev)})
d = ET.SubElement(c, 'dirs_changed')
for p in self.dirs_changed:
x = ET.SubElement(d, 'path')
x.text = p
str = ET.tostring(c, 'UTF-8') + "\n"
return str[39:]
else:
raise Exception("Ooops, invalid format")
HEARTBEAT_TIME = 15
class Client(object):
def __init__(self, pubsub, r, repos, fmt):
self.pubsub = pubsub
r.notifyFinish().addErrback(self.finished)
self.r = r
self.format = fmt
self.repos = repos
self.alive = True
log.msg("OPEN: %s:%d (%d clients online)"% (r.getClientIP(), r.client.port, pubsub.cc()+1))
def finished(self, reason):
self.alive = False
log.msg("CLOSE: %s:%d (%d clients online)"% (self.r.getClientIP(), self.r.client.port, self.pubsub.cc()))
try:
self.pubsub.remove(self)
except ValueError:
pass
def interested_in(self, uuid):
if self.repos is None:
return True
if uuid == self.repos:
return True
return False
def notify(self, data):
self.write(data)
def start(self):
self.write_start()
reactor.callLater(HEARTBEAT_TIME, self.heartbeat, None)
def heartbeat(self, args):
if self.alive:
self.write_heartbeat()
reactor.callLater(HEARTBEAT_TIME, self.heartbeat, None)
def write_data(self, data):
self.write(data[self.format] + "\n")
""" "Data must not be unicode" is what the interfaces.ITransport says... grr. """
def write(self, input):
self.r.write(str(input))
class JSONClient(Client):
def write_start(self):
self.r.setHeader('content-type', 'application/json')
self.write('{"commits": [\n')
def write_heartbeat(self):
self.write(json.dumps({"stillalive": time.time()}) + ",\n")
class XMLClient(Client):
def write_start(self):
self.r.setHeader('content-type', 'application/xml')
self.write("<?xml version='1.0' encoding='UTF-8'?>\n<commits>")
def write_heartbeat(self):
self.write("<stillalive>%f</stillalive>\n" % (time.time()))
class SvnPubSub(resource.Resource):
isLeaf = True
clients = {'commits': [],
'dirs-changed': []}
def cc(self):
return reduce(lambda x,y: len(x)+len(y), self.clients.values())
def remove(self, c):
for k in self.clients.keys():
self.clients[k].remove(c)
def render_GET(self, request):
log.msg("REQUEST: %s" % (request.uri))
uri = request.uri.split('/')
request.setHeader('content-type', 'text/plain')
if len(uri) != 3 and len(uri) != 4:
request.setResponseCode(400)
return "Invalid path\n"
uuid = None
fmt = None
type = uri[1]
if len(uri) == 3:
fmt = uri[2]
else:
fmt = uri[3]
uuid = uri[2]
if type not in self.clients.keys():
request.setResponseCode(400)
return "Invalid Reuqest Type\n"
clients = {'json': JSONClient, 'xml': XMLClient}
clientCls = clients.get(fmt)
if clientCls == None:
request.setResponseCode(400)
return "Invalid Format Requested\n"
c = clientCls(self, request, uuid, fmt)
self.clients[type].append(c)
c.start()
return twisted.web.server.NOT_DONE_YET
def notifyAll(self, rev):
data = {'commits': {},
'dirs-changed': {}}
for x in ['xml', 'json']:
data['commits'][x] = rev.render_commit(x)
data['dirs-changed'][x] = rev.render_dirs_changed(x)
log.msg("COMMIT: r%d in %d paths (%d clients)" % (rev.rev,
len(rev.dirs_changed),
self.cc()))
for k in self.clients.keys():
for c in self.clients[k]:
if c.interested_in(rev.repos):
c.write_data(data[k])
def render_PUT(self, request):
request.setHeader('content-type', 'text/plain')
ip = request.getClientIP()
if ip != "127.0.0.1":
request.setResponseCode(401)
return "Access Denied"
input = request.content.read()
#import pdb;pdb.set_trace()
#print "input: %s" % (input)
r = json.loads(input)
rev = Revision(r)
self.notifyAll(rev)
return "Ok"
def svnpubsub_server():
root = static.File("/dev/null")
s = SvnPubSub()
root.putChild("dirs-changed", s)
root.putChild("commits", s)
root.putChild("commit", s)
return server.Site(root)
if __name__ == "__main__":
log.startLogging(sys.stdout)
# Port 2069 "HTTP Event Port", whatever, sounds good to me
reactor.listenTCP(2069, svnpubsub_server())
reactor.run()
``` |
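A quick way to poke at the server above from another process is sketched below (host and port are assumptions). The URL layout follows `render_GET`: `/<type>/<format>` or `/<type>/<repository-uuid>/<format>`, where `<type>` is `commits` or `dirs-changed` and `<format>` is `json` or `xml`.
```python
# Hypothetical smoke test: stream the JSON commit feed line by line.
import urllib2

feed = urllib2.urlopen('http://127.0.0.1:2069/commits/json')
while True:
    line = feed.readline()
    if not line:
        break
    print(line.rstrip())   # heartbeat entries look like {"stillalive": <timestamp>},
```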
{
"source": "jmckenna/ZOO-Project",
"score": 2
} |
#### File: echo-py/cgi-env/echo_service.py
```python
import zoo
import sys
import osgeo.ogr
import osgeo.ogr as ogr
def echo(conf,inputs,outputs):
if "a" in inputs:
outputs["a"]["value"]=inputs["a"]["value"]
if "mimeType" in inputs["a"]:
outputs["a"]["mimeType"]=inputs["a"]["mimeType"]
if "mediaType" in inputs["a"]:
outputs["a"]["mediaType"]=inputs["a"]["mediaType"]
if "encoding" in inputs["a"]:
outputs["a"]["encoding"]=inputs["a"]["encoding"]
else:
outputs["a"]["value"]="Empty"
if "b" in inputs:
if "cache_file" in inputs["b"]:
ds = osgeo.ogr.Open(inputs["b"]["cache_file"])
path=""
if outputs["b"]["mimeType"]=="application/json":
path=conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".json"
out_ds = ogr.GetDriverByName("GeoJSON").CopyDataSource(ds, conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".json")
else:
path=conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".xml"
out_ds = ogr.GetDriverByName("GML").CopyDataSource(ds, conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".xml")
else:
try:
import json
lobj=json.loads(inputs["b"]["value"])
path=conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".json"
f = open(path, "w+")
json.dump(lobj,f)
outputs["b"]["mimeType"]="application/json"
except Exception as e:
print("*******",file=sys.stderr)
print(e,file=sys.stderr)
print("*******",file=sys.stderr)
path=conf["main"]["tmpPath"]+"/result-"+conf["lenv"]["usid"]+".xml"
f = open(path, "w+")
f.write(inputs["b"]["value"])
f.close()
outputs["b"]["mimeType"]="text/xml"
outputs["b"]["generated_file"]=path
else:
outputs["b"]["value"]="Empty"
if "c" in inputs:
outputs["c"]["value"]=inputs["c"]["value"]
else:
outputs["c"]["value"]="Empty"
if "pause" in inputs:
import time
nb_sleep=inputs["pause"]["value"]
for i in range(4):
conf["lenv"]["message"]="Sleeping for "+inputs["pause"]["value"]+" seconds ("+str(i)+"/4)"
zoo.update_status(conf,25*(i+1))
time.sleep((float(inputs["pause"]["value"])*25)/100)
return zoo.SERVICE_SUCCEEDED
```
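Inside a running ZOO-Kernel the `conf`, `inputs` and `outputs` dictionaries are provided for you, so the following is only a rough sketch of calling the service by hand with made-up values. It skips the `b` branch, which needs OGR-readable data, and assumes the `zoo` module is importable, which is normally only true inside the kernel.
```python
# Hypothetical direct call to echo(); all paths and values are invented.
conf = {"main": {"tmpPath": "/tmp"}, "lenv": {"usid": "test-1234", "message": ""}}
inputs = {
    "a": {"value": "hello", "mimeType": "text/plain"},
    "c": {"value": "world"},
}
outputs = {"a": {}, "b": {}, "c": {}}

status = echo(conf, inputs, outputs)
assert status == zoo.SERVICE_SUCCEEDED
print(outputs["a"]["value"], outputs["b"]["value"], outputs["c"]["value"])
# -> hello Empty world
```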
#### File: openoffice/cgi-env/Exporter.py
```python
import uno
import getopt, sys
from unohelper import Base, systemPathToFileUrl, absolutize
from com.sun.star.beans import PropertyValue
from com.sun.star.script import CannotConvertException
from com.sun.star.lang import IllegalArgumentException
from com.sun.star.task import ErrorCodeIOException
from com.sun.star.io import IOException, XOutputStream
class OutputStream( Base, XOutputStream ):
def __init__( self ):
self.closed = 0
def closeOutput(self):
self.closed = 1
def writeBytes( self, seq ):
sys.stdout.write( seq.value )
def flush( self ):
pass
def OdtConverter(conf,inputs,outputs):
# get the uno component context from the PyUNO runtime
localContext = uno.getComponentContext()
# create the UnoUrlResolver
# on a single line
resolver = localContext.ServiceManager.createInstanceWithContext ("com.sun.star.bridge.UnoUrlResolver", localContext )
# connect to the running office
ctx = resolver.resolve( conf["oo"]["server"].replace("::","=")+";urp;StarOffice.ComponentContext" )
smgr = ctx.ServiceManager
# get the central desktop object
desktop = smgr.createInstanceWithContext( "com.sun.star.frame.Desktop",ctx)
# get the file name
adressDoc=systemPathToFileUrl(conf["main"]["dataPath"]+"/"+inputs["InputDoc"]["value"])
propFich=PropertyValue("Hidden", 0, True, 0),
myDocument=0
try:
myDocument = desktop.loadComponentFromURL(adressDoc,"_blank",0,propFich)
except CannotConvertException, e:
        print >> sys.stderr, 'Unable to convert the file for the following reasons: \n'
print >> sys.stderr, e
sys.exit(0)
except IllegalArgumentException, e:
        print >> sys.stderr, 'Unable to convert the file for the following reasons: \n'
print >> sys.stderr, e
sys.exit(0)
outputDoc=systemPathToFileUrl(conf["main"]["tmpPath"]+"/"+inputs["OutputDoc"]["value"])
tmp=inputs["OutputDoc"]["value"].split('.');
outputFormat={"pdf": "writer_pdf_Export", "html": "HTML (StarWriter)","odt": "writer8","doc": "MS Word 97","rtf": "Rich Text Format"}
    if tmp[1] in outputFormat:
        filterName = outputFormat[tmp[1]]
        prop1Fich = (
            PropertyValue( "FilterName" , 0, filterName , 0 ),
            PropertyValue( "Overwrite" , 0, True , 0 )
        )
myDocument.storeToURL(outputDoc,prop1Fich)
myDocument.close(True)
ctx.ServiceManager
outputs["OutputedDocument"]={"value": inputs["OutputDoc"]["value"],"dataType": "string"}
return 3
```
#### File: open-api/server/subscriber.py
```python
from sys import stdout, stdin
import sys
import threading
import redis
import json
import os
mThreads=[]
r=None
if "ZOO_REDIS_HOST" in os.environ:
r = redis.Redis(host=os.environ["ZOO_REDIS_HOST"], port=6379, db=0)
else:
r = redis.Redis(host='redis', port=6379, db=0)
def send(t):
# send string to web page
stdout.write(t+'\n')
stdout.flush()
def listenMessages(jobID=None):
global r
p = r.pubsub()
p.subscribe(jobID)
hasSend=False
for raw_message in p.listen():
try:
send(str(raw_message["data"],'utf-8'))
hasSend=True
try:
tmp=json.loads(str(raw_message["data"],'utf-8'))
if tmp is not None and "outputs" in tmp:
sys.exit()
except Exception as e:
print(str(e))
return
except:
if not(hasSend):
send(str(raw_message["data"]))
def receive():
global n
global mThreads
while True:
t = stdin.readline().strip()
if not t:
break
t1 = t.split(" ")
if t1[0]=="SUB":
mThreads += [threading.Thread(target=listenMessages,kwargs={"jobID":t1[1]})]
mThreads[len(mThreads)-1].start()
else:
send(t)
t0 = threading.Thread(target=receive)
t0.start()
t0.join()
#for i in range(len(mThreads)):
# mThreads[i].join()
```
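The script above speaks a small line protocol on stdin/stdout: a parent process writes `SUB <jobID>` to subscribe to that job's Redis channel, and every message published on the channel is echoed back as one line (anything else is echoed unchanged). Below is a hedged sketch of driving it as a subprocess, with an invented job id and script path:
```python
# Hypothetical driver; the script location and job id are made up.
import subprocess

proc = subprocess.Popen(
    ["python3", "subscriber.py"],
    stdin=subprocess.PIPE, stdout=subprocess.PIPE, text=True)

proc.stdin.write("SUB 0123456789\n")       # subscribe to the channel for job 0123456789
proc.stdin.flush()

for line in proc.stdout:                   # each Redis message arrives as one line
    print("status update:", line.strip())
```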
#### File: status/cgi-env/service.py
```python
def demo(conf,inputs,outputs):
import zoo,time
i=0
while i < 100:
conf["lenv"]["message"]="Step "+str(i)
zoo.update_status(conf,i)
time.sleep(0.5)
i+=1
conf["lenv"]["message"]=zoo._("Error executing the service")
return zoo.SERVICE_FAILED
def demo1(conf,inputs,outputs):
conf["lenv"]["message"]=zoo._("Error executing the service")
return zoo.SERVICE_FAILED
``` |
{
"source": "jmcker/monica2neo4j",
"score": 3
} |
#### File: monica2neo4j/monica_client/client.py
```python
import urllib.parse
import requests
DEFAULT_URL = 'https://app.monicahq.com/api'
DEFAULT_VERSION = '1.0'
class MonicaApiError(Exception):
pass
class MonicaApiClient():
def __init__(self, token, base_url = DEFAULT_URL, version = DEFAULT_VERSION):
self.token = token
self.base_url = base_url
self.version = version
try:
self.me()
except Exception as e:
raise MonicaApiError('Could not access the Monica API. Is your login correct?', e)
def headers(self):
return {
'Content-Type': 'application/json',
'Authorization': f'Bearer {self.token}'
}
def process_json(self, resp):
if (len(resp.history) > 0 and 'api' not in resp.url):
raise MonicaApiError('Monica redirected away from /api. Login information may be invalid')
content_type = resp.headers.get('Content-Type', '')
if ('json' not in content_type):
raise MonicaApiError(f'Endpoint did not return JSON (got {content_type})')
try:
resp_json = resp.json()
except Exception as e:
raise MonicaApiError(f'JSON parse failed: {e}', e)
if ('error' in resp_json):
e = resp_json['error']
code = e['error_code']
msg = e['message']
raise MonicaApiError(f'API error code {code}: {msg}')
return resp_json
def paged_resp(self, next_page_callback):
page = 1
total_pages = 1
while (page <= total_pages):
page_resp = next_page_callback(page)
if ('meta' not in page_resp or 'last_page' not in page_resp['meta']):
raise MonicaApiError('Paged result did not contain paging meta information')
yield page_resp
total_pages = page_resp['meta']['last_page']
page += 1
def get(self, endpoint, **kwargs):
url = urllib.parse.urljoin(self.base_url, endpoint)
param_str = urllib.parse.urlencode(kwargs)
print(f'GET {url}?{param_str}')
try:
resp = requests.get(url, headers=self.headers(), params=kwargs)
except Exception as e:
raise MonicaApiError(f'Request failed: {e}', e)
return self.process_json(resp)
def me(self):
return self.get('me')
def contacts(self, use_iter=True):
if (use_iter):
return self.contacts_iter()
return list(self.contacts_iter())
def tags(self, use_iter=True):
if (use_iter):
return self.tags_iter()
return list(self.tags_iter())
def contacts_iter(self):
next_page_callback = lambda page_num: self.get('contacts', page=page_num)
for page in self.paged_resp(next_page_callback):
yield from page['data']
def tags_iter(self):
next_page_callback = lambda page_num: self.get('tags', page=page_num)
for page in self.paged_resp(next_page_callback):
yield from page['data']
``` |
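A short usage sketch (the token is a placeholder and the printed field is just whatever the Monica API returns): the constructor calls `me()` to validate the credentials, after which contacts and tags can be consumed lazily as generators or materialized into lists.
```python
# Hypothetical usage; replace the placeholder token with a real Monica API token.
client = MonicaApiClient(token="<your-monica-api-token>")

for contact in client.contacts():           # generator; pages are fetched on demand
    print(contact.get("id"))

all_tags = client.tags(use_iter=False)      # force a full list instead of a generator
print(len(all_tags), "tags")
```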
{
"source": "jmcker/OSC-Control---ETC-Eos",
"score": 3
} |
#### File: jmcker/OSC-Control---ETC-Eos/connect.py
```python
import sys
from OSC import OSCClient, OSCMessage
def main(args):
# Default host and port
HOST = '192.168.1.8'
PORT = 3032
    if len(args) == 2:
        HOST = args[0]
        PORT = int(args[1])
    elif len(args) == 1:
        HOST = args[0]
    elif len(args) > 2:
        errorPrint("OSC Go button accepts at most 2 arguments.\n" + str(len(args)) + " were provided.")
client = OSCClient()
client.connect((HOST, PORT))
client.close();
print
print "Connected"
print
exit();
def errorPrint(message, standardErr = ""):
print "\n\n"
print message
if standardErr:
print "Error message: ", standardErr
print
print "Press ENTER to exit..."
raw_input()
exit()
if __name__ == "__main__":
main(sys.argv[1:])
``` |
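The script above only opens and closes the connection; to make the console do something you would send an OSC message before closing. A hedged sketch with the same pyOSC library follows — the `/eos/key/go_0` address is taken from ETC's Eos OSC documentation as the Go key, but verify it against your console's show control settings:
```python
# Hypothetical example: press the Eos "Go" key over OSC. Host, port and
# address are assumptions; adjust them for your rig.
from OSC import OSCClient, OSCMessage

client = OSCClient()
client.connect(('192.168.1.8', 3032))

msg = OSCMessage("/eos/key/go_0")
msg.append(1.0)          # key down
client.send(msg)

client.close()
```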
{
"source": "jmcker/qtnodes",
"score": 3
} |
#### File: qtnodes/qtnodes/node.py
```python
import uuid
from .qtchooser import QtCore, QtGui, QtWidgets
from .helpers import getTextSize
from .knob import Knob, InputKnob, OutputKnob
from .exceptions import DuplicateKnobNameError
class Node(QtWidgets.QGraphicsItem):
"""A Node is a container for a header and 0-n Knobs.
It can be created, removed and modified by the user in the UI.
"""
def __init__(self, scene = None, **kwargs):
super(Node, self).__init__(**kwargs)
if (scene):
scene.addItem(self)
# This unique id is useful for serialization/reconstruction.
self.uuid = str(uuid.uuid4())
self.header = None
self.x = 0
self.y = 0
self.w = 10
self.h = 10
self.margin = 6
self.roundness = 0
self.fillColor = QtGui.QColor(220, 220, 220)
# General configuration.
self.setFlag(QtWidgets.QGraphicsItem.ItemIsSelectable)
self.setFlag(QtWidgets.QGraphicsItem.ItemIsMovable)
self.setCursor(QtCore.Qt.SizeAllCursor)
self.setAcceptHoverEvents(True)
self.setAcceptTouchEvents(True)
self.setAcceptDrops(True)
def knobs(self, cls=None):
"""Return a list of childItems that are Knob objects.
If the optional `cls` is specified, return only Knobs of that class.
This is useful e.g. to get all InputKnobs or OutputKnobs.
"""
knobs = []
for child in self.childItems():
if isinstance(child, Knob):
knobs.append(child)
if cls:
            knobs = [k for k in knobs if k.__class__ is cls]
return knobs
def knob(self, name):
"""Return matching Knob by its name, None otherwise."""
for knob in self.knobs():
if knob.name == name:
return knob
return None
def boundingRect(self):
"""Return the bounding box of the Node, limited in height to its Header.
This is so that the drag & drop sensitive area for the Node is only
active when hovering its Header, as otherwise there would be conflicts
with the hover events for the Node's Knobs.
"""
rect = QtCore.QRectF(self.x,
self.y,
self.w,
self.header.h)
return rect
def updateSizeForChildren(self):
"""Adjust width and height as needed for header and knobs."""
def adjustHeight():
"""Adjust height to fit header and all knobs."""
knobs = [c for c in self.childItems() if isinstance(c, Knob)]
knobsHeight = sum([k.h + self.margin for k in knobs])
heightNeeded = self.header.h + knobsHeight + self.margin
self.h = heightNeeded
def adjustWidth():
"""Adjust width as needed for the widest child item."""
headerWidth = (self.margin + getTextSize(self.header.text).width())
knobs = [c for c in self.childItems() if isinstance(c, Knob)]
knobWidths = [k.w + self.margin + getTextSize(k.displayName).width()
for k in knobs]
maxWidth = max([headerWidth] + knobWidths)
self.w = maxWidth + self.margin
adjustWidth()
adjustHeight()
def addHeader(self, header):
"""Assign the given header and adjust the Node's size for it."""
self.header = header
header.setPos(self.pos())
header.setParentItem(self)
self.updateSizeForChildren()
def addKnob(self, knob):
"""Add the given Knob to this Node.
A Knob must have a unique name, meaning there can be no duplicates within
a Node (the displayNames are not constrained though).
Assign ourselves as the Knob's parent item (which also will put it onto
the current scene, if not yet done) and adjust or size for it.
The position of the Knob is set relative to this Node and depends on it
either being an Input- or OutputKnob.
"""
knobNames = [k.name for k in self.knobs()]
if knob.name in knobNames:
raise DuplicateKnobNameError(
"Knob names must be unique, but {0} already exists."
.format(knob.name))
children = [c for c in self.childItems()]
yOffset = sum([c.h + self.margin for c in children])
xOffset = self.margin / 2
knob.setParentItem(self)
knob.margin = self.margin
self.updateSizeForChildren()
bbox = self.boundingRect()
if isinstance(knob, OutputKnob):
knob.setPos(bbox.right() - knob.w + xOffset, yOffset)
elif isinstance(knob, InputKnob):
knob.setPos(bbox.left() - xOffset, yOffset)
def removeKnob(self, knob):
"""Remove the Knob reference to this node and resize."""
knob.setParentItem(None)
self.updateSizeForChildren()
def paint(self, painter, option, widget):
"""Draw the Node's container rectangle."""
painter.setBrush(QtGui.QBrush(self.fillColor))
painter.setPen(QtGui.QPen(QtCore.Qt.NoPen))
# The bounding box is only as high as the header (we do this
# to limit the area that is drag-enabled). Accommodate for that.
bbox = self.boundingRect()
painter.drawRoundedRect(self.x,
self.y,
bbox.width(),
self.h,
self.roundness,
self.roundness)
def mouseMoveEvent(self, event):
"""Update selected item's (and children's) positions as needed.
We assume here that only Nodes can be selected.
We cannot just update our own childItems, since we are using
RubberBandDrag, and that would lead to otherwise e.g. Edges
visually lose their connection until an attached Node is moved
individually.
"""
nodes = self.scene().selectedItems()
for node in nodes:
for knob in node.knobs():
for edge in knob.edges:
edge.updatePath()
super(Node, self).mouseMoveEvent(event)
def destroy(self):
"""Remove this Node, its Header, Knobs and connected Edges."""
print("destroy node:", self)
self.header.destroy()
for knob in self.knobs():
knob.destroy()
scene = self.scene()
scene.removeItem(self)
del self
``` |
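For orientation, a scene with one Node might be assembled roughly as follows. This is only a sketch: it assumes the package ships a `Header` class (used by `addHeader` above) and that `Header` and the knob classes accept the keyword arguments shown, which is not confirmed by this file.
```python
# Hypothetical scene setup; constructor arguments are assumptions.
import sys
from qtnodes.qtchooser import QtWidgets
from qtnodes.header import Header          # assumed module path
from qtnodes.node import Node
from qtnodes.knob import InputKnob, OutputKnob

app = QtWidgets.QApplication(sys.argv)
scene = QtWidgets.QGraphicsScene()

node = Node(scene=scene)
node.addHeader(Header(node=node, text="Multiply"))   # signature assumed
node.addKnob(InputKnob(name="x"))                    # kwargs assumed
node.addKnob(OutputKnob(name="result"))

view = QtWidgets.QGraphicsView(scene)
view.show()
sys.exit(app.exec_())
```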
{
"source": "jmckib/magtag_quotes_calendar",
"score": 3
} |
#### File: jmckib/magtag_quotes_calendar/quotes_calendar.py
```python
import json
import random
import time
from adafruit_magtag.magtag import MagTag
weekdays = ("Mon", "Tues", "Wed", "Thurs", "Fri", "Sat", "Sun")
months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
def main():
magtag = MagTag()
# Date label
magtag.add_text(
text_font="/fonts/SUPERSCR-24.pcf",
text_position=(130, 6),
text_anchor_point=(0.5,0),
is_data=False,
)
now = time.localtime()
magtag.set_background("/images/months/background-" + months[now.tm_mon - 1].lower() + ".bmp")
quotes = json.load(open("quotes.json"))
SEED = 1
random.seed(SEED)
quotes = sorted(quotes, key=lambda i: random.random())
# Start over every 6 months.
quote = quotes[(now.tm_yday - 1) % 183]
has_author = "author" in quote
def suffix(d):
return 'th' if 11<=d<=13 else {1:'st',2:'nd',3:'rd'}.get(d%10, 'th')
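    # Illustrative values for the ordinal-suffix helper above:
    # suffix(1) == 'st', suffix(2) == 'nd', suffix(3) == 'rd',
    # suffix(11) == 'th', suffix(21) == 'st'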
magtag.set_text("%s, %s %s%s" % (weekdays[now.tm_wday], months[now.tm_mon - 1], now.tm_mday, suffix(now.tm_mday)), auto_refresh=False)
position_no_author = (magtag.graphics.display.width // 2, magtag.graphics.display.height // 2 + 15 + quote.get("height-offset", 0))
position_with_author = (magtag.graphics.display.width // 2, magtag.graphics.display.height // 2 + 8 + quote.get("height-offset", 0))
# Quote label
magtag.add_text(
text_font="/fonts/hellovetica-8.pcf",
text_wrap=0 if "no-text-wrap" in quote else quote.get("text-wrap", 46),
line_spacing=1.2,
text_position=position_with_author if has_author else position_no_author,
text_anchor_point=(0.5, 0.5),
is_data=False,
)
magtag.set_text(quote["quote"], index=1, auto_refresh=False)
if has_author:
magtag.add_text(
text_font="/fonts/Arial-Italic-12.pcf",
line_spacing=1.2,
text_position=(magtag.graphics.display.width // 2 - 5, magtag.graphics.display.height - 10),
text_anchor_point=(0.5, 0.5),
is_data=False,
)
magtag.set_text("- " + quote["author"], index=2, auto_refresh=False)
magtag.refresh()
magtag.peripherals.deinit()
``` |
{
"source": "jmckib/tangopy",
"score": 2
} |
#### File: tangopy/tango/tango.py
```python
import glob
import json
from jsonschema import validate
import os
import requests
from requests.auth import HTTPBasicAuth
schema_files = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'json_schemas/*.schema.json')
# [:-12] to remove .schema.json
name_to_schema = {
os.path.basename(schema_file)[:-12]: json.load(open(schema_file))
for schema_file in glob.glob(schema_files)}
class TangoAPIClient(object):
def __init__(self, user, password):
self.endpoint = 'https://sandbox.tangocard.com'
self.auth = HTTPBasicAuth(user, password)
def _handle_errors(self, uri, response_json, data=None):
if not response_json['success']:
msg = ("Tango API Error: uri=%s, data=%s, response=%s"
% (uri, data, response_json))
raise Exception(msg)
def _request_get_json(self, uri):
response = requests.get(self.endpoint + uri, auth=self.auth)
response_json = response.json()
self._handle_errors(uri, response_json)
return response_json
def _request_post_json(self, uri, data=None):
response = requests.post(self.endpoint + uri,
auth=self.auth,
data=json.dumps(data))
response_json = response.json()
self._handle_errors(uri, response_json, data=data)
return response_json
def create_account(self, customer, identifier, email):
data = {
'customer': customer,
'identifier': identifier,
'email': email,
}
schema = name_to_schema['account_create']
validate(data, schema)
return self._request_post_json('/raas/v1/accounts', data=data)
def get_account(self, customer, identifier):
return self._request_get_json(
'/raas/v1/accounts/%s/%s' % (customer, identifier))
def get_rewards(self):
return self._request_get_json('/raas/v1/rewards')
def register_credit_card(self, customer, identifier, client_ip, card_data):
data = {
'customer': customer,
'account_identifier': identifier,
'client_ip': client_ip,
'credit_card': card_data,
}
schema = name_to_schema['cc_register']
validate(data, schema)
return self._request_post_json('/raas/v1/cc_register', data=data)
def fund_account(self, customer, identifier, amount, client_ip,
security_code, cc_token):
data = {
'customer': customer,
'account_identifier': identifier,
'client_ip': client_ip,
'amount': amount,
'security_code': security_code,
'cc_token': cc_token,
}
schema = name_to_schema['cc_fund']
validate(data, schema)
return self._request_post_json('/raas/v1/cc_fund', data=data)
def place_order(self, customer, identifier,
recipient_name, recipient_email, sku, amount,
reward_message, reward_subject, reward_from):
data = {
'customer': customer,
'account_identifier': identifier,
'recipient': {
'name': recipient_name,
'email': recipient_email,
},
'sku': sku,
'amount': amount,
'reward_message': reward_message,
'reward_subject': reward_subject,
'reward_from': reward_from,
}
schema = name_to_schema['order_create']
validate(data, schema)
return self._request_post_json('/raas/v1/orders', data=data)
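# Minimal usage sketch (illustrative only; the credentials, customer,
# identifier, and SKU values below are placeholders, not real Tango data):
#
#   client = TangoAPIClient('sandbox_user', 'sandbox_password')
#   rewards = client.get_rewards()
#   client.create_account('my_customer', 'account-1', 'user@example.com')
#   client.place_order('my_customer', 'account-1', 'Jane Doe',
#                      'jane@example.com', 'SOME-SKU', 500,
#                      'Thanks!', 'Your reward', 'The Team')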
``` |
{
"source": "jmckinstry/doodles",
"score": 3
} |
#### File: doodles/fizzbuzz/third.py
```python
s_fastest_precomputed_string = """1
2
Fizz
4
Buzz
Fizz
7
8
Fizz
Buzz
11
Fizz
13
14
FizzBuzz
16
17
Fizz
19
Buzz
Fizz
22
23
Fizz
Buzz
26
Fizz
28
29
FizzBuzz
31
32
Fizz
34
Buzz
Fizz
37
38
Fizz
Buzz
41
Fizz
43
44
FizzBuzz
46
47
Fizz
49
Buzz
Fizz
52
53
Fizz
Buzz
56
Fizz
58
59
FizzBuzz
61
62
Fizz
64
Buzz
Fizz
67
68
Fizz
Buzz
71
Fizz
73
74
FizzBuzz
76
77
Fizz
79
Buzz
Fizz
82
83
Fizz
Buzz
86
Fizz
88
89
FizzBuzz
91
92
Fizz
94
Buzz
Fizz
97
98
Fizz
Buzz"""
def fizzbuzz_fastest():
"""
First thing I look for is if precomputation is acceptable.
The space of this problem is surprisingly small (A max of sum_of_word_lengths * output_line_count),
which seems to be a good candidate for precomputation.
I'll use a module-specific top-level variable to keep python from evaluating the string each time (I'm actually not certain it will, and I'd do more research in the normal programming day)
"""
print (s_fastest_precomputed_string)
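# A sketch of how the precomputed string above could be regenerated once,
# kept commented out so the module still pays no per-call cost:
#
#   def build_precomputed_string(n=100):
#       lines = []
#       for i in range(1, n + 1):
#           word = ("Fizz" if i % 3 == 0 else "") + ("Buzz" if i % 5 == 0 else "")
#           lines.append(word or str(i))
#       return "\n".join(lines)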
def fizzbuzz_smallest():
"""
Since we're counting, we MUST have a counter
Since we MUST have the strings, we'll pre-allocate those (otherwise they're sitting in program memory instead of execution memory and we're copying them anyways)
- (as an aside, I realize that 'fizz' and 'buzz' have common suffixes and could be reduced together using 'fi', 'bu', and 'zz' to save two bytes, but I think
- the code to stitch the words back together will take more than two bytes of execution space so it's not worth it in this case)
Since we MUST have the divisors, we'll store those via program execution space
    FizzBuzz is better than brain-dead because of the possibility of multiple divisors applying to the counter, so do I use a single test for *any* divisors and then branch,
or do I use multiple bools (ie: C-style masking) so I don't duplicate work?
Research says: Python doesn't have masks, and single bools are likely to be smaller than any comparison check, so I'll do tests for all divisors and then act accordingly.
Each test takes a bool to process, but it tosses it away afterwards, so checks should be smallest
"""
x = 1
while x <= 100:
#switch(x): # Python doesn't have switch, research says to use a dictionary for single values, we're multiple values here, do it by hand
if x % 3 and x % 5:
print (x)
elif (x % 3 == 0) and (x % 5 == 0):
print ("FizzBuzz")
elif x % 3 == 0:
print ("Fizz")
else:
print ("Buzz")
x += 1
print ("fizzbuzz_fastest(): ")
fizzbuzz_fastest()
print ("")
print ("fizzbuzz_smallest(): ")
fizzbuzz_smallest()
``` |
{
"source": "jmclong/random-fourier-features-pytorch",
"score": 3
} |
#### File: random-fourier-features-pytorch/rff/layers.py
```python
import torch.nn as nn
from typing import Optional
from torch import Tensor
import rff
class GaussianEncoding(nn.Module):
"""Layer for mapping coordinates using random Fourier features"""
def __init__(self, sigma: Optional[float] = None,
input_size: Optional[float] = None,
encoded_size: Optional[float] = None,
b: Optional[Tensor] = None):
r"""
Args:
sigma (Optional[float]): standard deviation
input_size (Optional[float]): the number of input dimensions
encoded_size (Optional[float]): the number of dimensions the `b` matrix maps to
b (Optional[Tensor], optional): Optionally specify a :attr:`b` matrix already sampled
Raises:
ValueError:
If :attr:`b` is provided and one of :attr:`sigma`, :attr:`input_size`,
or :attr:`encoded_size` is provided. If :attr:`b` is not provided and one of
:attr:`sigma`, :attr:`input_size`, or :attr:`encoded_size` is not provided.
"""
super().__init__()
if b is None:
if sigma is None or input_size is None or encoded_size is None:
raise ValueError(
'Arguments "sigma," "input_size," and "encoded_size" are required.')
b = rff.functional.sample_b(sigma, (encoded_size, input_size))
elif sigma is not None or input_size is not None or encoded_size is not None:
raise ValueError('Only specify the "b" argument when using it.')
self.b = nn.parameter.Parameter(b, requires_grad=False)
def forward(self, v: Tensor) -> Tensor:
r"""Computes :math:`\gamma(\mathbf{v}) = (\cos{2 \pi \mathbf{B} \mathbf{v}} , \sin{2 \pi \mathbf{B} \mathbf{v}})`
Args:
v (Tensor): input tensor of shape :math:`(N, *, \text{input_size})`
Returns:
Tensor: Tensor mapping using random fourier features of shape :math:`(N, *, 2 \cdot \text{encoded_size})`
"""
return rff.functional.gaussian_encoding(v, self.b)
class BasicEncoding(nn.Module):
"""Layer for mapping coordinates using the basic encoding"""
def forward(self, v: Tensor) -> Tensor:
r"""Computes :math:`\gamma(\mathbf{v}) = (\cos{2 \pi \mathbf{v}} , \sin{2 \pi \mathbf{v}})`
Args:
v (Tensor): input tensor of shape :math:`(N, *, \text{input_size})`
Returns:
Tensor: mapped tensor of shape :math:`(N, *, 2 \cdot \text{input_size})`
"""
return rff.functional.basic_encoding(v)
class PositionalEncoding(nn.Module):
"""Layer for mapping coordinates using the positional encoding"""
def __init__(self, sigma: float, m: int):
r"""
Args:
sigma (float): frequency constant
m (int): number of frequencies to map to
"""
super().__init__()
self.sigma = sigma
self.m = m
def forward(self, v: Tensor) -> Tensor:
r"""Computes :math:`\gamma(\mathbf{v}) = (\dots, \cos{2 \pi \sigma^{(j/m)} \mathbf{v}} , \sin{2 \pi \sigma^{(j/m)} \mathbf{v}}, \dots)`
Args:
v (Tensor): input tensor of shape :math:`(N, *, \text{input_size})`
Returns:
Tensor: mapped tensor of shape :math:`(N, *, 2 \cdot m \cdot \text{input_size})`
"""
return rff.functional.positional_encoding(v, self.sigma, self.m)
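# Minimal usage sketch (the sigma/size values are arbitrary examples, not
# recommendations; assumes this module is importable as rff.layers):
#
#   import torch
#   encoding = GaussianEncoding(sigma=10.0, input_size=2, encoded_size=256)
#   coords = torch.rand(1024, 2)   # (N, input_size)
#   features = encoding(coords)    # (N, 2 * encoded_size) == (1024, 512)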
``` |
{
"source": "jmcmahon443/nvidia-jetson-competition",
"score": 3
} |
#### File: src/master_of_beats/rhythm.py
```python
import sys
import numpy as np
class Models:
MODELS=[]
    # TODO: add Constants for method indexes and create an array of methods
class BaseModel(object):
def __init__(self, offset=0):
self.observations=[offset] # beats, sohuld we move that out
self.steps = [0] # intervals between beats
self.predictions=[-1 ,-1]
self.errors=[0] # observations(t) - predictions(t)
self.idx=1
self.offset=0
self.bpm=60 # Not used
self.confidence=1
#self.window (window of interest)
def current_pred(self):
return self.predictions[self.idx]
def adjust(self, t):
self.offset+=t
def update(self,t_stmp):
print(self.idx)
last_i=self.idx-1
self.steps.append(t_stmp-self.observations[last_i]) #this way first one is always 0
self.observations.append(t_stmp)
self.calc_err(last_i)
self.errors.append(self.calc_err(last_i))
print(self.errors[-1])
print(self.steps[-1])
# gets n new predictions, appends them, and returns the new ones
def predict(self,n):
return self.repredict(n, self.idx)
# last=self.observations[self.idx-1]
# next_step=self.fit_fun(1)
# #print("next_step: ", next_step)
# #print("prev.steps: ", self.steps[-10:])
# current_predictions = [(self.fit_fun(i) + last) for i in range(1,n+1)]
# self.predictions.extend(current_predictions)
#print(current_predictions)
# return current_predictions
# when we want to update n predictions starting from after existing index i
def repredict(self, n, idx):
current_predictions = []
last=self.observations[idx-1]
for i in range(1,n+1):
pred=self.fit_fun(i) + last
current_predictions.append(pred)
if idx+i < len(self.predictions):
self.predictions[i] = pred
else:
self.predictions.append(pred)
#print(self.predictions)
return current_predictions
def calc_err(self,i):
#last=self.idx-1
#self.errors.append(self.observations[last]-self.predictions[last])
return self.observations[i]-self.predictions[i]
# should be defined by the inheritng class
def fit_fun(self,i): #lambda?
pass
def eval(self): #some statistics stuff
pass
class AvgFit(BaseModel):
def __init__(self,offset):
# f=mx+p
            super(Models.AvgFit, self).__init__(offset)
self.m=0
def update(self,t_stmp):
            super(Models.AvgFit, self).update(t_stmp)
            l = len(self.steps)
#also ignore the first 0
self.m = (self.m *(l-2)+ self.steps[-1])/(l-1) # should be faster than mean function
def fit_fun(self,i):
#constant part currently comes from the selected index of beats
return self.m*(i) #+self.observations[self.i]
class RunningAvgFit(BaseModel):
def __init__(self,offset, window):
# f=mx+p
super(Models.RunningAvgFit, self).__init__(offset)
self.m_n=0 #running average
self.n=window # window size for avg
def update(self,t_stmp):
super(Models.RunningAvgFit, self).update(t_stmp)
#also ignore the first 0
# last -n elements stops at first element if it's out of bounds
self.m_n = np.sum(self.steps[-self.n:])/self.n # should be faster than mean function
def fit_fun(self,i):
#constant part currently comes from the selected index of beats
return self.m_n*(i) #+self.observations[self.i
def set_window(self,n):
self.n=n
class BinaryLinearFit(BaseModel):
def __init__(self,offset):
super(BinaryLinearFit, self).__init__(offset)
class MultiLinearFit(BaseModel):
def __init__(self,offset):
super(MultiLinearFit, self).__init__(offset)
# Like this one
class WindowsOfN(BaseModel):
def __init__(self,offset):
            super(Models.WindowsOfN, self).__init__(offset)
self.l=1 # windows size
self.T = 0 #time
self.N=[0]
self.hist=10 #history of beats
#rearrange
def update(self,t_stmp):
super(Models.WindowsOfN, self).update(t_stmp)
            T = self.idx - self.idx % self.l  # time, aligned to the start of the current window
            folded = np.array(self.observations[T - self.l * self.hist:T]).reshape(self.hist, self.l)
            self.N = np.mean(folded, axis=0)  # should i just pop and push? TODO: fill with -1's until we get enough data
#TODO: increase/decrease window. Error method
#also ignore the first 0
# last -n elements stops at first element if it's out of bounds
def fit_fun(self,i):
#constant part currently comes from the selected index of beats
            return self.N[i % self.l] * (i // self.l + 1) #+self.observations[self.i
def eval_window(self):
pass
def adjust_window(self):
pass
def set_window(self,n):
self.N=n
class SecondOrderFit(BaseModel):
def __init__(self,offset):
super(SecondOrderFit, self).__init__(offset)
class KalmanFilter(BaseModel):
def __init__(self,offset):
super(KalmanFilter, self).__init__(offset)
```
#### File: nvidia-jetson-competition/test_scripts/gpio_test.py
```python
from twisted.internet import reactor
# Import this package objects
from sysfs.gpio import Controller, OUTPUT, INPUT, RISING
# Refer to your chip GPIO numbers and set them this way
Controller.available_pins = [1, 2, 3, 4]
# Allocate a pin as Output signal
pin = Controller.alloc_pin(1, OUTPUT)
pin.set() # Sets pin to high logic level
pin.reset() # Sets pin to low logic level
pin.read() # Reads pin logic level
# Allocate a pin as simple Input signal
pin = Controller.alloc_pin(1, INPUT)
pin.read() # Reads pin logic level
# Allocate a pin as level triggered Input signal
def pin_changed(number, state):
print("Pin '%d' changed to %d state" % (number, state))
pin = Controller.alloc_pin(1, INPUT, pin_changed, RISING)
pin.read() # Reads pin logic level
```
#### File: nvidia-jetson-competition/test_scripts/live_pitch.py
```python
import pyaudio
import wave, time
import numpy as np
import aubio
CHUNK = 512
FORMAT = pyaudio.paFloat32
CHANNELS = 1
RATE = 44100
RECORD_SECONDS = 5
WAVE_OUTPUT_FILENAME = "output.wav"
p = pyaudio.PyAudio()
frames = []
# Pitch
tolerance = 0.8
downsample = 1
win_s = 1024 // downsample # fft size
hop_s = 512 // downsample # hop size
# pitch_o = pitch("yin", win_s, hop_s, RATE)
# pitch_o.set_unit("midi")
# pitch_o.set_tolerance(tolerance)
a_tempo = aubio.tempo("default", win_s, hop_s, RATE)
def pyaudio_callback(_in_data, _frame_count, _time_info, _status):
audio_data = np.fromstring(_in_data, dtype=np.float32)
is_beat = a_tempo(audio_data)
if is_beat:
#samples += click
print('tick') # avoid print in audio callback
#audiobuf = samples.tobytes()
return (audio_data, pyaudio.paContinue)
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK,
stream_callback=pyaudio_callback)
print("* recording")
while stream.is_active():
# buffer = stream.read(CHUNK)
# frames.append(buffer)
#
# signal = np.fromstring(buffer, dtype=np.float32)
#
# pitch = pitch_o(signal)[0]
# confidence = pitch_o.get_confidence()
#
# print('tick')
time.sleep(0.1)
# pyaudio callback
print("* done recording")
stream.stop_stream()
stream.close()
p.terminate()
``` |
{
"source": "jmcmellen/sameeas",
"score": 3
} |
#### File: jmcmellen/sameeas/audioroutines.py
```python
import math, struct, random, array
pi = math.pi
def getFIRrectFilterCoeff(fc, sampRate, filterLen=20):
'Calculate FIR lowpass filter weights using hamming window'
'y(n) = w0 * x(n) + w1 * x(n-1) + ...'
ft = float(fc) / sampRate
#print ft
m = float(filterLen - 1)
weights = []
for n in range(filterLen):
try:
weight = math.sin( 2 * pi * ft * (n - (m / 2))) / (pi * (n - (m / 2)))
hamming = 0.54 - 0.46 * math.cos( 2 * pi * n / m)
weight = weight * hamming
except:
weight = 2 * ft
hamming = 0.54 - 0.46 * math.cos( 2 * pi * n / m)
weight = weight * hamming
weights.append(weight)
return weights
def filterPCMaudio(fc, sampRate, filterLen, sampWidth, numCh, data):
'Run samples through a filter'
samples = array.array('h', data)
filtered = ""
w = getFIRrectFilterCoeff(fc, sampRate, filterLen)
for n in range(len(w), len(samples) - len(w)):
acc = 0
for i in range(len(w)):
acc += w[i] * samples[n - i]
filtered += struct.pack('<h', int(math.floor(acc)))
return filtered
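# Usage sketch (illustrative parameter values): low-pass filter a 1 kHz tone.
# generateSimplePCMToneData is defined further down in this module.
#
#   tone = generateSimplePCMToneData(1000, 1000, 44100, 1.0, 16, -10, 1)
#   filtered = filterPCMaudio(3000, 44100, 20, 16, 1, tone)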
def recursiveFilterPCMaudio(fc, sampRate, sampWidth, numCh, data):
'Predefined filter values, Butterworth lowpass filter'
a0 = 0.02008337 #0.01658193
a1 = 0.04016673 #0.03316386
a2 = a0
b1 = -1.56101808 #-1.60413018
b2 = 0.64135154 #0.67045791
samples = array.array('h', data)
filtered = data[0:2]
y = [0, 0, 0]
for n in range(2, len(samples) - 2):
sample = (a0 * samples[n] + a1 * samples[n -1] + a2 * samples[n-2] -
b1 * y[1] - b2 * y[2])
y[2] = y[1]
y[1] = sample
filtered += struct.pack('<h', int(math.floor(sample)))
return filtered
def bpButterworthFilter6(fc, sampRate, sampWidth, numCh, data):
a0 = 1
a1 = -4.16740087e+00
a2 = 9.56715918e+00
a3 = -1.52777374e+01
a4 = 1.88165959e+01
a5 = -1.84592133e+01
a6 = 1.46959044e+01
a7 = -9.50055587e+00
a8 = 4.97057565e+00
a9 = -2.04987349e+00
a10 = 6.42775774e-01
a11 = -1.38591530e-01
a12 = 1.72096260e-02
b0 = 3.36990647e-03
b1 = 0
b2 = -2.02194388e-02
b3 = 0
b4 = 5.05485971e-02
b5 = -2.15330343e-17
b6 = -6.73981294e-02
b7 = 2.15330343e-17
b8 = 5.05485971e-02
b9 = 0
b10 = -2.02194388e-02
b11 = 0
b12 = 3.36990647e-03
samples = array.array('h', data)
print len(samples)
filtered = data[0:12]
y = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
for n in range(12, len(samples) - 12):
sample = (a0 * samples[n] + a1 * samples[n -1] + a2 * samples[n-2] + a3 * samples[n-3] +
a4 * samples[n-4] + a5 * samples[n-5] + a6 * samples[n-6] + a7 * samples[n-7] +
a8 * samples[n-8] + a9 * samples[n-9] + a10 * samples[n-10] + a11 * samples[n-11] +
a12 * samples[n-12] -
b0 * y[0] - b1 * y[1] - b2 * y[2] - b3 * y[3] - b4 * y[4] - b5 * y[5] -
b6 * y[6] - b7 * y[7] - b8 * y[8] - b9 * y[9] - b10 * y[10] - b11 * y[11] -
b12 * y[12] )
y[12] = y[11]
y[11] = y[10]
y[10] = y[9]
y[9] = y[8]
y[8] = y[7]
y[7] = y[6]
y[6] = y[5]
y[5] = y[4]
y[4] = y[3]
y[3] = y[2]
y[2] = y[1]
y[1] = sample
filtered += struct.pack('<h', int(math.floor(sample)))
return filtered
def convertdbFStoInt( level, sampWidth):
return math.pow(10, (float(level) / 20)) * 32767
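# Worked example: convertdbFStoInt(-10, 16) == 10**(-10/20) * 32767 ~= 10362,
# i.e. a -10 dBFS peak mapped onto the signed 16-bit sample range.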
def generateSimplePCMToneData(startfreq, endfreq, sampRate, duration, sampWidth, peakLevel, numCh):
"""Generate a string of binary data formatted as a PCM sample stream. Freq is in Hz,
sampRate is in Samples per second, duration is in seconds, sampWidth is in bits,
peakLevel is in dBFS, and numCh is either 1 or 2."""
phase = 0 * pi
level = convertdbFStoInt(peakLevel, sampWidth)
pcm_data = ''
freq = startfreq
slope = 0.5 * (endfreq - startfreq) / float(sampRate * duration)
fade_len = int(0.001 * sampRate) * 0
numSamples = int( round( sampRate * duration))
#print duration * sampRate
for i in range(0, numSamples):
freq = slope * i + startfreq
fade = 1.0
if i < fade_len:
fade = 0.5 * (1 - math.cos(pi * i / (fade_len - 1)))
elif i > (numSamples - fade_len):
fade = 0.5 * (1 - math.cos(pi * (numSamples - i) / (fade_len - 1)))
for ch in range(numCh):
sample = int(( fade * level * math.sin(
(freq * 2 * pi * i)/ sampRate + phase) ))
#print sample
pcm_data += struct.pack('<h', sample)
return pcm_data
def generateDualTonePCMData(freq1, freq2, sampRate, duration, sampWidth, peakLevel, numCh):
"""Generate a string of binary data formatted as a PCM sample stream. Mix two freq
together such as in alert tones or DTMF"""
phase = 0 * pi
level = convertdbFStoInt(peakLevel, sampWidth)
pcm_data = ''
fade_len = int(0.001 * sampRate) * 0
numSamples = int( round( sampRate * duration))
#print duration * sampRate
for i in range(0, numSamples):
fade = 1.0
if i < fade_len:
fade = 0.5 * (1 - math.cos(pi * i / (fade_len - 1)))
elif i > (numSamples - fade_len):
fade = 0.5 * (1 - math.cos(pi * (numSamples - i) / (fade_len - 1)))
for ch in range(numCh):
sample = int(( fade * level * (0.5 * math.sin(
(freq1 * 2 * pi * i)/ sampRate + phase) +
0.5 * math.sin((freq2 * 2 * pi * i)/ sampRate + phase) )))
#print sample
pcm_data += struct.pack('<h', sample)
return pcm_data
def main():
import wave
numCh = 1
peakLevel = -10
sampWidth = 16
sampRate = 44100
file = wave.open('testchirp.wav', 'rb')
samples = file.readframes( file.getnframes())
file.close()
#data = generateDualTonePCMData(853, 960, sampRate, 8, sampWidth, peakLevel, numCh)
data = bpButterworthFilter6(0, sampRate, sampWidth, numCh, samples)
fileout = wave.open( 'test.wav', 'wb')
fileout.setparams( (numCh, sampWidth/8, sampRate, sampRate, 'NONE', '') )
fileout.writeframes(data)
fileout.close()
if __name__ == "__main__":
main()
``` |
{
"source": "jmcmillan1/smack-portfolio",
"score": 2
} |
#### File: share/smack/frontend.py
```python
import os
import sys
import re
import json
from .utils import temporary_file, try_command, temporary_directory,\
llvm_exact_bin, smack_root, smack_header_path, smack_headers,\
smack_lib
from .versions import RUST_VERSION
# Needed for cargo operations
try:
import toml
except ImportError:
pass
def languages():
"""A dictionary of languages per file extension."""
return {
'c': 'c',
'i': 'c',
'cc': 'cxx',
'cpp': 'cxx',
'm': 'objc',
'd': 'd',
'json': 'json',
'svcomp': 'svcomp',
'bc': 'llvm',
'll': 'llvm',
'bpl': 'boogie',
'f': 'fortran',
'for': 'fortran',
'f90': 'fortran',
'f95': 'fortran',
'f03': 'fortran',
'rs': 'rust',
'toml': 'cargo',
}
def frontends():
"""A dictionary of front-ends per language."""
# Avoid circular import
from .svcomp.utils import svcomp_frontend
return {
'c': clang_frontend,
'cxx': clang_plusplus_frontend,
'objc': clang_objc_frontend,
'd': d_frontend,
'json': json_compilation_database_frontend,
'svcomp': svcomp_frontend,
'llvm': llvm_frontend,
'boogie': boogie_frontend,
'fortran': fortran_frontend,
'rust': rust_frontend,
'cargo': cargo_frontend,
}
def extra_libs():
"""A dictionary of extra SMACK libraries required by languages."""
return {
'fortran': fortran_build_libs,
'cxx': cplusplus_build_libs,
'rust': rust_build_libs,
# coming soon - libraries for OBJC, Rust, Swift, etc.
}
def extern_entry_points(args, bcs):
new_bcs = []
for bc in bcs:
new_bc = temporary_file(
os.path.splitext(
os.path.basename(bc))[0],
'.bc',
args)
cmd = ['-in', bc, '-out', new_bc]
for ep in args.entry_points:
cmd += ['-entry-points', ep]
try_command(['extern-statics'] + cmd, console=True)
new_bcs.append(new_bc)
return new_bcs
def default_clang_compile_command(args, lib=False):
cmd = [
llvm_exact_bin('clang'),
'-c',
'-emit-llvm',
'-O0',
'-g',
'-gcolumn-info'
]
# Starting from LLVM 5.0, we need the following two options
# in order to enable optimization passes.
# See: https://stackoverflow.com/a/46753969.
cmd += ['-Xclang', '-disable-O0-optnone']
cmd += ['-I' + path for path in smack_headers(args)]
cmd += args.clang_options.split()
cmd += ['-DMEMORY_MODEL_' + args.mem_mod.upper().replace('-', '_')]
from .top import VProperty
if args.check.contains_mem_safe_props():
cmd += ['-DMEMORY_SAFETY']
if VProperty.INTEGER_OVERFLOW in args.check:
cmd += (['-fsanitize=signed-integer-overflow,shift']
if not lib else ['-DSIGNED_INTEGER_OVERFLOW_CHECK'])
if VProperty.ASSERTIONS not in args.check:
cmd += ['-DDISABLE_SMACK_ASSERTIONS']
if args.float:
cmd += ['-DFLOAT_ENABLED']
if args.pthread:
cmd += ['-DSMACK_MAX_THREADS=' + str(args.max_threads)]
if args.integer_encoding == 'bit-vector':
cmd += ['-DBIT_PRECISE']
if sys.stdout.isatty():
cmd += ['-fcolor-diagnostics']
return cmd
def compile_to_bc(input_file, compile_command, args):
"""Compile a source file to LLVM IR."""
bc = temporary_file(
os.path.splitext(
os.path.basename(input_file))[0],
'.bc',
args)
try_command(compile_command + ['-o', bc, input_file], console=True)
return bc
def d_compile_to_bc(input_file, compile_command, args):
"""Compile a D source file to LLVM IR."""
bc = temporary_file(
os.path.splitext(
os.path.basename(input_file))[0],
'.bc',
args)
try_command(compile_command + ['-of=' + bc, input_file], console=True)
return bc
def fortran_compile_to_bc(input_file, compile_command, args):
"""Compile a FORTRAN source file to LLVM IR."""
# This method only exists as a hack to get flang to work
# with SMACK. When we update to the latest flang on LLVM 5,
# this method will no longer be necessary. The hack is
# self-contained in this method.
# The Debug Info Version in flang is incompatible with
# the version that clang uses. The workaround is to use
# sed to change the file so llvm-link gives a warning
# and not an error.
# compile to human-readable format in order to tweak the IR
compile_command[1] = '-S'
ll = temporary_file(
os.path.splitext(
os.path.basename(input_file))[0],
'.ll',
args)
try_command(compile_command + ['-o', ll, input_file], console=True)
# change the throw level of 'Debug Info Version' from error to warning in
# the IR
try_command(
['sed',
'-i',
's/i32 1, !\"Debug Info Version\"/i32 2, !\"Debug Info Version\"/g',
ll])
try_command([llvm_exact_bin('llvm-as'), ll])
try_command(['rm', ll])
bc = '.'.join(ll.split('.')[:-1] + ['bc'])
return bc
# Frontend functions here
def llvm_frontend(input_file, args):
"""Return LLVM IR file. Exists for symmetry with other frontends."""
return input_file
def clang_frontend(input_file, args):
"""Generate LLVM IR from C-language source(s)."""
compile_command = default_clang_compile_command(args)
return compile_to_bc(input_file, compile_command, args)
def clang_plusplus_frontend(input_file, args):
"""Generate LLVM IR from C++ language source(s)."""
compile_command = default_clang_compile_command(args)
compile_command[0] = llvm_exact_bin('clang++')
return compile_to_bc(input_file, compile_command, args)
def clang_objc_frontend(input_file, args):
"""Generate LLVM IR from Objective-C language source(s)."""
compile_command = default_clang_compile_command(args)
if sys.platform in ['linux', 'linux2']:
objc_flags = try_command(['gnustep-config', '--objc-flags'])
compile_command += objc_flags.split()
elif sys.platform == 'darwin':
sys.exit("Objective-C not yet supported on macOS")
else:
sys.exit("Objective-C not supported for this operating system.")
return compile_to_bc(input_file, compile_command, args)
def d_frontend(input_file, args):
"""Generate Boogie code from D programming language source(s)."""
# note: -g and -O0 are not used here.
# Right now, it works, and with these options, smack crashes.
compile_command = ['ldc2', '-output-ll']
compile_command += ['-I=' + path for path in smack_headers(args)]
args.entry_points += ['_Dmain']
return d_compile_to_bc(input_file, compile_command, args)
def fortran_frontend(input_file, args):
"""Generate Boogie code from Fortran language source(s)."""
# For a fortran file that includes smack.f90 as a module,
# it will not compile unless the file 'smack.mod' exists
# in the working directory. 'smack.mod' is a build artifact
# of compiling smack.f90. Therefore, the solution is to
# compile smack.f90 before the source files.
fortran_build_libs(args)
# The result of this computation will be discarded when SMACK
    # builds its libraries later.
# replace the default entry point with the fortran default 'MAIN_'
args.entry_points += ['MAIN_']
compile_command = default_clang_compile_command(args)
compile_command[0] = 'flang'
return fortran_compile_to_bc(input_file, compile_command, args)
def boogie_frontend(input_file, args):
"""Pass Boogie code to the verifier."""
if len(args.input_files) > 1:
raise RuntimeError("Expected a single Boogie file.")
with open(args.bpl_file, 'a+') as out:
with open(input_file) as f:
out.write(f.read())
def json_compilation_database_frontend(input_file, args):
"""Generate Boogie code from a JSON compilation database."""
if len(args.input_files) > 1:
raise RuntimeError("Expected a single JSON compilation database.")
output_flags = re.compile(r"-o ([^ ]*)[.]o\b")
optimization_flags = re.compile(r"-O[1-9]\b")
with open(input_file) as f:
for cc in json.load(f):
if 'objects' in cc:
# TODO what to do when there are multiple linkings?
bit_codes = [re.sub('[.]o$', '.bc', f) for f in cc['objects']]
try_command([
llvm_exact_bin('llvm-link'),
'-o',
args.bc_file
] + bit_codes)
try_command([
llvm_exact_bin('llvm-link'),
'-o',
args.linked_bc_file,
args.bc_file
] + default_build_libs(args))
else:
command = cc['command']
command = output_flags.sub(r"-o \1.bc", command)
command = optimization_flags.sub("-O0", command)
command = command + " -emit-llvm"
try_command(command.split(), cc['directory'], console=True)
# import here to avoid a circular import
from .top import llvm_to_bpl
llvm_to_bpl(args)
def default_cargo_compile_command(args):
compile_command = [
'cargo',
'+' + RUST_VERSION,
'build']
return compile_command + args
def cargo_frontend(input_file, args):
"""Generate LLVM bitcode from a cargo build."""
def find_target(config, options=None):
target_name = config['package']['name']
# TODO: Shaobo: target selection can be done via Cargo options.
# But we don't capture Cargo options for now.
if options is None:
if 'lib' in config and 'name' in config['lib']:
target_name = config['lib']['name']
return target_name.replace('-', '_')
targetdir = temporary_directory(
os.path.splitext(
os.path.basename(input_file))[0],
None,
args)
rustargs = (default_rust_compile_args(args) +
['--emit=llvm-bc', '-Clto', '-Cembed-bitcode=yes'])
compile_command = default_cargo_compile_command(
['--target-dir', targetdir, '--manifest-path', input_file])
try_command(compile_command, console=True,
env={'RUSTFLAGS': " ".join(rustargs)})
target_name = find_target(toml.load(input_file))
# Find the name of the crate's bc file
bcbase = targetdir + '/debug/deps/'
entries = os.listdir(bcbase)
bcs = []
for entry in entries:
if entry.startswith(target_name + '-') and entry.endswith('.bc'):
bcs.append(bcbase + entry)
bc_file = temporary_file(
os.path.splitext(
os.path.basename(input_file))[0],
'.bc',
args)
try_command([llvm_exact_bin('llvm-link')] + bcs + ['-o', bc_file])
return bc_file
def default_rust_compile_args(args):
return ['-A',
'unused-imports',
'-C',
'opt-level=0',
'-C',
'no-prepopulate-passes',
'-g',
'--cfg',
'verifier="smack"',
'-C',
'passes=name-anon-globals']
def default_rust_compile_command(args):
compile_command = (['rustc', '+' + RUST_VERSION] +
default_rust_compile_args(args))
return compile_command + args
def rust_build_rlib(input_file, args):
compile_command = default_rust_compile_command(
['--crate-type', 'rlib,lib'])
rlib = temporary_file(
'lib' +
os.path.splitext(
os.path.basename(input_file))[0],
'.rlib',
args)
try_command(compile_command + ['-o', rlib, input_file], console=True)
return rlib
def rust_frontend(input_file, args):
"""Generate Boogie code from Rust programming language source(s)."""
rlib = rust_build_rlib(smack_lib() + '/smack.rs', args)
compile_command = default_rust_compile_command(
['--emit=llvm-bc', '--extern', 'smack=' + rlib])
return compile_to_bc(input_file, compile_command, args)
# Build libs functions here
def default_build_libs(args):
"""Generate LLVM bitcodes for SMACK libraries."""
bitcodes = []
libs = ['smack.c', 'stdlib.c', 'errno.c', 'smack-rust.c']
if args.pthread:
libs += ['pthread.c']
if args.strings:
libs += ['string.c']
if args.float:
libs += ['math.c']
libs += ['fenv.c']
compile_command = default_clang_compile_command(args, True)
for c in [os.path.join(smack_lib(), c) for c in libs]:
bc = compile_to_bc(c, compile_command, args)
bitcodes.append(bc)
return bitcodes
def fortran_build_libs(args):
"""Generate FORTRAN-specific LLVM bitcodes for SMACK libraries."""
bitcodes = []
libs = ['smack.f90']
compile_command = default_clang_compile_command(args)
compile_command[0] = 'flang'
for c in [os.path.join(smack_lib(), c) for c in libs]:
bc = fortran_compile_to_bc(c, compile_command, args)
bitcodes.append(bc)
return bitcodes
def cplusplus_build_libs(args):
"""Generate C++ specific LLVM bitcodes for SMACK libraries."""
bitcodes = []
libs = ['smack.cpp']
compile_command = default_clang_compile_command(args, True)
compile_command[0] = llvm_exact_bin('clang++')
for c in [os.path.join(smack_lib(), c) for c in libs]:
bc = compile_to_bc(c, compile_command, args)
bitcodes.append(bc)
return bitcodes
def rust_build_libs(args):
"""Generate Rust specific LLVM bitcodes for SMACK libraries."""
bitcodes = []
libs = ['smack.rs']
compile_command = default_rust_compile_command(
['--emit=llvm-bc', '--crate-type', 'lib'])
for c in [os.path.join(smack_lib(), c) for c in libs]:
bc = compile_to_bc(c, compile_command, args)
bitcodes.append(bc)
return bitcodes
# llvm link files
def link_bc_files(bitcodes, libs, args):
"""Link generated LLVM bitcode and relevant smack libraries."""
smack_libs = default_build_libs(args)
for build_lib in libs:
smack_libs += build_lib(args)
bitcodes = extern_entry_points(args, bitcodes)
try_command([llvm_exact_bin('llvm-link'), '-o', args.bc_file] + bitcodes)
try_command([llvm_exact_bin('llvm-link'), '-o', args.linked_bc_file,
args.bc_file] + smack_libs)
# import here to avoid a circular import
from .top import llvm_to_bpl
llvm_to_bpl(args)
```
#### File: smack-portfolio/test/regtest.py
```python
from os import path
from multiprocessing.pool import ThreadPool
import multiprocessing
import os
import logging
import yaml
import psutil
import argparse
import subprocess
import re
import glob
import time
import sys
import shlex
OVERRIDE_FIELDS = ['verifiers', 'memory', 'time-limit', 'memory-limit', 'skip']
APPEND_FIELDS = ['flags', 'checkbpl', 'checkout']
LANGUAGES = {'c': {'*.c'},
'cargo': {'Cargo.toml'},
'cplusplus': {'*.cpp'},
'rust': {'*.rs'},
'llvm-ir': {"*.ll"}}
def bold(text):
return '\033[1m' + text + '\033[0m'
def red(text, log_file):
if log_file:
return text
else:
return '\033[0;31m' + text + '\033[0m'
def green(text, log_file):
if log_file:
return text
else:
return '\033[0;32m' + text + '\033[0m'
def get_result(output):
if re.search(r'SMACK timed out', output):
return 'timeout'
elif re.search(r'SMACK found no errors', output):
return 'verified'
elif re.search(r'SMACK found an error', output):
return 'error'
else:
return 'unknown'
def merge(metadata, yamldata):
for key in OVERRIDE_FIELDS:
if key in yamldata:
metadata[key] = yamldata[key]
for key in APPEND_FIELDS:
if key in yamldata:
if key in metadata:
metadata[key] += yamldata[key]
else:
metadata[key] = yamldata[key]
def metadata(file):
m = {}
prefix = []
for d in path.dirname(file).split('/'):
prefix += [d]
yaml_file = path.join(*(prefix + ['config.yml']))
if path.isfile(yaml_file):
with open(yaml_file, "r") as f:
data = yaml.safe_load(f)
merge(m, data)
with open(file, "r") as f:
for line in f.readlines():
match = re.search(r'@skip', line)
if match:
m['skip'] = True
match = re.search(r'@flag (.*)', line)
if match:
m['flags'] += shlex.split(match.group(1).strip())
match = re.search(r'@expect (.*)', line)
if match:
m['expect'] = match.group(1).strip()
match = re.search(r'@checkbpl (.*)', line)
if match:
m['checkbpl'].append(match.group(1).strip())
match = re.search(r'@checkout (.*)', line)
if match:
m['checkout'].append(match.group(1).strip())
if not m['skip']:
if 'expect' not in m:
print(red("WARNING: @expect MISSING IN %s" % file, None))
m['expect'] = 'verified'
if not m['expect'] in ['verified', 'error', 'timeout', 'unknown']:
print(red("WARNING: unexpected @expect annotation '%s'" %
m['expect'], None))
return m
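# A regression test annotates itself with special comments that metadata()
# parses; a C test might contain, for example (values illustrative):
#   // @expect error
#   // @flag --unroll=3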
# integer constants
PASSED = 0
TIMEDOUT = 1
UNKNOWN = 2
FAILED = -1
def process_test(
cmd,
test,
memory,
verifier,
expect,
checkbpl,
checkout,
log_file):
"""
    This is the worker function for each process. It runs the supplied test
    and returns a formatted, colorized string describing the test result.
    :return: A formatted result string for this test run.
"""
str_result = "{0:>20}\n".format(test)
str_result += "{0:>20} {1:>10} :".format(memory, verifier)
t0 = time.time()
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True)
out, err = p.communicate()
elapsed = time.time() - t0
status = 0
bplfile = cmd[cmd.index('-bpl') + 1]
with open(os.devnull, 'w') as devnull:
for f in checkbpl:
with open(bplfile) as bpl:
checker = subprocess.Popen(
shlex.split(f), stdin=bpl, stdout=devnull, stderr=devnull)
checker.wait()
status = status or checker.returncode
for f in checkout:
checker = subprocess.Popen(
shlex.split(f),
stdin=subprocess.PIPE,
stdout=devnull,
stderr=devnull)
checker.communicate(input=out.encode())
status = status or checker.returncode
# get the test results
result = get_result(out + err)
if result == expect and status == 0:
str_result += green('PASSED ', log_file)
elif result == 'timeout':
str_result += red('TIMEOUT', log_file)
elif result == 'unknown':
str_result += red('UNKNOWN', log_file)
else:
str_result += red('FAILED ', log_file)
str_result += ' [%.2fs]' % round(elapsed, 2)
return str_result
passed = failed = timeouts = unknowns = 0
def tally_result(result):
"""
Tallies the result of each worker. This will only be called by the main
thread.
"""
# log the info
logging.info(result)
global passed, failed, timeouts, unknowns
if "PASSED" in result:
passed += 1
elif "FAILED" in result:
failed += 1
elif "TIMEOUT" in result:
timeouts += 1
elif "UNKNOWN" in result:
unknowns += 1
def get_extensions(languages):
languages = list(languages.split(','))
extensions = set()
for language in languages:
extensions |= LANGUAGES[language]
return extensions
def get_tests(folder, extensions):
tests = []
for ext in extensions:
tests_path = path.dirname(__file__)
tests.extend(glob.glob(path.join(tests_path, folder, ext)))
tests.sort()
return tests
def main():
"""
Main entry point for the test suite.
"""
t0 = time.time()
num_cpus = multiprocessing.cpu_count()
mem_total = psutil.virtual_memory().total / (1024 * 1024)
# configure the CLI
parser = argparse.ArgumentParser()
parser.add_argument(
"--exhaustive",
help="check all configurations on all examples",
action="store_true")
parser.add_argument(
"--all-configs",
help="check all configurations per example",
action="store_true")
parser.add_argument(
"--all-examples",
help="check all examples",
action="store_true")
parser.add_argument("--folder", action="store", default="**/**", type=str,
help="sets the regressions folder to run")
parser.add_argument(
"--threads",
action="store",
dest="n_threads",
default=num_cpus,
type=int,
help='''execute regressions using the selected number of threads in
parallel''')
parser.add_argument(
"--log",
action="store",
dest="log_level",
default="DEBUG",
type=str,
help="sets the logging level (DEBUG, INFO, WARNING)")
parser.add_argument(
"--output-log",
action="store",
dest="log_path",
type=str,
help="sets the output log path. (std out by default)")
parser.add_argument(
"--languages",
action="store",
default="c",
choices=list(
LANGUAGES.keys()),
        help='''Comma separated list of languages to test. C[c],C++[cplusplus],
Rust[rust]''')
args = parser.parse_args()
if args.exhaustive:
args.all_examples = True
args.all_configs = True
extensions = get_extensions(args.languages)
tests = get_tests(args.folder, extensions)
# configure the logging
log_format = ''
log_level = logging.DEBUG
# add more log levels later (if needed)
if args.log_level.upper() == "INFO":
log_level = logging.INFO
elif args.log_level.upper() == "WARNING":
log_level = logging.WARNING
# if the user supplied a log path, write the logs to that file.
# otherwise, write the logs to std out.
if args.log_path:
logging.basicConfig(
filename=args.log_path,
format=log_format,
level=log_level)
else:
logging.basicConfig(format=log_format, level=log_level)
logging.debug("Creating Pool with '%d' Workers" % args.n_threads)
p = ThreadPool(processes=args.n_threads)
try:
# start the tests
logging.info("Running regression tests...")
# start processing the tests.
results = []
for test in tests:
# get the meta data for this test
meta = metadata(test)
if meta['memory-limit'] > mem_total:
continue
if meta['skip'] is True:
continue
if meta['skip'] is not False and not args.all_examples:
continue
# build up the subprocess command
cmd = ['smack', test]
cmd += ['--time-limit', str(meta['time-limit'])]
cmd += meta['flags']
for memory in meta['memory'][:100 if args.all_configs else 1]:
cmd += ['--mem-mod=' + memory]
for verifier in (meta['verifiers']
[:100 if args.all_configs else 1]):
name = path.splitext(path.basename(test))[0]
cmd += ['--verifier=' + verifier]
cmd += ['-bc', "%s-%s-%s.bc" % (name, memory, verifier)]
cmd += ['-bpl', "%s-%s-%s.bpl" % (name, memory, verifier)]
r = p.apply_async(
process_test,
args=(
cmd[:],
test,
memory,
verifier,
meta['expect'],
meta['checkbpl'],
meta['checkout'],
args.log_path,
),
callback=tally_result)
results.append(r)
# keep the main thread active while there are active workers
for r in results:
r.wait()
except KeyboardInterrupt:
logging.debug("Caught KeyboardInterrupt, terminating workers")
p.terminate() # terminate any remaining workers
p.join()
else:
logging.debug("Quitting normally")
# close the pool. this prevents any more tasks from being submitted.
p.close()
p.join() # wait for all workers to finish their tasks
# log the elapsed time
elapsed_time = time.time() - t0
logging.info(' ELAPSED TIME [%.2fs]' % round(elapsed_time, 2))
# log the test results
logging.info(' PASSED count: %d' % passed)
logging.info(' FAILED count: %d' % failed)
logging.info(' TIMEOUT count: %d' % timeouts)
logging.info(' UNKNOWN count: %d' % unknowns)
# if there are any failed tests or tests that timed out, set the system
# exit code to a failure status
if timeouts > 0 or failed > 0 or unknowns > 0:
sys.exit(1)
if __name__ == "__main__":
main()
``` |
{
"source": "JMCobar/THESIS",
"score": 3
} |
#### File: JMCobar/THESIS/5-Test.py
```python
import dash
from dash.dependencies import Output, Input
import dash_core_components as dcc
import dash_html_components as html
import plotly
import random
import plotly.graph_objs as go
import numpy as np
from collections import deque
X = deque(maxlen=20)
X.append(1)
Y = deque(maxlen=20)
Y.append(1)
lastline = []
app = dash.Dash(__name__)
app.layout = html.Div(
[
dcc.Graph(id='live-graph', animate=True),
dcc.Interval(
id='graph-update',
interval=1*1000
),
]
)
@app.callback(Output('live-graph', 'figure'),
[Input('graph-update', 'n_intervals')])
def update_graph_scatter(input_data):
global lastline
line = open('sampleText.txt').readlines()
st = set(lastline)
anotherline = [ele for ele in line if ele not in st]
if anotherline:
for eachline in anotherline:
cleanline = eachline.split(',')
X.append(int(cleanline[0]))
Y.append(int(cleanline[1]))
lastline = line
data = plotly.graph_objs.Scatter(
x=list(X),
y=list(Y),
name='Scatter',
mode= 'lines+markers'
)
return {'data': [data],'layout' : go.Layout(xaxis=dict(range=[min(X),max(X)]),
yaxis=dict(range=[min(Y),max(Y)]),)}
if __name__ == '__main__':
app.run_server(debug=True)
``` |
{
"source": "jmcollis/GitSavvy",
"score": 2
} |
#### File: GitSavvy/common/global_events.py
```python
import sublime
from sublime_plugin import EventListener, WindowCommand
from . import util
from ..core.settings import SettingsMixin
class GsInterfaceFocusEventListener(EventListener):
"""
Trigger handlers for view life-cycle events.
"""
def on_activated(self, view):
# status bar is handled by GsStatusBarEventListener
util.view.refresh_gitsavvy(view, refresh_status_bar=False)
def on_close(self, view):
util.view.handle_closed_view(view)
git_view_syntax = {
'MERGE_MSG': 'Packages/GitSavvy/syntax/make_commit.sublime-syntax',
'COMMIT_EDITMSG': 'Packages/GitSavvy/syntax/make_commit.sublime-syntax',
'PULLREQ_EDITMSG': 'Packages/GitSavvy/syntax/make_commit.sublime-syntax',
'git-rebase-todo': 'Packages/GitSavvy/syntax/rebase_interactive.sublime-syntax',
}
class GitCommandFromTerminal(EventListener, SettingsMixin):
def on_load(self, view):
if view.file_name():
name = view.file_name().split("/")[-1]
if name in git_view_syntax.keys():
syntax_file = git_view_syntax[name]
if "COMMIT_EDITMSG" == name and self.savvy_settings.get("use_syntax_for_commit_editmsg"):
syntax_file = util.file.get_syntax_for_file("COMMIT_EDITMSG")
view.set_syntax_file(syntax_file)
view.settings().set("git_savvy.{}_view".format(name), True)
view.set_scratch(True)
def on_pre_close(self, view):
if view.file_name():
name = view.file_name().split("/")[-1]
if name in git_view_syntax.keys():
view.run_command("save")
PROJECT_MSG = """
<body>
<p>Add the key <code>"GitSavvy"</code> as follows</p>
{<br>
"settings": {<br>
"GitSavvy": {<br>
// GitSavvy settings go here<br>
<br>
}<br>
}
</body>
"""
class KeyboardSettingsListener(EventListener):
def on_post_window_command(self, window, command, args):
if command == "edit_settings":
base = args.get("base_file", "")
if base.endswith("sublime-keymap") and "/GitSavvy/Default" in base:
w = sublime.active_window()
w.focus_group(0)
w.run_command("open_file", {"file": "${packages}/GitSavvy/Default.sublime-keymap"})
w.focus_group(1)
elif base.endswith("GitSavvy.sublime-settings"):
w = sublime.active_window()
view = w.active_view()
sublime.set_timeout(
lambda: view.show_popup(PROJECT_MSG), 1000)
class GsEditSettingsCommand(WindowCommand):
"""
    For some reason, the command palette doesn't trigger `on_post_window_command` for
    dev versions of Sublime Text. The command palette would call `gs_edit_settings` and
subsequently trigger `on_post_window_command`.
"""
def run(self, **kwargs):
self.window.run_command("edit_settings", kwargs)
class GsEditProjectSettingsCommand(WindowCommand):
"""
    For some reason, the command palette doesn't trigger `on_post_window_command` for
    dev versions of Sublime Text. The command palette would call `gs_edit_settings` and
subsequently trigger `on_post_window_command`.
"""
def run(self):
project_file_name = self.window.project_file_name()
project_data = self.window.project_data()
if not project_file_name or project_data is None:
sublime.error_message("No project data found.")
return
sublime.set_timeout(lambda: self.window.run_command("edit_settings", {
"user_file": project_file_name,
"base_file": "${packages}/GitSavvy/GitSavvy.sublime-settings"
}), 100)
```
#### File: common/util/dates.py
```python
from datetime import datetime
TEN_MINS = 600
ONE_HOUR = 3600
TWO_HOURS = 7200
ONE_DAY = 86400
def fuzzy(event, base=None, date_format=None):
if not base:
base = datetime.now()
if date_format:
event = datetime.strptime(event, date_format)
elif type(event) == str:
event = datetime.fromtimestamp(int(event))
elif type(event) == int:
event = datetime.fromtimestamp(event)
elif type(event) != datetime:
raise Exception(
"Cannot convert object `{}` to fuzzy date string".format(event))
delta = base - event
if delta.days == 0:
if delta.seconds < 60:
return "{} seconds ago".format(delta.seconds)
elif delta.seconds < 120:
return "1 min and {} secs ago".format(delta.seconds - 60)
elif delta.seconds < TEN_MINS:
return "{} mins and {} secs ago".format(
delta.seconds // 60,
delta.seconds % 60)
elif delta.seconds < ONE_HOUR:
return "{} minutes ago".format(delta.seconds // 60)
elif delta.seconds < TWO_HOURS:
return "1 hour and {} mins ago".format(
delta.seconds % ONE_HOUR // 60)
return "over {} hours ago".format(delta.seconds // ONE_HOUR)
elif delta.days < 2:
return "over a day ago"
elif delta.days < 7:
return "over {} days ago".format(delta.days)
return "{date:%b} {date.day}, {date.year}".format(date=event)
```
#### File: core/commands/fetch.py
```python
import sublime
from sublime_plugin import WindowCommand
from ..git_command import GitCommand
from ...common import util
from ..ui_mixins.quick_panel import show_remote_panel
class GsFetchCommand(WindowCommand, GitCommand):
"""
Display a panel of all git remotes for active repository and
do a `git fetch` asynchronously.
"""
def run(self, remote=None):
if remote:
return self.do_fetch(remote)
show_remote_panel(self.on_remote_selection, show_option_all=True, allow_direct=True)
def on_remote_selection(self, remote):
if not remote:
return
if remote is True:
sublime.set_timeout_async(lambda: self.do_fetch())
else:
sublime.set_timeout_async(lambda: self.do_fetch(remote))
def do_fetch(self, remote=None):
if remote is None:
self.window.status_message("Starting fetch all remotes...")
else:
self.window.status_message("Starting fetch {}...".format(remote))
self.fetch(remote)
self.window.status_message("Fetch complete.")
util.view.refresh_gitsavvy(self.window.active_view())
```
#### File: core/git_mixins/checkout_discard.py
```python
class CheckoutDiscardMixin():
def discard_all_unstaged(self):
"""
Any changes that are not staged or committed will be reverted
to their state in HEAD. Any new files will be deleted.
"""
self.git("clean", "-df")
self.git("checkout", "--", ".")
def discard_untracked_file(self, *fpaths):
"""
Given a list of absolute paths or paths relative to the repo's root,
remove the file or directory from the working tree.
"""
self.git("clean", "-df", "--", *fpaths)
def checkout_file(self, *fpaths):
"""
Given a list of absolute paths or paths relative to the repo's root,
discard any changes made to the file and revert it in the working
directory to the state it is in HEAD.
"""
self.git("checkout", "--", *fpaths)
def checkout_ref(self, ref, fpath=None):
"""
Given a ref (local branch, remote branch, tag, etc), check it out.
"""
if fpath:
self.git("checkout", ref, "--", fpath)
else:
self.git("checkout", ref)
```
#### File: core/git_mixins/ignore.py
```python
import os
from ...common import util
linesep = None
class IgnoreMixin():
def add_ignore(self, path_or_pattern):
"""
Add the provided relative path or pattern to the repo's `.gitignore` file.
"""
global linesep
if not linesep:
# Use native line ending on Windows only when `autocrlf` is set to `true`.
if os.name == "nt":
autocrlf = self.git("config", "--global", "core.autocrlf",
throw_on_stderr=False).strip() == "true"
linesep = os.linesep if autocrlf else "\n"
else:
linesep = os.linesep
gitignore = os.path.join(self.repo_path, ".gitignore")
if os.path.exists(gitignore):
with util.file.safe_open(gitignore, "r", encoding="utf-8") as fp:
ignore_lines = fp.read().splitlines()
else:
ignore_lines = []
ignore_lines += [path_or_pattern, ""]
with util.file.safe_open(gitignore, "w", encoding="utf-8", newline=linesep) as fp:
fp.write("\n".join(ignore_lines))
```
#### File: tests/mockito/mocking.py
```python
from collections import deque
import inspect
import functools
from . import invocation
from . import signature
from . import utils
from .mock_registry import mock_registry
__all__ = ['mock']
class _Dummy(object):
# We spell out `__call__` here for convenience. All other magic methods
# must be configured before use, but we want `mock`s to be callable by
# default.
def __call__(self, *args, **kwargs):
return self.__getattr__('__call__')(*args, **kwargs)
def remembered_invocation_builder(mock, method_name, *args, **kwargs):
invoc = invocation.RememberedInvocation(mock, method_name)
return invoc(*args, **kwargs)
class Mock(object):
def __init__(self, mocked_obj, strict=True, spec=None):
self.mocked_obj = mocked_obj
self.strict = strict
self.spec = spec
self.invocations = deque()
self.stubbed_invocations = deque()
self.original_methods = {}
self._signatures_store = {}
def remember(self, invocation):
self.invocations.appendleft(invocation)
def finish_stubbing(self, stubbed_invocation):
self.stubbed_invocations.appendleft(stubbed_invocation)
def clear_invocations(self):
self.invocations = deque()
# STUBBING
def get_original_method(self, method_name):
if self.spec is None:
return None
try:
return self.spec.__dict__.get(method_name)
except AttributeError:
return getattr(self.spec, method_name, None)
def set_method(self, method_name, new_method):
setattr(self.mocked_obj, method_name, new_method)
def replace_method(self, method_name, original_method):
def new_mocked_method(*args, **kwargs):
# we throw away the first argument, if it's either self or cls
if (inspect.ismethod(new_mocked_method) or
inspect.isclass(self.mocked_obj) and
not isinstance(new_mocked_method, staticmethod)):
args = args[1:]
return remembered_invocation_builder(
self, method_name, *args, **kwargs)
new_mocked_method.__name__ = method_name
if original_method:
new_mocked_method.__doc__ = original_method.__doc__
new_mocked_method.__wrapped__ = original_method
try:
new_mocked_method.__module__ = original_method.__module__
except AttributeError:
pass
if (inspect.ismethod(original_method)):
new_mocked_method = utils.newmethod(
new_mocked_method, self.mocked_obj)
if isinstance(original_method, staticmethod):
new_mocked_method = staticmethod(new_mocked_method)
elif isinstance(original_method, classmethod):
new_mocked_method = classmethod(new_mocked_method)
elif (inspect.isclass(self.mocked_obj) and # TBC: Inner classes
inspect.isclass(original_method)):
new_mocked_method = staticmethod(new_mocked_method)
self.set_method(method_name, new_mocked_method)
def stub(self, method_name):
try:
self.original_methods[method_name]
except KeyError:
original_method = self.get_original_method(method_name)
self.original_methods[method_name] = original_method
self.replace_method(method_name, original_method)
def forget_stubbed_invocation(self, invocation):
assert invocation in self.stubbed_invocations
if len(self.stubbed_invocations) == 1:
mock_registry.unstub(self.mocked_obj)
return
self.stubbed_invocations.remove(invocation)
if not any(
inv.method_name == invocation.method_name
for inv in self.stubbed_invocations
):
original_method = self.original_methods.pop(invocation.method_name)
self.restore_method(invocation.method_name, original_method)
def restore_method(self, method_name, original_method):
# If original_method is None, we *added* it to mocked_obj, so we
# must delete it here.
# If we mocked an instance, our mocked function will actually hide
# the one on its class, so we delete as well.
if (not original_method or not inspect.isclass(self.mocked_obj) and
inspect.ismethod(original_method)):
delattr(self.mocked_obj, method_name)
else:
self.set_method(method_name, original_method)
def unstub(self):
while self.original_methods:
method_name, original_method = self.original_methods.popitem()
self.restore_method(method_name, original_method)
# SPECCING
def has_method(self, method_name):
if self.spec is None:
return True
return hasattr(self.spec, method_name)
def get_signature(self, method_name):
if self.spec is None:
return None
try:
return self._signatures_store[method_name]
except KeyError:
sig = signature.get_signature(self.spec, method_name)
self._signatures_store[method_name] = sig
return sig
class _OMITTED(object):
def __repr__(self):
return 'OMITTED'
OMITTED = _OMITTED()
def mock(config_or_spec=None, spec=None, strict=OMITTED):
"""Create 'empty' objects ('Mocks').
Will create an empty unconfigured object, that you can pass
around. All interactions (method calls) will be recorded and can be
verified using :func:`verify` et.al.
    A plain `mock()` will not be `strict`, and thus all methods regardless
of the arguments will return ``None``.
.. note:: Technically all attributes will return an internal interface.
Because of that a simple ``if mock().foo:`` will surprisingly pass.
If you set strict to ``True``: ``mock(strict=True)`` all unexpected
interactions will raise an error instead.
You configure a mock using :func:`when`, :func:`when2` or :func:`expect`.
You can also very conveniently just pass in a dict here::
response = mock({'text': 'ok', 'raise_for_status': lambda: None})
You can also create an empty Mock which is specced against a given
    `spec`: ``mock(requests.Response)``. These mocks are by default strict,
    thus they raise if you try to stub a method the spec does not implement.
Mockito will also match the function signature.
You can pre-configure a specced mock as well::
response = mock({'json': lambda: {'status': 'Ok'}},
spec=requests.Response)
Mocks are by default callable. Configure the callable behavior using
`when`::
dummy = mock()
        when(dummy).__call__(1).thenReturn(2)
All other magic methods must be configured this way or they will raise an
AttributeError.
See :func:`verify` to verify your interactions after usage.
"""
if type(config_or_spec) is dict:
config = config_or_spec
else:
config = {}
spec = config_or_spec
if strict is OMITTED:
strict = False if spec is None else True
class Dummy(_Dummy):
if spec:
__class__ = spec # make isinstance work
def __getattr__(self, method_name):
if strict:
raise AttributeError(
"'Dummy' has no attribute %r configured" % method_name)
return functools.partial(
remembered_invocation_builder, theMock, method_name)
def __repr__(self):
name = 'Dummy'
if spec:
name += spec.__name__
return "<%s id=%s>" % (name, id(self))
# That's a tricky one: The object we will return is an *instance* of our
# Dummy class, but the mock we register will point and patch the class.
    # That is, so that magic methods (`__call__` etc.) can be configured.
obj = Dummy()
theMock = Mock(Dummy, strict=strict, spec=spec)
for n, v in config.items():
if inspect.isfunction(v):
invocation.StubbedInvocation(theMock, n)(Ellipsis).thenAnswer(v)
else:
setattr(obj, n, v)
mock_registry.register(obj, theMock)
return obj
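# ---------------------------------------------------------------------------
# Editor's note: a minimal, hedged usage sketch of `mock` as documented above.
# It assumes the rest of the mockito-style package (invocation, mock_registry,
# utils) is importable alongside this module; illustrative only, not part of
# the original source.
if __name__ == '__main__':
    response = mock({'text': 'ok', 'raise_for_status': lambda: None})
    assert response.text == 'ok'                  # plain values become attributes
    assert response.raise_for_status() is None    # callables become stubbed answers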
```
#### File: GitSavvy/tests/test_smart_tag.py
```python
from GitSavvy.core.commands.tag import smart_incremented_tag
import unittest
class TestSmartTag(unittest.TestCase):
def test_smart_tag(self):
self.assertEquals(smart_incremented_tag('v1.3.2', "prerelease"), 'v1.3.3-0')
self.assertEquals(smart_incremented_tag('v1.3.2', "prepatch"), 'v1.3.3-0')
self.assertEquals(smart_incremented_tag('v1.3.2', "preminor"), 'v1.4.0-0')
self.assertEquals(smart_incremented_tag('v1.3.2', "premajor"), 'v2.0.0-0')
self.assertEquals(smart_incremented_tag('v1.3.2', "patch"), 'v1.3.3')
self.assertEquals(smart_incremented_tag('v1.3.2', "minor"), 'v1.4.0')
self.assertEquals(smart_incremented_tag('v1.3.2', "major"), 'v2.0.0')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "prerelease"), 'v1.3.2-2')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "prepatch"), 'v1.3.3-0')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "preminor"), 'v1.4.0-0')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "premajor"), 'v2.0.0-0')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "patch"), 'v1.3.2')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "minor"), 'v1.4.0')
self.assertEquals(smart_incremented_tag('v1.3.2-1', "major"), 'v2.0.0')
self.assertEquals(smart_incremented_tag('v1.3.0-1', "patch"), 'v1.3.0')
self.assertEquals(smart_incremented_tag('v1.3.0-1', "minor"), 'v1.3.0')
self.assertEquals(smart_incremented_tag('v1.3.0-1', "major"), 'v2.0.0')
self.assertEquals(smart_incremented_tag('v1.0.0-1', "major"), 'v1.0.0')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "prerelease"), 'v1.3.2-rc.2')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.11', "prerelease"), 'v1.3.2-rc.12')
self.assertEquals(smart_incremented_tag('v1.3.2-beta1', "prerelease"), 'v1.3.2-beta2')
self.assertEquals(smart_incremented_tag('v1.3.2-beta9', "prerelease"), 'v1.3.2-beta10')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "prepatch"), 'v1.3.3-rc.0')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "preminor"), 'v1.4.0-rc.0')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "premajor"), 'v2.0.0-rc.0')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "patch"), 'v1.3.2')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "minor"), 'v1.4.0')
self.assertEquals(smart_incremented_tag('v1.3.2-rc.1', "major"), 'v2.0.0')
self.assertEquals(smart_incremented_tag('v1.3.0-rc.1', "patch"), 'v1.3.0')
self.assertEquals(smart_incremented_tag('v1.3.0-rc.1', "minor"), 'v1.3.0')
self.assertEquals(smart_incremented_tag('v1.3.0-rc.1', "major"), 'v2.0.0')
self.assertEquals(smart_incremented_tag('v1.0.0-rc.1', "major"), 'v1.0.0')
``` |
{
"source": "jmconroy/umap",
"score": 3
} |
#### File: umap/play_umap/utils_umap.py
```python
import pandas as pd
import umap
import umap.plot
import time
# Some plotting libraries
#import matplotlib.pyplot as plt
from bokeh.plotting import output_notebook
from bokeh.resources import INLINE
output_notebook(resources=INLINE)
def run_umap_example(feature_matrix,n_components=2,init="spectral",n_neighbors=2,category_labels=None):
t0 = time.process_time()
feature_embedding = umap.UMAP(n_components=n_components,init=init,n_neighbors=n_neighbors).fit(feature_matrix)
t1 = time.process_time()
print('Elapsed time for umap %d embedding = %f'%(n_components,t1-t0))
if category_labels is not None:
hover_df = pd.DataFrame(category_labels, columns=['category'])
# For interactive plotting use
# fig = umap.plot.interactive(tfidf_embedding, labels=dataset.target, hover_data=hover_df, point_size=1)
# show(fig)
if n_components==2:
if category_labels is not None:
umap.plot.points(feature_embedding, labels=hover_df['category'])
else:
umap.plot.points(feature_embedding)
return feature_embedding,category_labels
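# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch, not part of the original file. It
# assumes umap-learn (with its plotting extras) is installed, as imported
# above; the data below is random and purely illustrative.
if __name__ == '__main__':
    import numpy as np
    rng = np.random.RandomState(42)
    X = rng.rand(100, 20)                    # 100 samples, 20 features
    labels = rng.randint(0, 3, size=100)     # three arbitrary categories
    embedding, _ = run_umap_example(X, n_components=2, n_neighbors=10,
                                    category_labels=labels)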
``` |
{
"source": "jmconroy/vectorizers",
"score": 2
} |
#### File: vectorizers/vectorizers/utils_Zhu_Ghodsi.py
```python
import numpy as np
import scipy
from scipy.stats import norm
from sklearn.decomposition import TruncatedSVD
from mvlearn.embed.utils import select_dimension
def ZG_number_of_topics(doc_term, elbow_index=1, n_topics_upper_bound = 1000):
"""
    Determines an appropriate number of topics to use, following
    Zhu and Ghodsi as implemented in mvlearn's select_dimension.
References
----------
[#0] https://graspologic.readthedocs.io/en/latest/reference/embed.html?highlight=select_dimension#graspologic.embed.select_svd
.. [#1] <NAME>. and <NAME>. (2006).
Automatic dimensionality selection from the scree plot via the use of
profile likelihood. Computational Statistics & Data Analysis, 51(2),
pp.918-930.
"""
k_guess = min(n_topics_upper_bound, min(doc_term.shape) - 1)
if k_guess < 2:
# there are too few topics here.
n_topics = 1
return n_topics
# SVD
svd = TruncatedSVD(n_components=k_guess)
svd.fit(doc_term)
# Extract singular values
s = svd.singular_values_[:, None]
    # Use Zhu and Ghodsi as implemented in mvlearn's select_dimension
#turn s into a 1D array and sort it
s1 = s.ravel()
s1.sort()
elbows=select_dimension(s1,n_elbows=max(2,elbow_index+1))
#Take the elbow requested
    try:
        n_topics = elbows[0][elbow_index]
    except IndexError:
        print('Warning: the %dth elbow was not found; using dimension %d instead.' % (elbow_index, k_guess))
        n_topics = k_guess
return n_topics
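# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch, not part of the original file. It
# assumes scikit-learn and mvlearn are installed (as imported above); the
# document-term matrix is synthetic and purely illustrative.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    doc_term = rng.poisson(0.2, size=(200, 500)).astype(float)  # 200 docs x 500 terms
    print('estimated number of topics:', ZG_number_of_topics(doc_term))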
``` |
{
"source": "jmcook1186/biosnicar-py",
"score": 3
} |
#### File: app/api/app.py
```python
import sys
sys.path.append("../../src")
sys.path.append("./src")
sys.path.append("../app/api")
sys.path.append("./api")
from flask import Flask, request, Response
from flask_cors import CORS, cross_origin
import os
import time
import numpy as np
import requests
import matplotlib.pyplot as plt
from setup_snicar import *
from classes import *
from column_OPs import *
from biooptical_funcs import *
from toon_rt_solver import toon_solver
from adding_doubling_solver import adding_doubling_solver
from validate_inputs import *
from display import *
app = Flask(__name__, static_folder="../build", static_url_path='/')
# set root url for host as a dynamic variable
port = int(os.environ.get("PORT", 5000))
host = 'http://localhost:5000/'
success = False
# enable cross domain requests
# because flask is on port 5000, react on port 3000
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = ['Content-Type']
@app.route('/')
def root():
"""
Sets up default route to connect backend to frontend.
Args:
None
Returns:
renders index.html to browser
"""
return app.send_static_file('index.html')
# the actual func to run
@app.route('/app/model', methods=['POST'])
@cross_origin()
def run_snicar():
"""
Runs BioSNICAR model and renders output to webpage.
This function takes the use inputs from the POST request submitted
via the React frontend. These inputs are passed to the BioSNCAR
model and a plot of spectral albedo is saved to file.
The broadband albedo, rounded to 2 decimal places, is printed on the
figure.
Args:
        None, but receives args as a POST http request via the
        /app/model route.
Returns:
res: http response code
"""
input_file = "app/api/inputs.yaml"
lyr_typ = int(request.json['lyr_typ'])
dz = float(request.json['dz'])
r_eff = int(request.json['r_eff'])
rho = int(request.json['rho'])
bc = int(request.json['bc'])
glacier_algae = int(request.json['glacier_algae'])
snow_algae = int(request.json['snow_algae'])
zenith = int(request.json['zenith'])
print(lyr_typ)
print(dz)
print(r_eff)
print(rho)
# first build classes from config file and validate their contents
(
ice,
illumination,
rt_config,
model_config,
plot_config,
impurities,
) = setup_snicar(input_file)
# load base classes from default inputs.yaml
# then adjust for user inputs
ice.layer_type = [lyr_typ, lyr_typ]
ice.dz = [0.001, dz]
ice.rds = [r_eff, r_eff]
ice.rho = [rho, rho]
impurities[0].conc = [bc,0]
impurities[1].conc = [snow_algae, 0]
impurities[2].conc = [glacier_algae, 0]
illumination.solzen = zenith
illumination.calculate_irradiance()
# validate inputs to ensure no invalid combinations have been chosen
status = validate_inputs(ice, rt_config, model_config, illumination, impurities)
# now get the optical properties of the ice column
ssa_snw, g_snw, mac_snw = get_layer_OPs(ice, model_config)
tau, ssa, g, L_snw = mix_in_impurities(
ssa_snw, g_snw, mac_snw, ice, impurities, model_config
)
# now run one or both of the radiative transfer solvers
outputs = adding_doubling_solver(tau, ssa, g, L_snw, ice, illumination, model_config)
rounded_BBA = np.round(outputs.BBA, 2)
wvl = np.arange(0.205, 5, 0.01)
plt.figure()
plt.plot(wvl, outputs.albedo, linewidth=1)
plt.ylim(0,1)
plt.xlim(0.2,2.5)
plt.text(1.5, 0.8, f"broadband albedo\n{rounded_BBA}", fontsize = 20)
plt.xlabel("Wavelength (microns)")
plt.ylabel("Albedo")
plt.grid(False)
plt.tight_layout()
plt.savefig("app/src/outputs/albedo.jpg")
plt.close()
np.savetxt("app/src/outputs/albedo.csv", outputs.albedo, delimiter=',')
res = Response("success")
res.headers['Access-Control-Allow-Origin'] = '*'
return res
if __name__ == '__main__':
app.run(host = '0.0.0.0', port = port, debug=True)
```
#### File: biosnicar-py/src/geometric_optics_ice.py
```python
import sys
sys.path.append("./src")
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
# Set paths
SAVEPATH = "./Data/GO_files/480band/"
DATAPATH = "./Data/rfidx_ice.nc"
RI_SOURCE = 2
def preprocess_RI(ri_source, path_to_ri):
"""Preprocessing of wavelength and RI data.
Preprocessing function that ensures the wavelengths and real/imaginary
    parts of the refractive index for ice are provided in the correct waveband and correct
spectral resolution to interface with BioSNICAR. The refractive indices are taken
from Warren and Brandt 2008.
    Grabs the appropriate wavelengths, real and imaginary parts of ice
refractive index. The source of the refractive index data is
controlled by var "ri_source" where 0 = Warren 1984, 1 = Warren 2008
and 2 = Picard 2016.
These are then passed as numpy arrays to the Geometrical Optics function.
Args:
ri_source: choice of refractive index
path_to_ri: path to directory containing RI data
Returns:
reals: numpy array of real parts of RI by wavelength
imags: numpy array of imaginary parts of RI by wavelength
wavelengths: numpy array of wavelengths (um)
"""
refidx = xr.open_dataset(path_to_ri)
wavelengths = refidx["wvl"].values
if ri_source == 0:
reals = refidx["re_Wrn84"].values
imags = refidx["im_Wrn84"].values
elif ri_source == 1:
reals = refidx["re_Wrn08"].values
imags = refidx["im_Wrn08"].values
elif ri_source == 2:
reals = refidx["re_Pic16"].values
imags = refidx["im_Pic16"].values
return reals, imags, wavelengths
def calc_optical_params(
side_length,
depth,
reals,
imags,
wavelengths,
plots=False,
report_dims=False,
):
"""Calculates single scattering optical properties.
Van Diedenhoven's parameterisation is used to calculate
the single scatterign optical properties of hexagonal
ice columns of given dimensions.
Args:
side_length: length of side of hexagonal face (um)
depth: length of hexagonal column (um)
reals: numpy array of real parts of RI by wavelength
imags: numpy array of imaginary parts of RI by wavelength
        wavelengths: numpy array of wavelengths (um)
plots: Boolean to toggle plotting OPs
report_dims: Boolean to toggle printing OP data to terminal
Returns:
        g_list: asymmetry parameter
        ssa_list: single scattering albedo
        mac_list: mass absorption coefficient
        depth: length of hexagonal column (um)
        side_length: length of side of hexagonal face (um)
        diameter: diameter across hexagonal face.
"""
ssa_list = []
g_list = []
abs_xs_list = []
mac_list = []
V = 1.5 * np.sqrt(3) * side_length**2 * depth # volume
Area_total = (
3 * side_length * (np.sqrt(3) * side_length + depth * 2)
) # total surface area
Area = Area_total / 4 # projected area
apothem = (2 * Area) / (
depth * 6
) # apothem is distance from centre point to midpoint of a side for hexagon
diameter = 2 * apothem # midpoint of one side to midpoint of opposite side
ar = depth / side_length
delta = 0.3
for i in np.arange(0, len(wavelengths), 1):
mr = reals[i]
mi = imags[i]
wl = wavelengths[i]
# ------------------------------------------------
# ---------- input tables (see Figs. 4 and 7) ----
# ------------------------------------------------
# SSA parameterization
a = [0.457593, 20.9738] # for ar=1
# SSA correction for AR != 1 (Table 2)
nc1 = 3
nc2 = 4
c_ij = np.zeros(nc1 * nc2 * 2).reshape((nc1, nc2, 2))
# ---------- Plates ----------
c_ij[:, 0, 0] = [0.000527060, 0.309748, -2.58028]
c_ij[:, 1, 0] = [0.00867596, -0.650188, -1.34949]
c_ij[:, 2, 0] = [0.0382627, -0.198214, -0.674495]
c_ij[:, 3, 0] = [0.0108558, -0.0356019, -0.141318]
# --------- Columns ----------
c_ij[:, 0, 1] = [0.000125752, 0.387729, -2.38400]
c_ij[:, 1, 1] = [0.00797282, 0.456133, 1.29446]
c_ij[:, 2, 1] = [0.00122800, -0.137621, -1.05868]
c_ij[:, 3, 1] = [0.000212673, 0.0364655, 0.339646]
# diffraction g parameterization
b_gdiffr = [-0.822315, -1.20125, 0.996653]
# raytracing g parameterization ar=1
p_a_eq_1 = [0.780550, 0.00510997, -0.0878268, 0.111549, -0.282453]
# ---- g correction for AR != 1 (Also applied to AR=1 as plate) (Table 3)
nq1 = 3
nq2 = 7
q_ij = np.zeros(nq1 * nq2 * 2).reshape((nq1, nq2, 2))
# ---------- Plates ----------
q_ij[:, 0, 0] = [-0.00133106, -0.000782076, 0.00205422]
q_ij[:, 1, 0] = [0.0408343, -0.00162734, 0.0240927]
q_ij[:, 2, 0] = [0.525289, 0.418336, -0.818352]
q_ij[:, 3, 0] = [0.443151, 1.53726, -2.40399]
q_ij[:, 4, 0] = [0.00852515, 1.88625, -2.64651]
q_ij[:, 5, 0] = [-0.123100, 0.983854, -1.29188]
q_ij[:, 6, 0] = [-0.0376917, 0.187708, -0.235359]
# ---------- Columns ----------
q_ij[:, 0, 1] = [-0.00189096, 0.000637430, 0.00157383]
q_ij[:, 1, 1] = [0.00981029, 0.0409220, 0.00908004]
q_ij[:, 2, 1] = [0.732647, 0.0539796, -0.665773]
q_ij[:, 3, 1] = [-1.59927, -0.500870, 1.86375]
q_ij[:, 4, 1] = [1.54047, 0.692547, -2.05390]
q_ij[:, 5, 1] = [-0.707187, -0.374173, 1.01287]
q_ij[:, 6, 1] = [0.125276, 0.0721572, -0.186466]
# --------- refractive index correction of asymmetry parameter
c_g = np.zeros(4).reshape(2, 2)
c_g[:, 0] = [0.96025050, 0.42918060]
c_g[:, 1] = [0.94179149, -0.21600979]
# ---- correction for absorption
s = [1.00014, 0.666094, -0.535922, -11.7454, 72.3600, -109.940]
u = [-0.213038, 0.204016]
# -------- selector for plates or columns
if ar > 1.0:
col_pla = 1 # columns
else:
col_pla = 0 # plates & compacts
# ------------------------------------------------
# ------------ Size parameters -------------------
# ------------------------------------------------
# --- absorption size parameter (Fig. 4, box 1)
Chi_abs = mi / wl * V / Area
# ----- scattering size parameter (Fig. 7, box 1)
Chi_scat = 2.0 * np.pi * np.sqrt(Area / np.pi) / wl
# ------------------------------------------------
# ------------ SINGLE SCATTERING ALBEDO ----------
# ------------------------------------------------
if Chi_abs > 0:
w_1 = 1.0 - a[0] * (
1.0 - np.exp(-Chi_abs * a[1])
) # for AR=1 (Fig. 4, box 2)
l = np.zeros(nc1)
for i in range(nc2):
l[:] += c_ij[:, i, col_pla] * np.log10(ar) ** i # (Fig. 4, box 3)
D_w = (
l[0]
* np.exp(-((np.log(Chi_abs) - l[2]) ** 2) / (2.0 * l[1] ** 2))
/ (Chi_abs * l[1] * np.sqrt(2.0 * np.pi))
) # (Fig. 4, box 3)
w = w_1 + D_w # (Fig. 4, box 4)
else:
w = 1.0
# ------------------------------------------------
# --------------- ASYMMETRY PARAMETER ------------
# ------------------------------------------------
# diffraction g
g_diffr = (
b_gdiffr[0] * np.exp(b_gdiffr[1] * np.log(Chi_scat)) + b_gdiffr[2]
) # (Fig. 7, box 2)
g_diffr = max([g_diffr, 0.5])
# raytracing g at 862 nm
g_1 = 0.0
for i in range(len(p_a_eq_1)):
g_1 += p_a_eq_1[i] * delta**i # (Fig. 7, box 3)
p_delta = np.zeros(nq1)
for i in range(nq2):
p_delta += q_ij[:, i, col_pla] * np.log10(ar) ** i # (Fig. 7, box 4)
Dg = 0.0
for i in range(nq1):
Dg += p_delta[i] * delta**i # (Fig. 7, box 4)
g_rt = 2.0 * (g_1 + Dg) - 1.0 # (Fig. 7, box 5)
# --------- refractive index correction of asymmetry parameter (Fig. 7, box 6)
epsilon = c_g[0, col_pla] + c_g[1, col_pla] * np.log10(ar)
mr1 = 1.3038 # reference value @ 862 nm band
C_m = abs(
(mr1 - epsilon) / (mr1 + epsilon) * (mr + epsilon) / (mr - epsilon)
) # abs function added according to corrigendum to the original paper
# ---- correction for absorption (Fig. 7, box 7)
if Chi_abs > 0:
C_w0 = 0.0
for i in range(len(s)):
C_w0 += s[i] * (1.0 - w) ** i
k = np.log10(ar) * u[col_pla]
C_w1 = k * w - k + 1.0
C_w = C_w0 * C_w1
else:
C_w = 1.0
# raytracing g at required wavelength
g_rt_corr = g_rt * C_m * C_w # (Fig. 7, box 9)
# ----- Calculate total asymmetry parameter and check g_tot <= 1 (Fig. 7, box 9)
g_tot = 1.0 / (2.0 * w) * ((2.0 * w - 1.0) * g_rt_corr + g_diffr)
g_tot = min([g_tot, 1.0])
absXS = Area * (1 - ((np.exp(-4 * np.pi * mi * V)) / (Area * wl)))
MAC = (
absXS / V * 914
) # divide by volume*mass to give mass absorption coefficient
ssa_list.append(w)
g_list.append(g_tot)
abs_xs_list.append(absXS)
mac_list.append(MAC)
if plots:
plt.figure(1)
plt.plot(wavelengths, ssa_list), plt.ylabel("SSA"), plt.xlabel(
"Wavelength (um)"
), plt.grid(b=None)
plt.figure(2)
plt.plot(wavelengths, g_list), plt.ylabel("Assymetry Parameter"), plt.xlabel(
"Wavelength (um)"
), plt.grid(b=None)
plt.figure(3)
plt.plot(wavelengths, mac_list), plt.ylabel(
"Mass Absorption Cross Section"
), plt.xlabel("Wavelength (um)"), plt.grid(b=None)
if report_dims:
print("Width of hexagonal plane = ", np.round(diameter / 10000, 2), " (cm)")
print("depth of hexagonal column = ", depth / 10000, " (cm)")
print("aspect ratio = ", ar)
print("ice crystal volume = ", np.round(V * 1e-12, 2), " (cm^3)")
return g_list, ssa_list, mac_list, depth, side_length, diameter
def net_cdf_updater(
ri_source, savepath, g_list, ssa_list, mac_list, depth, side_length, density
):
"""Updates a template NetCDF file with new OP data.
Args:
        ri_source: choice of refractive index file
savepath: path to save output data
g_list: asymmetry parameter
ssa_list: single scattering albedo
mac_list: mass absorption coefficient
        depth: length of hexagonal column (um)
side_length: length of side of hexagonal face (um)
density: density of material in kg/m3.
Returns:
None but saves NetCDF file to savepath
"""
filepath_in = savepath
mac_in = np.squeeze(mac_list)
ssa_in = np.squeeze(ssa_list)
g_in = np.squeeze(g_list)
if ri_source == 0:
stb1 = "ice_Wrn84/"
stb2 = "ice_Wrn84_"
elif ri_source == 1:
stb1 = "ice_Wrn08/"
stb2 = "ice_Wrn08_"
elif ri_source == 2:
stb1 = "ice_Pic16/"
stb2 = "ice_Pic16_"
icefile = pd.DataFrame()
icefile["asm_prm"] = g_in
icefile["ss_alb"] = ssa_in
icefile["ext_cff_mss"] = mac_in
icefile = icefile.to_xarray()
icefile.attrs["medium_type"] = "air"
icefile.attrs[
"description"
] = f"""Optical properties for ice grain: hexagonal column of side
length {side_length}um and length {depth}um"""
icefile.attrs["psd"] = "monodisperse"
icefile.attrs["side_length_um"] = depth
icefile.attrs["density_kg_m3"] = density
icefile.attrs[
"origin"
] = "Optical properties derived from geometrical optics calculations"
icefile.to_netcdf(
str(filepath_in + stb1 + stb2 + "{}_{}.nc".format(str(side_length), str(depth)))
)
return
# --------------------------------------------------------------------------------------
# FUNCTON CALLS
# --------------------------------------------------------------------------------------
# reals, imags, wavelengths = preprocess_RI(RI_SOURCE, DATAPATH)
# for side_length in np.arange(2000, 11000, 1000):
# for depth in np.arange(2000, 31000, 1000):
# (
# g_list,
# ssa_list,
# mac_list,
# depth,
# side_length,
# diameter,
# ) = calc_optical_params(
# side_length, depth, reals, imags, wavelengths, plots=False, report_dims=True
# )
# net_cdf_updater(
# RI_SOURCE, SAVEPATH, g_list, ssa_list, mac_list, depth, side_length, 917
# )
if __name__ == '__main__':
pass
```
#### File: biosnicar-py/src/setup_snicar.py
```python
import sys
sys.path.append("./src")
import yaml
from classes import *
def setup_snicar(input_file):
"""Builds impurity array and instances of all classes according to config in yaml file.
Args:
        input_file: path to the yaml config file
Returns:
ice: instance of Ice class
illumination: instance of Illumination class
rt_config: instance of RTConfig class
model_config: instance of ModelConfig class
plot_config: instance of PlotConfig class
        impurities: list of Impurity instances
"""
impurities = build_impurities_array(input_file)
(
ice,
illumination,
rt_config,
model_config,
plot_config,
) = build_classes(input_file)
return (
ice,
illumination,
rt_config,
model_config,
plot_config,
impurities,
)
def build_classes(input_file):
"""Instantiates classes according to config in yaml file.
Args:
        input_file: path to the yaml config file
Returns:
ice: instance of Ice class
illumination: instance of Illumination class
rt_config: instance of RTConfig class
model_config: instance of ModelConfig class
        plot_config: instance of PlotConfig class
"""
ice = Ice(input_file)
illumination = Illumination(input_file)
rt_config = RTConfig(input_file)
model_config = ModelConfig(input_file)
plot_config = PlotConfig(input_file)
return ice, illumination, rt_config, model_config, plot_config
def build_impurities_array(input_file):
"""Creates an array of instances of Impurity.
creates an array of impurities - each one an instance of Impurity with
properties defined in yaml file.
Args:
        input_file: path to the yaml config file
Returns:
impurities: array of instances of Impurity
"""
with open(input_file, "r") as ymlfile:
inputs = yaml.load(ymlfile, Loader=yaml.FullLoader)
impurities = []
for i, id in enumerate(inputs["IMPURITIES"]):
name = inputs["IMPURITIES"][id]["NAME"]
file = inputs["IMPURITIES"][id]["FILE"]
cfactor = inputs["IMPURITIES"][id]["CFACTOR"]
coated = inputs["IMPURITIES"][id]["COATED"]
unit = inputs["IMPURITIES"][id]["UNIT"]
conc = inputs["IMPURITIES"][id]["CONC"]
impurities.append(Impurity(file, coated, cfactor, unit, name, conc))
return impurities
if __name__ == '__main__':
pass
``` |
{
"source": "jmcook1186/BrightLink",
"score": 3
} |
#### File: BrightLink/app/ImageAnalysis.py
```python
import ee
import numpy as np
import json
import os
from git import Repo
from dotenv import load_dotenv
import geemap
load_dotenv()
def setupGEE(coords, platform, startDate, endDate):
Map=geemap.Map(basemap='HYBRID')
area = ee.Geometry.Polygon([coords],None,False)
# select platform
if platform =='SENTINEL2':
platformPath = "COPERNICUS/S2"
BandLong = 'B8'
BandShort = 'B4'
elif platform == 'LANDSAT':
platformPath = "LANDSAT/LC08/C01/T1_TOA"
BandLong = 'B5'
BandShort = 'B4'
elif platform == 'MODIS':
platformPath = "MODIS/MOD09GA_006_NDVI"
BandLong = None
BandShort = None
else:
raise ValueError("please select a valid platform from: MODIS, SENTINEL2, LANDSAT")
# define the image
if (platform == "SENTINEL2") or (platform == "LANDSAT"):
collection = ee.ImageCollection(platformPath).filterBounds(area)\
.filterDate(startDate, endDate)\
.select([BandLong, BandShort])
elif platform == "MODIS":
collection = ee.ImageCollection(platformPath)\
.filter(ee.Filter.date(startDate, endDate))
print(" number of image: ",collection.size().getInfo())
if collection.size().getInfo() == 0:
raise ValueError("There are no valid images for this area/date combination")
return collection, area
def albedo_LANDSAT():
return
def albedo_Sentinel():
return
# perform any calculation on the image collection here
def ndvi_S2(img):
ndvi = ee.Image(img.normalizedDifference(['B8', 'B4'])).rename(["ndvi"])
return ndvi
def ndvi_LANDSAT(img):
ndvi = ee.Image(img.normalizedDifference(['B5','B4'])).rename(['ndvi'])
return ndvi
# export the latitude, longitude and array
def LatLonImg(img, area):
img = img.addBands(ee.Image.pixelLonLat())
img = img.reduceRegion(reducer=ee.Reducer.toList(),\
geometry=area,\
maxPixels=1e13,\
scale=10);
data = np.array((ee.Array(img.get("result")).getInfo()))
lats = np.array((ee.Array(img.get("latitude")).getInfo()))
lons = np.array((ee.Array(img.get("longitude")).getInfo()))
return lats, lons, data
# covert the lat, lon and array into an image
def toImage(lats,lons,data):
# get the unique coordinates
uniqueLats = np.unique(lats)
uniqueLons = np.unique(lons)
# get number of columns and rows from coordinates
ncols = len(uniqueLons)
nrows = len(uniqueLats)
# determine pixelsizes
ys = uniqueLats[1] - uniqueLats[0]
xs = uniqueLons[1] - uniqueLons[0]
# create an array with dimensions of image
arr = np.zeros([nrows, ncols], np.float32) #-9999
# fill the array with values
counter =0
for y in range(0,len(arr),1):
for x in range(0,len(arr[0]),1):
if lats[counter] == uniqueLats[y] and lons[counter] == uniqueLons[x] and counter < len(lats)-1:
counter+=1
arr[len(uniqueLats)-1-y,x] = data[counter] # we start from lower left corner
return arr
def runAnalysis(collection, platform, score_type, savepath, area, plot):
# map over the image collection
if platform == "SENTINEL2":
myCollection = collection.map(ndvi_S2)
elif platform == "LANDSAT":
myCollection = collection.map(ndvi_LANDSAT)
elif platform == "MODIS":
myCollection = collection
    # get the mean of the collection
result = ee.Image(myCollection.mean()).rename(['result'])
# get the lon, lat and result as 1d array
lat, lon, data = LatLonImg(result, area)
# 1d to 2d array
image = toImage(lat,lon,data)
if score_type == "count":
ndvi_score = (np.size(image[image>0.5])/np.size(image))*100
elif score_type == "median":
ndvi_score = np.median(image)*100
elif score_type == "mean":
ndvi_score = np.mean(image)*100
print("NDVI score = ",ndvi_score)
if plot:
import matplotlib.pyplot as plt
plt.imshow(image)
plt.colorbar()
print("saveing to {}".format(str(savepath+'image_ndvi.jpg')))
plt.savefig(str(savepath+'/image_ndvi.jpg'))
return ndvi_score
```
#### File: tests/integrative/conftest.py
```python
import pytest
from brownie import (
BrightLink,
Contract,
accounts,
network,
)
@pytest.fixture
def checkNetwork():
assert network.show_active() == 'kovan'
return
@pytest.fixture(scope='module')
def load_owner():
owner = accounts.load('main')
return owner
@pytest.fixture(scope='module')
def load_customer():
customer = accounts.load('account2')
return customer
@pytest.fixture(scope='module')
def load_donor():
donor = accounts.load('account3')
return donor
@pytest.fixture(scope="module")
def get_deployed_contract(load_owner):
contract = load_owner.deploy(BrightLink,'0xff795577d9ac8bd7d90ee22b6c1703490b6512fd',\
'0xdCf0aF9e59C002FA3AA091a46196b37530FD48a8',\
'0xa36085F69e2889c224210F603D836748e7dC0088',\
'0x88757f2f99175387aB4C6a4b3067c77A695b0349',\
'0xc57B33452b4F7BB189bB5AfaE9cc4aBa1f7a4FD8',\
'd5270d1c311941d0b08bead21fea7747',\
100000000000000000)
return contract
@pytest.fixture(scope='module')
def set_deposit_amount():
return 500e18
``` |
{
"source": "jmcouffin/pyRoovit",
"score": 2
} |
#### File: BulkRoov.pulldown/DownRoov.pushbutton/script.py
```python
import clr
import os
# import pyrevit libraries
from pyrevit import forms
from pyrevit import revit, DB, UI
from pyrevit import script
# create custom message class based on sheet object
class ViewSheetToDownrev(forms.TemplateListItem):
@property
def name(self):
return self.item.SheetNumber + ' - ' + self.item.Name
# get document
doc = revit.doc
# get all sheets in document
sheets = DB.FilteredElementCollector(revit.doc)\
.OfCategory(DB.BuiltInCategory.OST_Sheets)\
.WhereElementIsNotElementType()\
.ToElements()
unsortedSheets,unsortedNumbers = [],[]
# build set of sheet Ids for sorting and deletion
for s in sheets:
unsortedSheets.append(s.Id.IntegerValue)
unsortedNumbers.append(s.SheetNumber)
# sort the list of sheets by their numbers
allSheets = [s for _, s in sorted(zip(unsortedNumbers, unsortedSheets))]
# select a revision from the list
downrev = forms.select_revisions(title='Select a Revision to add...', button_name='Select', width=500, multiple=False)
# display primary UI if revision provided and sheets available
if not allSheets:
forms.alert("No Sheets available to Downrev.", title= "Script cancelled")
elif not downrev:
forms.alert("Revision not selected.", title= "Script cancelled")
else:
    # ask user for sheets to downrev
return_options = \
forms.SelectFromList.show(
[ViewSheetToDownrev(revit.doc.GetElement(DB.ElementId(s)))
for s in allSheets],
title='Select Sheets to Downrev',
width=500,
button_name='Downrev Sheets',
multiselect=True
)
# if user selects sheets, attempt to downrev them
if return_options:
rev_pass = 0
with revit.Transaction('Downrev Sheets'):
for sht in return_options:
sheetRevs = sht.GetAdditionalRevisionIds()
if downrev.Id in sheetRevs:
sheetRevs.Remove(downrev.Id)
else:
continue
try:
sht.SetAdditionalRevisionIds(sheetRevs)
rev_pass += 1
except:
pass
        # display the outcome of the revision removal
form_message = str(rev_pass) + "/" + str(len(return_options)) + " Sheets successfully de-revisioned."
forms.alert(form_message, title= "Script complete", warn_icon=False)
# if script is cancelled
else:
forms.alert("No Sheets de-revisioned.", title= "Script cancelled", warn_icon=False)
``` |
{
"source": "jmcouffin/revitron-ui",
"score": 2
} |
#### File: revitron-ui/lib/revitronui.py
```python
import revitron
import os
import sys
from revitron import _
from pyrevit import forms
from pyrevit import output
from collections import defaultdict
class ElementInfo:
def __init__(self, elements):
out = output.get_output()
self.info = []
for element in elements:
dependents = []
for dep in _(element).getDependent():
depFamType = revitron.Parameter(dep, 'Family and Type').getValueString()
depInfo = '{} {}, {}'.format(out.linkify(dep.Id), _(dep).getCategoryName(), depFamType).strip(', ')
depInfo = depInfo + '<br>'
dependents.append(depInfo)
self.info.append([
out.linkify(element.Id),
_(element).getClassName(),
_(element).getCategoryName(),
revitron.Parameter(element, 'Family and Type').getValueString(),
''.join(dependents)
])
def show(self, title = ''):
out = output.get_output()
out.print_table(self.info,
title = title,
columns = ['ID', 'Class', 'Category', 'Family / Type', 'Dependent'])
class DWG:
def __init__(self):
self.config = revitron.DocumentConfigStorage().get('revitron.export', defaultdict())
if not self.config:
print('Please configure your DWG exporter first!')
sys.exit()
setup = self.config.get('DWG_Export_Setup')
self.exporter = revitron.DWGExporter(setup)
def export(self, sheet):
return self.exporter.exportSheet(sheet,
self.config.get('Sheet_Export_Directory'),
self.config.get('Sheet_Naming_Template'))
class PDF:
def __init__(self):
self.config = revitron.DocumentConfigStorage().get('revitron.export', defaultdict())
if not self.config:
print('Please configure your PDF exporter first!')
sys.exit()
self.exporter = revitron.PDFExporter(self.config.get('PDF_Printer_Address'), self.config.get('PDF_Temporary_Output_Path'))
self.sizeParamName = self.config.get('Sheet_Size_Parameter_Name')
self.defaultSize = self.config.get('Default_Sheet_Size')
self.orientationParamName = self.config.get('Sheet_Orientation_Parameter_Name')
self.defaultOrientation = self.config.get('Default_Sheet_Orientation')
def export(self, sheet):
sheetSize = False
sheetOrientation = False
if self.sizeParamName:
sheetSize = _(sheet).get(self.sizeParamName)
if self.orientationParamName:
sheetOrientation = _(sheet).get(self.orientationParamName)
if not sheetSize:
sheetSize = self.defaultSize
if not sheetOrientation:
sheetOrientation = self.defaultOrientation
return self.exporter.printSheet(sheet,
sheetSize,
sheetOrientation,
self.config.get('Sheet_Export_Directory'),
self.config.get('Sheet_Naming_Template')
)
class SelectType:
def __init__(self, elementTypes, title):
self.title = title
self.options = []
for elementType in elementTypes:
self.options.append(OptionListTypes(elementType))
def show(self, multiselect=False):
return forms.SelectFromList.show(self.options,
title=self.title,
multiselect=multiselect,
button_name='Select Type')
class OptionListTypes(forms.TemplateListItem):
@property
def name(self):
return revitron.ParameterTemplate(self.item, '{Family Name} - {Type Name}', False).render()
class RoomTags():
@staticmethod
def add(method, title):
roomTagTypes = revitron.Filter().byCategory('Room Tags').onlyTypes().getElements()
roomTagType = SelectType(roomTagTypes, title).show()
scope = revitron.Selection.get()
if not scope:
scope = revitron.ACTIVE_VIEW.Id
if roomTagType:
rooms = revitron.Filter(scope).byCategory('Rooms').noTypes().getElements()
max_value = len(rooms)
counter = 0
with forms.ProgressBar(title='Tagging rooms ... ({value} of {max_value})') as pb:
for room in rooms:
counter = counter + 1
method(room, roomTagType.Id)
pb.update_progress(counter, max_value)
```
#### File: rpm/system/ui.py
```python
from pyrevit import script
from pyrevit import output
from pyrevit import forms
from rpm import config
from rpm.system.update import Update
from rpm.system.session import Session
class UI:
@staticmethod
def checkUpdates(noInteraction = False):
hasUpdates = False
out = script.get_output()
pyRevit = Update.checkPyRevit()
extensions = Update.checkExtensions()
if pyRevit:
install = 'Close open Revit sessions and install pyRevit core update now'
skip = 'Skip update and keep Revit sessions open'
res = forms.alert('A pyRevit core update is ready to be installed.\n'
'Note that all running Revit sessions will be closed automatically when installing the update.',
title = 'pyRevit Update',
options = [install, skip])
if res == install:
Update.pyRevit()
else:
hasUpdates = True
if extensions:
install = 'Install extension updates now'
skip = 'Skip updates'
if noInteraction:
res = install
else:
res = forms.alert('There are pyRevit extension updates ready to be installed.',
title = 'pyRevit Extensions Updates',
options = [install, skip])
if res == install:
UI.printLogo()
UI.printTitle()
Update.extensions()
out.print_html('<br><br>Update has finished. Reloading ...<br><br>')
Session.reload()
else:
hasUpdates = True
return hasUpdates
@staticmethod
def printLogo():
out = script.get_output()
try:
out.center()
except:
pass
out.print_html('<div style="text-align:center; margin: 30px 0"><img src="{}" style="max-width:500px;"></div>'.format(config.RPM_DIR + '/svg/rpm-ui.svg'))
@staticmethod
def printTitle():
out = script.get_output()
out.print_html('<h2>Revitron Package Manager</h2>')
``` |
{
"source": "jmcparland/sms-proxy",
"score": 2
} |
#### File: test/unit/test_models.py
```python
import pytest
import json
import urllib
import uuid
from datetime import datetime
from sms_proxy.api import app, VirtualTN, ProxySession
from sms_proxy.database import db_session, init_db, destroy_db, engine
from sms_proxy.settings import TEST_DB
def teardown_module(module):
if TEST_DB in app.config['SQLALCHEMY_DATABASE_URI']:
VirtualTN.query.delete()
ProxySession.query.delete()
db_session.commit()
else:
raise AttributeError(("The production database is turned on. "
"Flip settings.DEBUG to True"))
def setup_module(module):
if TEST_DB in app.config['SQLALCHEMY_DATABASE_URI']:
VirtualTN.query.delete()
ProxySession.query.delete()
db_session.commit()
else:
raise AttributeError(("The production database is turned on. "
"Flip settings.DEBUG to True"))
@pytest.fixture
def fresh_session():
"""
Clears out any outstanding rows in the VirtualTN and ProxySession
tables. Creates and returns a linked VirtualTN and ProxySession.
"""
VirtualTN.query.delete()
ProxySession.query.delete()
new_tn = VirtualTN('1234567897')
db_session.add(new_tn)
db_session.commit()
new_session = ProxySession(
new_tn.value, '12223334444', '12223335555', expiry_window=1)
new_tn.session_id = new_session.id
db_session.add(new_tn)
db_session.add(new_session)
db_session.commit()
return new_tn, new_session
@pytest.mark.parametrize("tns, available", [
({1234567891: False}, False),
({1234567892: False, 1234567893: True}, False),
({1234567894: True}, True),
({1234567895: True, 1234567896: True}, True),
])
def test_virtual_tn_available(tns, available):
"""
The 'get_next_available' function returns the first non-reserved
VirtualTN.
"""
VirtualTN.query.delete()
    for num, is_available in tns.iteritems():
        new_tn = VirtualTN(num)
        if not is_available:
            new_tn.session_id = 'active_session_id'
db_session.add(new_tn)
db_session.commit()
available_tn = VirtualTN.get_next_available()
if not available:
assert available_tn is None
else:
for num, available in tns.iteritems():
if available:
assert available_tn.value == str(num)
return
def test_clean_expired_sessions(fresh_session):
"""
The 'clean_expired' method does clear expired ProxySessions
"""
new_tn, new_session = fresh_session
new_session.expiry_date = datetime.utcnow()
db_session.add(new_session)
db_session.commit()
sessions = ProxySession.query.all()
assert len(sessions) == 1
ProxySession.clean_expired()
sessions = ProxySession.query.all()
assert new_tn.session_id is None
assert len(sessions) == 0
def test_terminate_session(fresh_session):
"""
The 'terminate' method delete's the ProxySession and releases
the associated VirtualTN back to the pool.
"""
new_tn, new_session = fresh_session
sessions = ProxySession.query.all()
assert new_session.virtual_TN == new_tn.value
assert len(sessions) == 1
ProxySession.terminate(new_session.id)
released_tn = VirtualTN.query.filter_by(value=new_tn.value).one()
assert released_tn.session_id is None
sessions = ProxySession.query.all()
assert len(sessions) == 0
def test_get_other_participant(fresh_session):
"""
The 'get_other_participant' method is able to traverse the ProxySession
associated to the VirtualTN and sender number.
"""
new_tn, new_session = fresh_session
other_participant, session_id = ProxySession.get_other_participant(
new_tn.value, new_session.participant_a)
assert other_participant == new_session.participant_b
assert session_id == new_session.id
other_participant, session_id = ProxySession.get_other_participant(
new_tn.value, new_session.participant_b)
assert other_participant == new_session.participant_a
assert session_id == new_session.id
``` |
{
"source": "jmcph4/advent2017",
"score": 4
} |
#### File: advent2017/day3/day3.py
```python
def taxicab_distance(a, b):
"""
Returns the Manhattan distance of the given points
"""
n = len(a)
d = 0
for i in range(n):
d += abs(a[i] - b[i])
return d
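# Editor's note: a small worked example (assumes both points have the same
# dimension, as the function above requires):
#   taxicab_distance((0, 0), (2, 3)) == abs(0 - 2) + abs(0 - 3) == 5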
PERIOD = 2 # the period of the sequence
def sequ():
"""
Generates the sequence corresponding to the number of steps to take at each
turn when traversing memory
"""
step = 0
val = 1
while True:
if step == PERIOD:
step = 0
val += 1
yield val
step += 1
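# Editor's note: the generator above yields each step count twice before
# incrementing, i.e. 1, 1, 2, 2, 3, 3, 4, 4, ..., matching the spiral's arm lengths.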
# movements
def right(pos):
"""
Move right
"""
return (pos[0]+1, pos[1])
def left(pos):
"""
Move left
"""
return (pos[0]-1, pos[1])
def up(pos):
"""
Move up
"""
return (pos[0], pos[1]+1)
def down(pos):
"""
Move down
"""
return (pos[0], pos[1]-1)
PORT_NUM = 1 # address of sole I/O port
def coordinates(n):
"""
Returns the co-ordinates of the given address in memory
"""
    if n == PORT_NUM: # orient ourselves w.r.t. the I/O port
return (0, 0)
pos = (0, 0)
seq = sequ()
diff = n - 1
while diff > 0:
if diff == 0: # are we there yet?
return pos
# right branch
branch_length = next(seq)
for i in range(branch_length):
if diff == 0: # are we there yet?
return pos
pos = right(pos)
diff -= 1 # decrement difference
# up branch
branch_length = next(seq)
for i in range(branch_length):
if diff == 0: # are we there yet?
return pos
pos = up(pos)
diff -= 1 # decrement difference
# left branch
branch_length = next(seq)
for i in range(branch_length):
if diff == 0: # are we there yet?
return pos
pos = left(pos)
diff -= 1 # decrement difference
# down branch
branch_length = next(seq)
for i in range(branch_length):
if diff == 0: # are we there yet?
return pos
pos = down(pos)
diff -= 1 # decrement difference
return pos
def distance(n):
"""
Returns the Manhattan distance from the I/O port to the given address
"""
port_loc = coordinates(PORT_NUM)
n_loc = coordinates(n)
return taxicab_distance(port_loc, n_loc)
def num_steps(n):
"""
Returns the number of steps required to get from the given address to the
I/O port
"""
if n == PORT_NUM:
return 0
pos = coordinates(n)
return distance(n)
"""
# tests
print(num_steps(1)) # 0
print(num_steps(12)) # 3
print(num_steps(23)) # 2
print(num_steps(1024)) # 31
"""
INPUT_FILE_PATH = "input.txt"
def main():
with open(INPUT_FILE_PATH) as f:
n = int(f.readline())
print(num_steps(n))
``` |
{
"source": "jmcph4/lm5",
"score": 3
} |
#### File: lm5/lm5/input.py
```python
from copy import deepcopy
class Input(object):
def __init__(self, type, data):
self.__type = type
self.__data = deepcopy(data)
def __repr__(self):
return repr(self.__data)
def __str__(self):
return str(self.__type) + str(self.__data)
```
#### File: lm5/lm5/test.py
```python
from copy import deepcopy
from .inputtype import InputType
from .errors import UnsupportedInputTypeError
class Test(object):
def __init__(self, target, supported_types, name=None):
self.__target = deepcopy(target)
self.__types = set(deepcopy(supported_types))
self.__inputs = {}
if name is None:
self.__name = "Test"
else:
self.__name = name
@property
def name(self):
return self.__name
@property
def types(self):
return self.__types
def add_input(self, type, data):
if type in self.__types:
self.__inputs[type] = data
else:
raise UnsupportedInputTypeError()
def remove_input(self, type):
if type in self.__types:
self.__inputs.pop(type)
else:
raise UnsupportedInputTypeError()
def run(self):
target = self.__init_target()
target.exec()
return (target.retval, target.stdout, target.stderr)
def from_config(self, config_data):
fields = config_data.keys()
if InputType.ARGV in fields:
self.add_input(InputType.ARGV, config_data[InputType.ARGV])
if InputType.STDIN in fields:
self.add_input(InputType.STDIN, config_data[InputType.STDIN])
def __init_target(self):
target = deepcopy(self.__target)
if InputType.ARGV in self.__inputs.keys():
args = self.__inputs[InputType.ARGV]
for i in range(len(args)):
arg = args[i]
target.add_arg(i, arg)
if InputType.STDIN in self.__inputs.keys():
target.append_string_stdin(self.__inputs[InputType.STDIN])
return target
``` |
{
"source": "jmcph4/py-snappy",
"score": 2
} |
#### File: py-snappy/py_snappy/main.py
```python
import functools
import itertools
from typing import Any, Callable, Iterable, Tuple, TypeVar
from .constants import TAG_LITERAL, TAG_COPY1, TAG_COPY2, TAG_COPY4
from .exceptions import BaseSnappyError, CorruptError, TooLargeError
# Each encoded block begins with the varint-encoded length of the decoded data,
# followed by a sequence of chunks. Chunks begin and end on byte boundaries.
# The first byte of each chunk is broken into its 2 least and 6 most
# significant bits called l and m: l ranges in [0, 4) and m ranges in [0, 64).
# l is the chunk tag. Zero means a literal tag. All other values mean a copy
# tag.
#
# For literal tags:
# - If m < 60, the next 1 + m bytes are literal bytes.
# - Otherwise, let n be the little-endian unsigned integer denoted by the
# next m - 59 bytes. The next 1 + n bytes after that are literal bytes.
#
# For copy tags, length bytes are copied from offset bytes ago, in the style of
# Lempel-Ziv compression algorithms. In particular:
# - For l == 1, the offset ranges in [0, 1<<11) and the length in [4, 12).
# The length is 4 + the low 3 bits of m. The high 3 bits of m form bits
# 8-10 of the offset. The next byte is bits 0-7 of the offset.
# - For l == 2, the offset ranges in [0, 1<<16) and the length in [1, 65).
# The length is 1 + m. The offset is the little-endian unsigned integer
# denoted by the next 2 bytes.
# - For l == 3, this tag is a legacy format that is no longer supported.
def uint8(n: int) -> int:
return n & ((1 << 8) - 1)
def uint32(n: int) -> int:
return n & ((1 << 32) - 1)
def uint64(n: int) -> int:
return n & ((1 << 64) - 1)
def uvarint(buf: bytes) -> Tuple[int, int]:
"""
uvarint decodes a uint64 from buf and returns that value and the number of
bytes read (> 0). If an error occurred, the value is 0 and the number of
bytes n is <= 0 meaning:
n == 0: buf too small
n < 0: value larger than 64 bits (overflow)
and -n is the number of bytes read"""
x, s = 0, 0
for idx, b in enumerate(buf):
if b < 0x80:
if idx > 9 or (idx == 9 and b > 1):
return 0, -1 * (idx + 1) # overflow
return x | uint64(b) << s, idx + 1
x |= uint64(b & 0x7F) << s
s += 7
return 0, 0
TReturn = TypeVar("TReturn")
def bytes_gen(fn: Callable[..., Iterable[int]]) -> Callable[..., bytes]:
@functools.wraps(fn)
def inner(*args: Any, **kwargs: Any) -> bytes:
return bytes(fn(*args, **kwargs))
return inner
def tuple_gen(
fn: Callable[..., Iterable[TReturn]]
) -> Callable[..., Tuple[TReturn, ...]]:
@functools.wraps(fn)
def inner(*args: Any, **kwargs: Any) -> Iterable[TReturn]:
return tuple(fn(*args, **kwargs))
return inner
@bytes_gen
def putuvarint(x: int) -> Iterable[int]:
"""
putuvarint encodes a uint64.
"""
while x >= 0x80:
yield uint8(x) | 0x80
x >>= 7
yield x
def extract_meta(src: bytes) -> Tuple[int, int]:
"""
Return a 2-tuple:
- the length of the decoded block
- the number of bytes that the length header occupied.
"""
v, n = uvarint(src)
if n <= 0 or v > 0xFFFFFFFF:
raise CorruptError
if v > 0x7FFFFFFF:
raise TooLargeError
return v, n
def decompress(buf: bytes) -> bytes:
"""
decompress returns the decompressed form of buf.
"""
block_length, s = extract_meta(buf)
src = tuple(c for c in buf)
src_len = len(src)
dst = [0] * block_length
d, offset, length = 0, 0, 0
while s < src_len:
b = src[s] & 0x03
if b == TAG_LITERAL:
x = src[s] >> 2
if x < 60:
s += 1
elif x == 60:
s += 2
if s > src_len:
raise CorruptError
x = src[s - 1]
elif x == 61:
s += 3
if s > src_len:
raise CorruptError
x = src[s - 2] | (src[s - 1] << 8)
elif x == 62:
s += 4
if s > src_len:
raise CorruptError
x = src[s - 3] | (src[s - 2] << 8) | (src[s - 1] << 16)
elif x == 63:
s += 5
if s > src_len:
raise CorruptError
x = (
src[s - 4]
| (src[s - 3] << 8) # noqa: W503
| (src[s - 2] << 16) # noqa: W503
| (src[s - 1] << 24) # noqa: W503
)
length = x + 1
if length <= 0:
raise BaseSnappyError("Unsupported literal length")
if length > len(dst) - d or length > src_len - s:
raise CorruptError
dst = list(
itertools.chain( # noqa: E203
dst[:d],
src[s : s + length], # noqa: E203
dst[d + length :], # noqa: E203
)
)
d += length
s += length
continue
elif b == TAG_COPY1:
s += 2
if s > src_len:
raise CorruptError
length = 4 + ((src[s - 2] >> 2) & 0x7)
offset = ((src[s - 2] & 0xE0) << 3) | src[s - 1]
elif b == TAG_COPY2:
s += 3
if s > src_len:
raise CorruptError
length = 1 + (src[s - 3] >> 2)
offset = src[s - 2] | (src[s - 1] << 8)
elif b == TAG_COPY4:
raise BaseSnappyError("Unsupported COPY_4 tag")
end = d + length
if offset > d or end > len(dst):
raise CorruptError
while d < end:
dst[d] = dst[d - offset]
d += 1
if d != block_length:
raise CorruptError
return bytes(dst[:d])
MAX_OFFSET = 1 << 15
C240 = 60 << 2
C244 = 61 << 2
C248 = 62 << 2
C252 = 63 << 2
C65536 = 1 << 16
C4294967296 = 1 << 32
@tuple_gen
def emit_literal(lit: bytes) -> Iterable[int]:
"""emit_literal returns a literal chunk."""
n = len(lit) - 1
if n < 60:
yield (uint8(n) << 2) | TAG_LITERAL
elif n < C240:
yield C240 | TAG_LITERAL
yield uint8(n)
elif n < C244:
yield C244 | TAG_LITERAL
yield uint8(n)
yield uint8(n >> 8)
elif n < C65536:
yield C248 | TAG_LITERAL
yield uint8(n)
yield uint8(n >> 8)
yield uint8(n >> 16)
elif uint64(n) < C4294967296:
yield C252 | TAG_LITERAL
yield uint8(n)
yield uint8(n >> 8)
yield uint8(n >> 16)
yield uint8(n >> 24)
else:
raise BaseSnappyError("Source buffer is too long")
yield from lit
C8 = 1 << 3
C64 = 1 << 6
C256 = 1 << 8
C2048 = 1 << 11
@tuple_gen
def emit_copy(offset: int, length: int) -> Iterable[int]:
"""emit_copy writes a copy chunk and returns the number of bytes written."""
while length > 0:
x = length - 4
if 0 <= x and x < C8 and offset < C2048:
yield ((uint8(offset >> 8) & 0x07) << 5) | (uint8(x) << 2) | TAG_COPY1
yield uint8(offset)
break
x = length
if x > C64:
x = C64
yield (uint8(x - 1) << 2) | TAG_COPY2
yield uint8(offset)
yield uint8(offset >> 8)
length -= x
C24 = 32 - 8
MAX_TABLE_SIZE = 1 << 14
@bytes_gen
def compress(buf: bytes) -> Iterable[int]:
"""compress returns the compressed form of buf."""
src = tuple(buf)
src_len = len(src)
# The block starts with the varint-encoded length of the decompressed bytes.
yield from (c for c in putuvarint(src_len))
# Return early if src is short.
if src_len <= 4:
if src_len != 0:
yield from emit_literal(src)
return
# Initialize the hash table. Its size ranges from 1<<8 to 1<<14 inclusive.
shift, table_size = C24, C256
while table_size < MAX_TABLE_SIZE and table_size < src_len:
shift -= 1
table_size *= 2
table = [0] * MAX_TABLE_SIZE
# Iterate over the source bytes.
s = 0 # The iterator position.
t = 0 # The last position with the same hash as s.
lit = 0 # The start position of any pending literal bytes.
while s + 3 < src_len:
# Update the hash table.
b0, b1, b2, b3 = src[s : s + 4] # noqa: E203
h = uint32(b0) | (uint32(b1) << 8) | (uint32(b2) << 16) | (uint32(b3) << 24)
p = uint32(h * 0x1E35A7BD) >> shift
# We need to to store values in [-1, inf) in table. To save
# some initialization time, (re)use the table's zero value
# and shift the values against this zero: add 1 on writes,
# subtract 1 on reads.
t, table[p] = table[p] - 1, s + 1
if (
t < 0
or s - t >= MAX_OFFSET # noqa: W503
or b0 != src[t] # noqa: W503
or b1 != src[t + 1] # noqa: W503
or b2 != src[t + 2] # noqa: W503
or b3 != src[t + 3] # noqa: W503
):
# If t is invalid or src[s:s+4] differs from src[t:t+4], accumulate a literal byte.
s += 1
continue
elif lit != s:
# Otherwise, we have a match. First, emit any pending literal bytes.
yield from emit_literal(src[lit:s])
# Extend the match to be as long as possible.
s0 = s
s, t = s + 4, t + 4
while s < src_len and src[s] == src[t]:
s += 1
t += 1
# Emit the copied bytes.
yield from emit_copy(s - t, s - s0)
lit = s
# Emit any final pending literal bytes and return.
if lit != src_len:
yield from emit_literal(src[lit:])
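# ---------------------------------------------------------------------------
# Editor's note: a hedged round-trip sketch, not part of the original module.
# It mirrors the package's own property test: compressing and then
# decompressing should return the input unchanged.
if __name__ == '__main__':
    payload = b"py-snappy round trip " * 64
    assert decompress(compress(payload)) == payload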
```
#### File: tests/core/test_empty_bytestring.py
```python
import pytest
from py_snappy import compress, decompress, CorruptError
def test_compress_empty_string():
assert compress(b"") == b"\x00"
def test_decompress_empty_string():
with pytest.raises(CorruptError):
decompress(b"")
```
#### File: tests/core/test_round_trip.py
```python
from hypothesis import given, settings
from py_snappy import compress, decompress
from tests.core.strategies import random_test_vectors_large_st
@given(value=random_test_vectors_large_st)
@settings(max_examples=10000)
def test_round_trip(value):
intermediate = compress(value)
result = decompress(intermediate)
assert value == result
``` |
{
"source": "jmcph4/quantmotion",
"score": 3
} |
#### File: quantmotion/quantmotion/asset.py
```python
from copy import deepcopy
class Asset(object):
"""
Represents a generic asset
"""
def __init__(self, name, prices):
self._name = name
self._prices = deepcopy(prices)
@property
def name(self):
return self._name
@property
def prices(self):
return self._prices
def __eq__(self, o):
if isinstance(o, type(self)):
if self._name == o._name and self._prices == o._prices:
return True
else:
return False
else:
return False
def __hash__(self):
return id(self)
def __repr__(self):
return self._name
def price_at(self, dt):
"""
A.price_at(dt) -- returns the price of A at dt
"""
return self._prices[dt]
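# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch, not part of the original module. A
# plain dict keyed by datetime stands in for the price series here; elsewhere
# in the package `prices` is typically a TimeSeries.
if __name__ == "__main__":
    from datetime import datetime
    prices = {datetime(2018, 1, 1): 1.50, datetime(2018, 1, 2): 2.00}
    asset = Asset("EXAMPLE", prices)
    assert asset.price_at(datetime(2018, 1, 2)) == 2.00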
```
#### File: quantmotion/quantmotion/convert.py
```python
from datetime import datetime
import csv
from io import StringIO
from .ohlcvdata import OHLCVData
from .timeseries import TimeSeries
DEFAULT_CSV_DELIMITER = ","
DEFAULT_CSV_QUOTE = '"'
def convert_csv_to_time_series(csv_data, dt_fmt):
"""
convert_csv_to_time_series(csv_data) -- parse csv_data to TimeSeries type
"""
csv_reader = csv.reader(StringIO(csv_data), delimiter=DEFAULT_CSV_DELIMITER, quotechar=DEFAULT_CSV_QUOTE)
ts_data = []
timestamp = 0
open = 0
    high = 0
low = 0
close = 0
volume = 0
for row in csv_reader:
for i in range(len(row)):
if i == 0:
timestamp = datetime.strptime(row[i], dt_fmt)
elif i == 1:
open = float(row[i])
elif i == 2:
high = float(row[i])
elif i == 3:
low = float(row[i])
elif i == 4:
close = float(row[i])
elif i == 5:
volume = float(row[i])
entry = (timestamp, OHLCVData(open=open, high=high, low=low, close=close, volume=volume))
ts_data.append(entry)
return TimeSeries(init=ts_data)
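# ---------------------------------------------------------------------------
# Editor's note: a hedged usage sketch, not part of the original module. The
# CSV layout (timestamp, open, high, low, close, volume) follows the parser
# above; the two rows are invented for illustration.
if __name__ == "__main__":
    sample = ("2018-01-01,1.00,2.43,0.52,1.50,100\n"
              "2018-01-02,2.00,2.05,1.95,2.00,5400\n")
    ts = convert_csv_to_time_series(sample, "%Y-%m-%d")
    print(len(ts))  # -> 2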
```
#### File: quantmotion/tests/test_timeseries.py
```python
import unittest
from datetime import datetime
from copy import deepcopy
from quantmotion.ohlcvdata import OHLCVData
from quantmotion.timeseries import TimeSeries
class TestTimeSeries(unittest.TestCase):
__valid_initial_data = [
[(datetime(2018, 1, 1), OHLCVData(1.00, 2.43, 0.52, 1.50, 100)),
(datetime(2018, 1, 2), OHLCVData(2.00, 2.05, 1.95, 2.00, 5400))],
[(datetime(2017, 1, 1), OHLCVData(4.50, 4.60, 2.40, 3.50, 567)),
(datetime(2017, 1, 2), OHLCVData(3.50, 5.50, 2.30, 4.00, 900))]
]
__not_found_key = datetime(2001, 1, 1)
__not_found_value = OHLCVData(-100, -200, -50, -120, -100)
def test___init___normal(self):
actual_ts = TimeSeries()
expected_ts = TimeSeries()
self.assertEqual(actual_ts, expected_ts)
def test___init___initial_data(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
expected_ts = TimeSeries(init=self.__valid_initial_data[0])
self.assertEqual(actual_ts, expected_ts)
def test___getitem___normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_item = actual_ts[self.__valid_initial_data[0][0][0]]
expected_ts = TimeSeries(init=self.__valid_initial_data[0])
expected_item = self.__valid_initial_data[0][0][1]
self.assertEqual(actual_item, expected_item)
self.assertEqual(len(actual_ts), len(expected_ts))
def test___getitem___slice(self):
ts = TimeSeries(init=self.__valid_initial_data[0])
actual_ts = ts[self.__valid_initial_data[0][1][0]:]
expected_ts = TimeSeries(init=self.__valid_initial_data[0][1:])
self.assertEqual(actual_ts, expected_ts)
def test___getitem___not_found(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
with self.assertRaises(KeyError):
item = actual_ts[self.__not_found_key]
def test___setitem___normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_ts[self.__valid_initial_data[0][0][0]] = self.__valid_initial_data[0][1][1]
self.assertEqual(actual_ts[self.__valid_initial_data[0][0][0]], self.__valid_initial_data[0][1][1])
def test___setitem___not_found(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_ts[self.__not_found_key] = self.__not_found_value
data = deepcopy(self.__valid_initial_data[0])
data.append((self.__not_found_key, self.__not_found_value))
expected_ts = TimeSeries(init=data)
self.assertEqual(actual_ts, expected_ts)
def test___delitem___normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
del actual_ts[self.__valid_initial_data[0][0][0]]
expected_ts = TimeSeries(init=self.__valid_initial_data[0][1:])
self.assertEqual(actual_ts, expected_ts)
def test___delitem___not_found(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
with self.assertRaises(KeyError):
del actual_ts[self.__not_found_key]
def test___eq___normal(self):
a = TimeSeries(init=self.__valid_initial_data[0])
b = TimeSeries(init=self.__valid_initial_data[0])
self.assertTrue(a == b)
def test___eq___unequal(self):
a = TimeSeries(init=self.__valid_initial_data[0])
b = TimeSeries(init=self.__valid_initial_data[1])
self.assertFalse(a == b)
def test___iter___normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_items = []
for k, v in actual_ts:
actual_items.append((k, v))
expected_items = deepcopy(self.__valid_initial_data[0])
expected_ts = TimeSeries(init=expected_items)
self.assertEqual(actual_items, expected_items)
self.assertEqual(actual_ts, expected_ts)
def test_insert_normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_ts.insert(self.__not_found_key, self.__not_found_value)
expected_values = deepcopy(self.__valid_initial_data[0])
expected_values.append((self.__not_found_key, self.__not_found_value))
expected_ts = TimeSeries(init=expected_values)
self.assertEqual(actual_ts, expected_ts)
def test_remove_normal(self):
actual_ts = TimeSeries(init=self.__valid_initial_data[0])
actual_ts.remove(self.__valid_initial_data[0][0][0])
expected_ts = TimeSeries(init=self.__valid_initial_data[0][1:])
self.assertEqual(actual_ts, expected_ts)
def test___add___normal(self):
a = TimeSeries(init=self.__valid_initial_data[0])
b = TimeSeries(init=self.__valid_initial_data[1])
c = a + b
expected_ts = TimeSeries(init=self.__valid_initial_data[0] +
self.__valid_initial_data[1])
self.assertEqual(c, expected_ts)
def test___sub___normal(self):
a = TimeSeries(init=self.__valid_initial_data[0])
b = TimeSeries(init=self.__valid_initial_data[0])
c = a - b
expected_ts = TimeSeries()
self.assertEqual(c, expected_ts)
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "jmcph4/skovo",
"score": 3
} |
#### File: skovo/skovo/ts.py
```python
from datetime import datetime
from plotly import graph_objs, offline
class TimeSeries(object):
"""
Representation of a time series
"""
def __init__(self, name, data, datetime_format=None):
self._name = str(name)
self._data = {}
if datetime_format is None:
datetime_format = "%Y-%m-%dT%H:%M:%S"
for pair in data:
self._data[datetime.strptime(pair[0], datetime_format)] = pair[1]
"""
The name of the time series
"""
@property
def name(self):
return self._name
"""
The underlying data as a dictionary whose keys are timestamps
"""
@property
def data(self):
return self._data
"""
Adds a new entry to the time series (where entry is a 2-tuple)
"""
def add(self, entry):
self._data[entry[0]] = entry[1]
"""
Removes the specified entry from the time series
"""
def remove(self, time):
self._data.pop(time)
def __getitem__(self, key):
return self.data[key]
    def __contains__(self, item):
        return item in self.data
def __len__(self):
return len(self.data)
def __repr__(self):
s = ""
for k, v in self.data.items():
s += str(k) + " " + str(v) + "\n"
return s
def _plot_layout(self):
return graph_objs.Layout(title=self.name, xaxis={"title": "Time"})
def plot(self):
data = []
pairs = []
x_data = []
y_data = []
for k, v in self.data.items():
pairs.append((k, v))
for p in sorted(pairs, key=lambda x: x[0]):
x_data.append(p[0])
y_data.append(p[1])
trace = graph_objs.Scatter(
x=x_data,
y=y_data)
layout = self._plot_layout()
data = [trace]
figure = graph_objs.Figure(data=data, layout=layout)
offline.plot(figure)
class ABSTimeSeries(TimeSeries):
"""
A representation of a time series from the Australian Bureau of Statistics
"""
def __init__(self, name, unit, series_type, data_type, frequency,
collection_month, series_start, series_end, num_obs,
series_id, data):
super().__init__(name, data, "%b-%Y")
self._unit = str(unit)
self._series_type = str(series_type)
self._data_type = str(data_type)
self._frequency = str(frequency)
self._collection_month = str(collection_month)
self._series_start = series_start
self._series_end = series_end
self._num_obs = int(num_obs)
self._series_id = str(series_id)
"""
The unit that the values of the time series are in
"""
@property
def unit(self):
return self._unit
"""
The type of time series
"""
@property
def series_type(self):
return self._series_type
"""
The type of data the time series stores (according to the ABS)
"""
@property
def data_type(self):
return self._data_type
"""
The frequency that the ABS collects data on
"""
@property
def frequency(self):
return self._frequency
"""
The month the ABS collects data for this time series
"""
@property
def collection_month(self):
return self._collection_month
"""
The start of the time series (according to the ABS)
"""
@property
def series_start(self):
return self._series_start
"""
The end of the time series (according to the ABS)
"""
@property
def series_end(self):
return self._series_end
"""
The number of observations in the time series (according to the ABS)
Note that this is not necessarily equal to the actual size of the time
series (for bounds-checking purposes, use len())
"""
@property
def num_obs(self):
return self._num_obs
"""
The ID assigned to the time series by the ABS
"""
@property
def series_id(self):
return self._series_id
def __str__(self):
return self.name + " (" + self.series_id + ")"
def _plot_layout(self):
return graph_objs.Layout(title=self.name, xaxis={"title": "Time"}, yaxis={"title": self.unit})
``` |
{
"source": "jmcph4/timekeeper",
"score": 3
} |
#### File: timekeeper/timekeeper/log.py
```python
from datetime import datetime
import sqlite3
from . import slice
class Log(object):
"""
Represents a series of slices, forming a log of how time was spent
"""
DT_FMT = "%Y-%m-%d %H:%M"
_COL_WIDTH = 15
def __init__(self, slices):
self._slices = {}
for s in slices:
self._slices[s.start] = (s, False)
@property
def slices(self):
sl = {}
for k, v in self._slices.items():
sl[k] = v[0]
return sl
def get_slice(self, dt):
"""
Returns the slice at the specified time
"""
return self._slices.get(dt)[0]
def set_slice(self, s, saved=False):
"""
Adds s to the log, overwriting any slice previously at that location
"""
self._slices[s.start] = (s, saved)
def __repr__(self):
s = "Start | End | Category | Description \n"
s += "-----------------+------------------+-----------------+-------------------------------\n"
for k, v in self._slices.items():
start_str = v[0].start.strftime(self.DT_FMT)
end_str = v[0].end.strftime(self.DT_FMT)
if not v[1]:
saved_notice = "(!)"
else:
saved_notice = ""
s += saved_notice + start_str + " | " + end_str + " | " + v[0].category + " " * (self._COL_WIDTH - len(v[0].category)) + " | " + v[0].description + "\n"
return s
def save(self, db_path):
"""
Saves the log to the specified database file by inserting each slice
into the SQL table
"""
conn = sqlite3.connect(db_path)
c = conn.cursor()
c.execute('''CREATE TABLE IF NOT EXISTS log (id INTEGER PRIMARY KEY AUTOINCREMENT, start DATETIME, end DATETIME, category VARCHAR, description TEXT)''')
for k, v in self._slices.items():
if not v[1]: # if not saved
start_str = v[0].start.strftime(self.DT_FMT)
end_str = v[0].end.strftime(self.DT_FMT)
data = (start_str, end_str, v[0].category, v[0].description)
c.execute('''INSERT INTO log (start, end, category, description) VALUES (?, ?, ?, ?)''', data)
conn.commit()
                self._slices[k] = (v[0], True)  # mark the slice as saved
conn.close()
def load(self, db_path):
"""
Loads a log from the specified database file by inserting each slice
into the log object from the SQL table
"""
conn = sqlite3.connect(db_path)
c = conn.cursor()
c.execute('''SELECT * FROM log''')
data = c.fetchall()
for d in data:
self.set_slice(slice.Slice(datetime.strptime(d[1], self.DT_FMT),
datetime.strptime(d[2], self.DT_FMT),
d[3], d[4]), True)
conn.close()
def __len__(self):
length = 0
for k, v in self._slices.items():
length += len(v[0])
return length
def category_aggregate(self):
"""
Returns a dictionary associating each category in the log with the total
number of minutes attributed to it
"""
categories = {}
for k, v in self._slices.items():
categories[v[0].category] = 0
for k, v in self._slices.items():
categories[v[0].category] += len(v[0])
return categories
def ranged_category_aggregate(self, start, end):
"""
Same as category_aggregate() but only applies to slices within the range
[start, end]
"""
new_slices = []
for k, v in self.slices.items():
if k > start and k < end:
new_slices.append(v)
tmp = Log(new_slices)
return tmp.category_aggregate()
```
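A minimal usage sketch of these two classes, assuming the package layout implied by the file paths (`timekeeper.log` and `timekeeper.slice`); the `Slice(start, end, category, description)` signature is taken from the slice module shown next.
```python
from datetime import datetime
from timekeeper.log import Log
from timekeeper.slice import Slice
# Two slices and a log; Log keys slices by their start time.
work = Slice(datetime(2020, 1, 1, 9, 0), datetime(2020, 1, 1, 10, 30), 'work', 'standup and email')
rest = Slice(datetime(2020, 1, 1, 10, 30), datetime(2020, 1, 1, 11, 0), 'break', 'coffee')
log = Log([work, rest])
print(len(log))                  # total minutes across all slices: 120
print(log.category_aggregate())  # {'work': 90, 'break': 30}
log.save('timelog.db')           # writes the unsaved slices to a SQLite file
```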
#### File: timekeeper/timekeeper/slice.py
```python
from datetime import datetime
MINUTES_IN_DAY = 60 * 24
SECONDS_IN_MINUTE = 60
class Slice(object):
"""
A period of time with a start and an end
"""
DT_FMT = "%Y-%m-%d %H:%M"
def __init__(self, start, end, category, description):
self._start = start
self._end = end
self._category = category
self._description = description
@property
def start(self):
return self._start
@property
def end(self):
return self._end
@property
def category(self):
return self._category
@property
def description(self):
return self._description
def __repr__(self):
s = self._start.strftime(self.DT_FMT)
s += ", "
s += self._end.strftime(self.DT_FMT)
s += ", "
s += self._category
s += ", "
s += self._description
return s
def __len__(self):
delta = self._end - self._start
length = delta.days * MINUTES_IN_DAY + (delta.seconds // SECONDS_IN_MINUTE)
return length
``` |
{
"source": "jmcrawford45/pystachio",
"score": 3
} |
#### File: pystachio/pystachio/base.py
```python
import copy
from pprint import pformat
from .naming import Namable, Ref
from .parsing import MustacheParser
from .typing import TypeCheck
class Environment(Namable):
"""
A mount table for Refs pointing to Objects or arbitrary string substitutions.
"""
__slots__ = ('_table',)
@staticmethod
def wrap(value):
if isinstance(value, dict):
return Environment(value)
elif isinstance(value, (Environment, Object)):
return value
else:
if isinstance(value, (int, float, str)):
return str(value)
else:
raise ValueError(
'Environment values must be strings, numbers, Objects or other Environments. '
'Got %s instead.' % type(value))
def _assimilate_dictionary(self, d):
for key, val in d.items():
val = Environment.wrap(val)
rkey = Ref.wrap(key)
if isinstance(val, Environment):
for vkey, vval in val._table.items():
self._table[rkey + vkey] = vval
else:
self._table[rkey] = val
def _assimilate_table(self, mt):
for key, val in mt._table.items():
self._table[key] = val
def __init__(self, *dicts, **kw):
self._table = {}
for d in list(dicts) + [kw]:
if isinstance(d, dict):
self._assimilate_dictionary(d)
elif isinstance(d, Environment):
self._assimilate_table(d)
else:
raise ValueError("Environment expects dict or Environment, got %s" % repr(d))
def find(self, ref):
if ref in self._table:
return self._table[ref]
targets = [key for key in self._table if Ref.subscope(key, ref)]
if not targets:
raise Namable.NotFound(self, ref)
else:
for key in sorted(targets, reverse=True):
scope = self._table[key]
if not isinstance(scope, Namable):
continue
subscope = Ref.subscope(key, ref)
# If subscope is empty, then we should've found it in the ref table.
assert not subscope.is_empty()
try:
resolved = scope.find(subscope)
return resolved
except Namable.Error:
continue
raise Namable.NotFound(self, ref)
def __repr__(self):
return 'Environment(%s)' % pformat(self._table)
class Object(object):
"""
Object base class, encapsulating a set of variable bindings scoped to this object.
"""
__slots__ = ('_scopes',)
class CoercionError(ValueError):
def __init__(self, src, dst, message=None):
error = "Cannot coerce '%s' to %s" % (src, dst.__name__)
ValueError.__init__(self, '%s: %s' % (error, message) if message else error)
class InterpolationError(Exception): pass
@classmethod
def checker(cls, obj):
raise NotImplementedError
def __init__(self):
self._scopes = ()
def get(self):
raise NotImplementedError
def __hash__(self):
si, _ = self.interpolate()
return hash(si.get())
def copy(self):
"""
Return a copy of this object.
"""
self_copy = self.dup()
self_copy._scopes = copy.copy(self._scopes)
return self_copy
@staticmethod
def translate_to_scopes(*args, **kw):
scopes = [arg if isinstance(arg, Namable) else Environment.wrap(arg)
for arg in args]
if kw:
scopes.append(Environment(kw))
return tuple(scopes)
def bind(self, *args, **kw):
"""
Bind environment variables into this object's scope.
"""
new_self = self.copy()
new_scopes = Object.translate_to_scopes(*args, **kw)
new_self._scopes = tuple(reversed(new_scopes)) + new_self._scopes
return new_self
def in_scope(self, *args, **kw):
"""
Scope this object to a parent environment (like bind but reversed.)
"""
new_self = self.copy()
new_scopes = Object.translate_to_scopes(*args, **kw)
new_self._scopes = new_self._scopes + new_scopes
return new_self
def scopes(self):
return self._scopes
def check(self):
"""
Type check this object.
"""
try:
si, uninterp = self.interpolate()
# TODO(wickman) This should probably be pushed out to the interpolate leaves.
except (Object.CoercionError, MustacheParser.Uninterpolatable) as e:
return TypeCheck(False, "Unable to interpolate: %s" % e)
return self.checker(si)
def __ne__(self, other):
return not (self == other)
def __mod__(self, namable):
if isinstance(namable, dict):
namable = Environment.wrap(namable)
interp, _ = self.bind(namable).interpolate()
return interp
def interpolate(self):
"""
Interpolate this object in the context of the Object's environment.
Should return a 2-tuple:
The object with as much interpolated as possible.
The remaining unbound Refs necessary to fully interpolate the object.
If the object is fully interpolated, it should be typechecked prior to
return.
"""
raise NotImplementedError
```
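As a rough illustration of how an `Environment` resolves refs, the sketch below wraps a plain dictionary and looks up nested keys. `Ref.from_address` comes from the naming module shown further down; the import paths are assumed from the package layout.
```python
from pystachio.base import Environment
from pystachio.naming import Ref
# Nested dicts are flattened into dotted refs; scalars are coerced to strings by Environment.wrap.
env = Environment({'db': {'host': 'localhost', 'port': 5432}}, user='admin')
print(env.find(Ref.from_address('db.host')))  # 'localhost'
print(env.find(Ref.from_address('db.port')))  # '5432' (stored as a string)
print(env.find(Ref.from_address('user')))     # 'admin'
```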
#### File: pystachio/pystachio/choice.py
```python
from .base import Object
from .typing import Type, TypeCheck, TypeFactory, TypeMetaclass
class ChoiceFactory(TypeFactory):
"""A Pystachio type representing a value which can be one of several
different types.
For example, a field which could be either an integer, or an integer
expression (where IntegerExpression is a struct type) could be written
Choice("IntOrExpr", (Integer, IntegerExpression))
"""
PROVIDES = 'Choice'
@staticmethod
def create(type_dict, *type_parameters):
"""
type_parameters should be:
(name, (alternative1, alternative2, ...))
where name is a string, and the alternatives are all valid serialized
types.
"""
assert len(type_parameters) == 2
name = type_parameters[0]
alternatives = type_parameters[1]
assert isinstance(name, str)
assert isinstance(alternatives, (list, tuple))
choice_types = []
for c in alternatives:
choice_types.append(TypeFactory.new(type_dict, *c))
return TypeMetaclass(str(name), (ChoiceContainer,), {'CHOICES': choice_types, 'TYPE_PARAMETERS': (str(name), tuple(t.serialize_type() for t in choice_types))
})
class ChoiceContainer(Object, Type):
"""The inner implementation of a choice type value.
This just stores a value, and then tries to coerce it into one of the alternatives when
it's checked or interpolated.
"""
__slots__ = ('_value',)
def __init__(self, val):
super(ChoiceContainer, self).__init__()
self._value = val
def get(self):
return self.unwrap().get()
def unwrap(self):
"""Get the Pystachio value that's wrapped in this choice."""
return self.interpolate()[0]
def dup(self):
return self.__class__(self._value)
def __hash__(self):
return hash(self.get())
def __unicode__(self):
return unicode(self.unwrap())
def __str__(self):
return str(self.unwrap())
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__,
repr(self._value))
def __eq__(self, other):
if not isinstance(other, ChoiceContainer):
return False
if len(self.CHOICES) != len(other.CHOICES):
return False
for myalt, otheralt in zip(self.CHOICES, other.CHOICES):
if myalt.serialize_type() != otheralt.serialize_type():
return False
si, _ = self.interpolate()
oi, _ = other.interpolate()
return si == oi
def _unwrap(self, ret_fun, err_fun):
"""Iterate over the options in the choice type, and try to perform some
action on them. If the action fails (returns None or raises either CoercionError
or ValueError), then it goes on to the next type.
Args:
ret_fun: a function that takes a wrapped option value, and either returns a successful
return value or fails.
err_fun: a function that takes the unwrapped value of this choice, and generates
an appropriate error.
Returns: the return value from a successful invocation of ret_fun on one of the
        type options. If no invocation succeeds, then returns the value of invoking err_fun.
"""
for opt in self.CHOICES:
if isinstance(self._value, opt):
return ret_fun(self._value)
else:
try:
o = opt(self._value)
ret = ret_fun(o)
if ret:
return ret
except (self.CoercionError, ValueError):
pass
return err_fun(self._value)
def check(self):
# Try each of the options in sequence:
# There are three cases for matching depending on the value:
# (1) It's a pystachio value, and its type is the type alternative. Then typecheck
# succeeds.
# (2) It's a pystachio value, but its type is not the current alternative. Then the
# typecheck proceeds to the next alternative.
# (3) It's not a pystachio value. Then we try to coerce it to the type alternative.
# If it succeeds, then the typecheck succeeds. Otherwise, it proceeds to the next
# type alternative.
        # If none of the type alternatives succeed, then the check fails.
def _check(v):
tc = v.in_scope(*self.scopes()).check()
if tc.ok():
return tc
def _err(v):
return TypeCheck.failure(
"%s typecheck failed: value %s did not match any of its alternatives" %
(self.__class__.__name__, v))
return self._unwrap(_check, _err)
def interpolate(self):
def _inter(v):
return v.in_scope(*self.scopes()).interpolate()
def _err(v):
raise self.CoercionError(self._value, self.__class__)
return self._unwrap(_inter, _err)
@classmethod
def type_factory(cls):
return 'Choice'
@classmethod
def type_parameters(cls):
return cls.TYPE_PARAMETERS
@classmethod
def serialize_type(cls):
return (cls.type_factory(),) + cls.type_parameters()
def Choice(*args):
"""Helper function for creating new choice types.
This can be called either as:
Choice(Name, [Type1, Type2, ...])
or:
Choice([Type1, Type2, ...])
In the latter case, the name of the new type will be autogenerated, and will
look like "Choice_Type1_Type2".
"""
if len(args) == 2:
name, alternatives = args
else:
name = "Choice_" + "_".join(a.__name__ for a in args[0])
alternatives = args[0]
assert isinstance(name, str)
assert all(issubclass(t, Type) for t in alternatives)
return TypeFactory.new({}, ChoiceFactory.PROVIDES, name,
tuple(t.serialize_type() for t in alternatives))
```
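A hedged sketch of the `Choice` helper. `Integer` and `String` are assumed to be the scalar types exported by the wider pystachio package (they are not part of this excerpt); the point is only that a choice value type-checks against whichever alternative it can be coerced to.
```python
from pystachio import Integer, String      # assumed exports from the wider library
from pystachio.choice import Choice
IntOrStr = Choice('IntOrStr', (Integer, String))
v = IntOrStr(42)
print(v.check().ok())   # True: 42 coerces to the Integer alternative
print(v.unwrap())       # the underlying pystachio value after interpolation
```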
#### File: pystachio/pystachio/naming.py
```python
from functools import lru_cache
import re
class frozendict(dict):
"""A hashable dictionary."""
def __key(self):
return tuple((k, self[k]) for k in sorted(self))
def __hash__(self):
return hash(self.__key())
def __eq__(self, other):
return self.__key() == other.__key()
def __ne__(self, other):
return self.__key() != other.__key()
def __repr__(self):
return 'frozendict(%s)' % dict.__repr__(self)
class Namable(object):
"""
An object that can be named/dereferenced.
"""
class Error(Exception): pass
class Unnamable(Error):
def __init__(self, obj):
super(Namable.Unnamable, self).__init__('Object is not indexable: %s' %
obj.__class__.__name__)
class NamingError(Error):
def __init__(self, obj, ref):
super(Namable.NamingError, self).__init__('Cannot dereference object %s by %s' % (
obj.__class__.__name__, ref.action()))
class NotFound(Error):
def __init__(self, obj, ref):
super(Namable.NotFound, self).__init__('Could not find %s in object %s' % (ref.action().value,
obj.__class__.__name__))
def find(self, ref):
"""
Given a ref, return the value referencing that ref.
Raises Namable.NotFound if not found.
Raises Namable.NamingError if try to dereference object in an invalid way.
Raises Namable.Unnamable if try to dereference into an unnamable type.
"""
raise NotImplementedError
class Ref(object):
"""
A reference into to a hierarchically named object.
"""
# ref re
# ^[^\d\W]\w*\Z
_DEREF_RE = r'[^\d\W]\w*'
_INDEX_RE = r'[\w\-\./]+'
_REF_RE = re.compile(r'(\.' + _DEREF_RE + r'|\[' + _INDEX_RE + r'\])')
_VALID_START = re.compile(r'[a-zA-Z_]')
_COMPONENT_SEPARATOR = '.'
class Component(object):
def __init__(self, value):
self._value = value
@property
def value(self):
return self._value
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.value == other.value
def __ne__(self, other):
return not (self == other)
def __lt__(self, other):
return self.value < other.value
def __gt__(self, other):
return self.value > other.value
class Index(Component):
RE = re.compile('^[\w\-\./]+$')
def __repr__(self):
return '[%s]' % self._value
class Dereference(Component):
RE = re.compile('^[^\d\W]\w*$')
def __repr__(self):
return '.%s' % self._value
class InvalidRefError(Exception): pass
class UnnamableError(Exception): pass
@staticmethod
def wrap(value):
if isinstance(value, Ref):
return value
else:
return Ref.from_address(value)
@staticmethod
@lru_cache(maxsize=128)
def from_address(address):
components = []
if not address or not isinstance(address, str):
raise Ref.InvalidRefError('Invalid address: %s' % repr(address))
if not (address.startswith('[') or address.startswith('.')):
if Ref._VALID_START.match(address[0]):
components = Ref.split_components('.' + address)
else:
raise Ref.InvalidRefError(address)
else:
components = Ref.split_components(address)
return Ref(components)
def __init__(self, components):
self._components = tuple(components)
self._hash = None
def components(self):
return self._components
def action(self):
return self._components[0]
def is_index(self):
return isinstance(self.action(), Ref.Index)
def is_dereference(self):
return isinstance(self.action(), Ref.Dereference)
def is_empty(self):
return len(self.components()) == 0
def rest(self):
return Ref(self.components()[1:])
@lru_cache(maxsize=128)
def __add__(self, other):
sc = self.components()
oc = other.components()
return Ref(sc + oc)
@staticmethod
@lru_cache(maxsize=10000)
def subscope(ref1, ref2):
rc = ref1.components()
sc = ref2.components()
if rc == sc[0:len(rc)]:
if len(sc) > len(rc):
return Ref(sc[len(rc):])
def scoped_to(self, ref):
return Ref.subscope(self, ref)
@staticmethod
def split_components(address):
def map_to_namable(component):
if (component.startswith('[') and component.endswith(']') and
Ref.Index.RE.match(component[1:-1])):
return Ref.Index(component[1:-1])
elif component.startswith('.') and Ref.Dereference.RE.match(component[1:]):
return Ref.Dereference(component[1:])
else:
raise Ref.InvalidRefError('Address %s has bad component %s' % (address, component))
splits = Ref._REF_RE.split(address)
if any(splits[0::2]):
raise Ref.InvalidRefError('Badly formed address %s' % address)
splits = splits[1::2]
return [map_to_namable(spl) for spl in splits]
def address(self):
joined = ''.join(str(comp) for comp in self._components)
if joined.startswith('.'):
return joined[1:]
else:
return joined
def __str__(self):
return '{{%s}}' % self.address()
def __repr__(self):
return 'Ref(%s)' % self.address()
def __eq__(self, other):
return self.components() == other.components()
def __ne__(self, other):
return self.components() != other.components()
@staticmethod
def compare(self, other):
if len(self.components()) < len(other.components()):
return -1
elif len(self.components()) > len(other.components()):
return 1
else:
return (self.components() > other.components()) - (self.components() < other.components())
def __lt__(self, other):
return Ref.compare(self, other) == -1
def __gt__(self, other):
return Ref.compare(self, other) == 1
def __hash__(self):
if not self._hash:
self._hash = hash(self.components())
return self._hash
```
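A short sketch of `Ref` construction and subscoping, using only the behaviour visible in the module above.
```python
from pystachio.naming import Ref
outer = Ref.from_address('task.processes')
inner = Ref.from_address('task.processes[0].name')
print(outer.address())                  # 'task.processes'
print(Ref.subscope(outer, inner))       # '{{[0].name}}' -- the part of inner below outer
print(outer + Ref.from_address('cmd'))  # '{{task.processes.cmd}}'
```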
#### File: pystachio/pystachio/typing.py
```python
from .naming import frozendict
class TypeCheck(object):
"""
Encapsulate the results of a type check pass.
"""
class Error(Exception):
pass
@staticmethod
def success():
return TypeCheck(True, "")
@staticmethod
def failure(msg):
return TypeCheck(False, msg)
def __init__(self, success, message):
self._success = success
self._message = message
def message(self):
return self._message
def ok(self):
return self._success
def __repr__(self):
if self.ok():
return 'TypeCheck(OK)'
else:
return 'TypeCheck(FAILED): %s' % self._message
class TypeFactoryType(type):
_TYPE_FACTORIES = {}
def __new__(mcs, name, parents, attributes):
"""Args:
mcs(metaclass): the class object to create an instance of. Since this is actually
creating an instance of a type factory class, it's really a metaclass.
name (str): the name of the type to create.
parents (list(class)): the superclasses.
attributes (map(string, value)):
"""
if 'PROVIDES' not in attributes:
return type.__new__(mcs, name, parents, attributes)
else:
provides = attributes['PROVIDES']
new_type = type.__new__(mcs, name, parents, attributes)
TypeFactoryType._TYPE_FACTORIES[provides] = new_type
return new_type
TypeFactoryClass = TypeFactoryType('TypeFactoryClass', (object,), {})
class TypeFactory(TypeFactoryClass):
@staticmethod
def get_factory(type_name):
assert type_name in TypeFactoryType._TYPE_FACTORIES, (
'Unknown type: %s, Existing factories: %s' % (
type_name, TypeFactoryType._TYPE_FACTORIES.keys()))
return TypeFactoryType._TYPE_FACTORIES[type_name]
@staticmethod
def create(type_dict, *type_parameters, **kwargs):
"""
Implemented by the TypeFactory to produce a new type.
Should return:
reified type
(with usable type.__name__)
"""
raise NotImplementedError("create unimplemented for: %s" % repr(type_parameters))
@staticmethod
def new(type_dict, type_factory, *type_parameters, **kwargs):
"""
Create a fully reified type from a type schema.
"""
type_tuple = (type_factory,) + type_parameters
if type_tuple not in type_dict:
factory = TypeFactory.get_factory(type_factory)
reified_type = factory.create(type_dict, *type_parameters, **kwargs)
type_dict[type_tuple] = reified_type
return type_dict[type_tuple]
@staticmethod
def wrapper(factory):
assert issubclass(factory, TypeFactory)
def wrapper_function(*type_parameters):
return TypeFactory.new({}, factory.PROVIDES, *tuple(
[typ.serialize_type() for typ in type_parameters]))
return wrapper_function
@staticmethod
def load(type_tuple, into=None):
"""
Determine all types touched by loading the type and deposit them into
the particular namespace.
"""
type_dict = {}
TypeFactory.new(type_dict, *type_tuple)
deposit = into if (into is not None and isinstance(into, dict)) else {}
for reified_type in type_dict.values():
deposit[reified_type.__name__] = reified_type
return deposit
@staticmethod
def load_json(json_list, into=None):
"""
Determine all types touched by loading the type and deposit them into
the particular namespace.
"""
def l2t(obj):
if isinstance(obj, list):
return tuple(l2t(L) for L in obj)
elif isinstance(obj, dict):
return frozendict(obj)
else:
return obj
return TypeFactory.load(l2t(json_list), into=into)
@staticmethod
def load_file(filename, into=None):
import json
with open(filename) as fp:
return TypeFactory.load_json(json.load(fp), into=into)
class TypeMetaclass(type):
def __instancecheck__(cls, other):
if not hasattr(other, 'type_parameters'):
return False
if not hasattr(other, '__class__'):
return False
if cls.__name__ != other.__class__.__name__:
return False
return cls.type_factory() == other.type_factory() and (
cls.type_parameters() == other.type_parameters())
def __new__(mcls, name, parents, attributes):
"""Creates a new Type object (an instance of TypeMetaclass).
Args:
name (str): the name of the new type.
parents (list(str)): a list of superclasses.
attributes: (???): a map from name to value for "parameters" for defining
the new type.
"""
return type.__new__(mcls, name, parents, attributes)
class Type(object):
@classmethod
def type_factory(cls):
""" Return the name of the factory that produced this class. """
raise NotImplementedError
@classmethod
def type_parameters(cls):
""" Return the type parameters used to produce this class. """
raise NotImplementedError
@classmethod
def serialize_type(cls):
return (cls.type_factory(),) + cls.type_parameters()
@classmethod
def dump(cls, fp):
import json
json.dump(cls.serialize_type(), fp)
def check(self):
"""
Returns a TypeCheck object explaining whether or not a particular
instance of this object typechecks.
"""
raise NotImplementedError
``` |
{
"source": "jmcriffey/django-user-guide",
"score": 2
} |
#### File: jmcriffey/django-user-guide/settings.py
```python
import os
import django
from django.conf import settings
def configure_settings():
if not settings.configured:
# Determine the database settings depending on if a test_db var is set in CI mode or not
test_db = os.environ.get('DB', None)
if test_db is None:
db_config = {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'ambition_dev',
'USER': 'ambition_dev',
'PASSWORD': '<PASSWORD>',
'HOST': 'localhost'
}
elif test_db == 'postgres':
db_config = {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'USER': 'postgres',
'NAME': 'user_guide',
}
else:
raise RuntimeError('Unsupported test DB {0}'.format(test_db))
settings.configure(
DATABASES={
'default': db_config,
},
MIDDLEWARE_CLASSES=(
'django.middleware.csrf.CsrfViewMiddleware',
),
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'user_guide',
'user_guide.tests',
) + (('south',) if django.VERSION[1] <= 6 else ()),
ROOT_URLCONF='user_guide.urls',
DEBUG=False,
USER_GUIDE_SHOW_MAX=5,
USER_GUIDE_CSS_URL='custom-style.css',
USER_GUIDE_JS_URL='custom-script.js',
STATIC_URL='/static/',
SECRET_KEY='somethignmadeup',
)
```
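A minimal sketch of how this helper is typically driven; the `DB` environment variable name comes from the code above, while the invocation itself is an assumption about how a test runner would call it.
```python
import os
os.environ['DB'] = 'postgres'            # select the CI database profile instead of the local default
from settings import configure_settings  # assumed import path for this module
configure_settings()  # no-op if Django settings were already configured
```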
#### File: django-user-guide/user_guide/models.py
```python
from django.contrib.auth.models import User
from django.db import models
import six
@six.python_2_unicode_compatible
class Guide(models.Model):
"""
Describes a guide to be tied to any number of users.
"""
# The html that should be rendered in a guide.
html = models.TextField()
# The type of guide to render. The only guide type currently supported is 'Window.'
guide_type = models.CharField(max_length=16, choices=(('WINDOW', 'Window'),), default='WINDOW')
# The name of the guide. Mainly for display purposes.
guide_name = models.CharField(max_length=64, unique=True)
# A tag for the given guide. For filtering purposes.
guide_tag = models.TextField(default='all')
# An ordering parameter for the guide. To show a guide first, give it a larger guide_importance.
guide_importance = models.IntegerField(default=0)
# The creation time of the guide.
creation_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
return str(self.guide_name)
class GuideInfo(models.Model):
"""
Ties a guide to a user.
"""
# The user that should see this guide.
user = models.ForeignKey(User)
# The guide that should be shown to the user.
guide = models.ForeignKey(Guide)
# Has the guide been seen by a user?
is_finished = models.BooleanField(default=False)
# Save the finished time for convenience
finished_time = models.DateTimeField(null=True, blank=True)
class Meta:
unique_together = ('user', 'guide')
ordering = ['-guide__guide_importance', 'guide__creation_time']
```
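A hedged sketch of wiring a guide to a user with these models, assuming a working Django environment with the `user_guide` app installed; the guide content below is invented for illustration.
```python
from django.contrib.auth.models import User
from user_guide.models import Guide, GuideInfo
user = User.objects.create_user('alice', 'alice@example.com', 'secret')
guide = Guide.objects.create(
    html='<p>Welcome to the dashboard!</p>',
    guide_name='dashboard-intro',
    guide_tag='onboarding',
    guide_importance=10,
)
# Tie the guide to the user; it keeps surfacing until is_finished is set.
GuideInfo.objects.create(user=user, guide=guide)
```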
#### File: user_guide/templatetags/user_guide_tags.py
```python
import re
from django import template
from django.conf import settings
from django.template import loader
from django.template.defaulttags import CsrfTokenNode
from user_guide.models import GuideInfo
register = template.Library()
# The maximum number of guides to show per page
USER_GUIDE_SHOW_MAX = getattr(settings, 'USER_GUIDE_SHOW_MAX', 10)
# Use cookies to determine if guides should be shown
USER_GUIDE_USE_COOKIES = getattr(settings, 'USER_GUIDE_USE_COOKIES', False)
# The url to any custom CSS
USER_GUIDE_CSS_URL = getattr(
settings,
'USER_GUIDE_CSS_URL',
None
)
# The url to any custom JS
USER_GUIDE_JS_URL = getattr(
settings,
'USER_GUIDE_JS_URL',
None
)
@register.simple_tag(takes_context=True)
def user_guide(context, *args, **kwargs):
"""
Creates html items for all appropriate user guides.
Kwargs:
guide_name: A string name of a specific guide.
guide_tags: An array of string guide tags.
        limit: An integer maximum number of guides to show at a single time.
Returns:
An html string containing the user guide scaffolding and any guide html.
"""
user = context['request'].user if 'request' in context and hasattr(context['request'], 'user') else None
    if user and user.is_authenticated():  # only render guides for authenticated users
limit = kwargs.get('limit', USER_GUIDE_SHOW_MAX)
filters = {
'user': user,
'is_finished': False
}
# Handle special filters
if kwargs.get('guide_name'):
filters['guide__guide_name'] = kwargs.get('guide_name')
if kwargs.get('guide_tags'):
filters['guide__guide_tag__in'] = kwargs.get('guide_tags')
# Set the html
html = ''.join((
'<div data-guide="{0}" class="django-user-guide-item">{1}</div>'.format(
guide_info.id,
guide_info.guide.html
) for guide_info in GuideInfo.objects.select_related('guide').filter(**filters).only('guide')[:limit]
))
# Return the rendered template with the guide html
return loader.render_to_string('user_guide/window.html', {
'html': re.sub(r'\{\s*static\s*\}', settings.STATIC_URL, html),
'css_href': '{0}user_guide/build/django-user-guide.css'.format(settings.STATIC_URL),
'js_src': '{0}user_guide/build/django-user-guide.js'.format(settings.STATIC_URL),
'custom_css_href': USER_GUIDE_CSS_URL,
'custom_js_src': USER_GUIDE_JS_URL,
'use_cookies': str(USER_GUIDE_USE_COOKIES).lower(),
'csrf_node': CsrfTokenNode().render(context)
})
else:
return ''
```
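For completeness, a sketch of rendering the tag from Python; in practice `{% user_guide %}` lives in a template file, and the context must carry a `request` whose user is authenticated for any guide HTML to be emitted. The template string below is an assumed example, not taken from the repository.
```python
from django.template import Context, Template
def render_guides(request):
    # guide_name matches Guide.guide_name; limit caps how many guides are rendered at once.
    tpl = Template('{% load user_guide_tags %}{% user_guide guide_name="dashboard-intro" limit=3 %}')
    return tpl.render(Context({'request': request}))  # request.user must be authenticated
```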
#### File: user_guide/tests/no_admin_tests.py
```python
import six
if six.PY3: # pragma: no cover
from importlib import reload
from django.conf import settings
from django.test import TestCase
from user_guide import admin
class NoAdminTest(TestCase):
"""
Tests loading of the admin module when django.contrib.admin is not installed.
"""
def test_no_admin(self):
with self.settings(INSTALLED_APPS=[app for app in settings.INSTALLED_APPS if app != 'django.contrib.admin']):
reload(admin)
self.assertIsNotNone(admin)
```
#### File: django-user-guide/user_guide/views.py
```python
from django.http import HttpResponse
from django.views.generic import View
from user_guide import models
class GuideSeenView(View):
def post(self, request):
guide_id = request.POST.get('id', 0)
is_finished = request.POST.get('is_finished', False)
guide_info = models.GuideInfo.objects.get(id=guide_id)
if guide_info and guide_info.user.id == request.user.id and is_finished:
guide_info.is_finished = True
guide_info.save()
return HttpResponse(status=200)
``` |
{
"source": "JmcRobbie/murSlam",
"score": 3
} |
#### File: murSlam/src/slam.py
```python
import numpy as np
import rospy
from std_msgs.msg import String
from sensor_msgs.msg import PointCloud2
import sensor_msgs.point_cloud2 as pc2
from ekfslam import EkfSlam
from raceCarSensors import racecarSensors
from slamTopics import slamTopics
def main():
topics = slamTopics()
sense = racecarSensors(topics)
rospy.init_node('slam', anonymous=False)
'''
Call subscribers to various important nodes
'''
rospy.Subscriber("/lidar/cones", PointCloud2, sense.lidarCallback)
rospy.Subscriber("/camera/cones", PointCloud2, sense.cameraCallback)
'''
Slam begin!
'''
'''
Rospy spin the node
'''
rospy.spin()
if __name__ == "__main__":
main()
``` |
{
"source": "JmcRobbie/novaRoverDemos",
"score": 3
} |
#### File: a_star_family/src/visuals.py
```python
import statistics
import matplotlib.pyplot as plt
import numpy as np
# The function responsible for displaying the plots in the screen
def visualiser(time_stats, memory_stats, path_stats):
# Converting to appropriate data
func_names = []
performance = []
error = []
peak_memory = []
avg_path = []
for name, number in time_stats.items():
func_names.append(name)
performance.append(statistics.mean(number))
error.append(statistics.stdev(number))
peak_memory.append(memory_stats[name])
avg_path.append(path_stats[name] / len(number))
y_pos = np.arange(len(func_names))
# Plotting the runtime performance
fig1 = plt.figure(figsize=(10, 10))
ax1 = fig1.add_subplot(211)
ax1.barh(y_pos, performance, xerr=error, align='center',
color='green', ecolor='black')
ax1.set_yticks(y_pos)
ax1.set_yticklabels(func_names)
# Read labels top to bottom
ax1.invert_yaxis()
# Labels
ax1.set_xscale('log')
ax1.set_xlabel('Mean Runtime (ms)')
ax1.set_title('Runtime Comparison')
# Plotting path visuals
ax_path = fig1.add_subplot(212)
ax_path.barh(y_pos, avg_path, align='center',
color='purple', ecolor='black')
# Setting y-axis labels
ax_path.set_yticks(y_pos)
ax_path.set_yticklabels(func_names)
# Adding x-axis labels
# Read labels top to bottom
ax_path.invert_yaxis()
ax_path.set_xlabel('Path Length')
ax_path.set_title('Distance Travelled')
# Adding some padding between layouts
fig1.tight_layout(pad=4.0)
# Plotting the memory performance
fig2 = plt.figure(figsize=(10, 10))
ax2 = fig2.add_subplot()
ax2.barh(y_pos, peak_memory, align='center')
ax2.set_yticks(y_pos)
ax2.set_yticklabels(func_names)
# Read labels top to bottom
ax2.invert_yaxis()
# Labels
ax2.set_xlabel('Peak Memory Use (KB)')
ax2.set_title('Memory Usage Comparison')
fig2.tight_layout()
# Show the plot
plt.show()
# A function to draw the grid with path found by each of the algorithms
def plot_diagram(functions, args, maze_x, maze_y):
# Loop through all the algorithms
for func in functions:
path, status = func(*args)
# Creating an identify matrix of given dimensions
grid = np.ones([maze_x, maze_y])
# Populate different kinds of grids
for i in args[0]:
grid[i] = 0
for j in path:
grid[j] = 2
grid[path[0]] = 3
grid[path[-1]] = 4
# Create a figure and save it
plt.imshow(grid.T)
plt.colorbar()
filename = "results/" + func.__name__ + ".pdf"
plt.savefig(filename)
plt.close()
```
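A small, self-contained sketch of calling `visualiser` directly with hand-made statistics; the keys just need to agree across the three dicts, and the numbers below are invented for illustration.
```python
from visuals import visualiser   # import path assumed from the repo layout above
time_stats = {'a_star': [12.1, 11.8, 12.5], 'dijkstra': [20.3, 19.7, 21.0]}  # ms per run
memory_stats = {'a_star': 480, 'dijkstra': 610}                              # peak memory per algorithm
path_stats = {'a_star': 90.0, 'dijkstra': 84.0}                              # summed path lengths
visualiser(time_stats, memory_stats, path_stats)  # shows the runtime/path and memory figures
```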
#### File: python_benchmarking/src/benchmarker.py
```python
import math
import time
import random
import statistics
import tracemalloc
import sys
'''
The core function which handles the benchmarking of different algorithms
@param functions - A tuple containing the functions we want to compare
@param args - A tuple containing the arguments we want to pass into each function
'''
def benchmarker(functions, args):
# Determining the number of iterations to be made
iterations = 100 if len(sys.argv) < 2 else int(sys.argv[1])
# Dictionary to hold the runtime of the comparing functions
times = {f.__name__: [] for f in functions}
# Dictionary to hold memory
peak_memory = {f.__name__: 0 for f in functions}
# Loading the arguments to proper functions
argument_dict = {}
for i in range(len(functions)):
argument_dict[functions[i].__name__] = args[i]
    # Run the functions in a random interleaving for the requested number of iterations
for i in range(iterations):
for _ in range(len(functions)):
# Choose a function randomly from the list and load its arguments
func = random.choice(functions)
func_args = argument_dict[func.__name__]
# Time its execution start tracing memory allocation
t0 = time.time()
tracemalloc.start()
# Run the functions with the arguments
func(*func_args)
# Stop memory tracing
peak = tracemalloc.get_traced_memory()[1]
tracemalloc.stop()
# Stop timer
t1 = time.time()
times[func.__name__].append((t1-t0)*1000)
peak_memory[func.__name__] = peak \
if peak > peak_memory[func.__name__] else peak_memory[func.__name__]
#Printing the statistics
print()
for name, numbers in times.items():
print('FUNCTION:', name, 'Run', len(numbers), 'times')
print('\tMEDIAN:', statistics.median(numbers), 'ms')
print('\tMEAN:', statistics.mean(numbers), 'ms')
print('\tSTDEV:', statistics.stdev(numbers), 'ms')
print('\tPEAK MEMORY: ', peak_memory[name], 'KB')
``` |
{
"source": "jmcroft7/dadpro",
"score": 3
} |
#### File: flask_app/config/mysqlconnection.py
```python
import pymysql.cursors
class MySQLConnection:
def __init__(self, db):
connection = pymysql.connect(host='localhost',
user='root',
password='<PASSWORD>',
db=db,
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor,
autocommit=True)
self.connection = connection
def query_db(self, query, data=None):
with self.connection.cursor() as cursor:
try:
query = cursor.mogrify(query, data)
print("Running Query:", query)
executable = cursor.execute(query, data)
if query.lower().find("insert") >= 0:
self.connection.commit()
return cursor.lastrowid
elif query.lower().find("select") >= 0:
result = cursor.fetchall()
return result
else:
self.connection.commit()
except Exception as e:
print("Something went wrong", e)
return False
finally:
self.connection.close()
def connectToMySQL(db):
return MySQLConnection(db)
```
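A quick sketch of the query helper in use; the `dadjoke_clone` schema and the `users` table match the models further down, and the email value is invented.
```python
from flask_app.config.mysqlconnection import connectToMySQL
# Data is passed separately and escaped via cursor.mogrify, so values are parameterised.
data = {'email': 'alice@example.com'}
rows = connectToMySQL('dadjoke_clone').query_db(
    'SELECT * FROM users WHERE email = %(email)s;', data)
# Note: query_db closes the connection in its finally block, so a fresh
# connectToMySQL(...) call is needed for every query.
```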
#### File: flask_app/models/author.py
```python
from flask_app import app
from flask import render_template, request, redirect, jsonify, session
from flask import flash
import requests
# class for User
class Author:
def __init__(self, id, contents):
self.id = id
self.contents = contents
# classmethods
# ==========================================================
# retrieve office character name
@classmethod
def getauthor(cls):
rawauthor = requests.get('https://officeapi.dev/api/characters/random')
words2 = rawauthor.json()
print(words2['data'])
endauthor = words2['data']
return endauthor
```
#### File: flask_app/models/joke.py
```python
from flask_app import app
from flask import render_template, request, redirect, jsonify, session
from flask import flash
import requests
# class for User
class Joke:
def __init__(self, id, contents):
self.id = id
self.contents = contents
# classmethods
# ==========================================================
# retrieve joke
@classmethod
def getquote(cls):
rawjoke = requests.get('https://icanhazdadjoke.com', headers={"Accept": "application/json"})
words = rawjoke.json()
print(words)
endjoke = words['joke']
return endjoke
# ==========================================================
# get list of jokes
@classmethod
def getlist(cls):
rawlist = requests.get('https://icanhazdadjoke.com/search', headers={"Accept": "application/json"})
list = rawlist.json()
print(list)
endlist = list['results']
return endlist
# ==========================================================
# count how many jokes retrieved
@classmethod
def getlistcount(cls):
rawlist = requests.get('https://icanhazdadjoke.com', headers={"Accept": "application/json"})
list = rawlist.json()
print(list)
endlist = list['total_jokes']
return endlist
# ==========================================================
# parse through jokes using keyword
@classmethod
def searching(cls, data):
rawlist = requests.get(f'https://icanhazdadjoke.com/search?term={data}', headers={"Accept": "application/json"})
list = rawlist.json()
print(list)
endlist = list['results']
return endlist
# ==========================================================
# count how many jokes are recieved with the parse
@classmethod
def counting(cls, data):
rawlist = requests.get(f'https://icanhazdadjoke.com/search?term={data}', headers={"Accept": "application/json"})
list = rawlist.json()
print(list)
endlist = list['total_jokes']
return endlist
```
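Usage is a one-liner per classmethod; for example, a keyword search (the keyword is arbitrary):
```python
from flask_app.models.joke import Joke
jokes = Joke.searching('cat')   # list of joke dicts returned by the search endpoint
total = Joke.counting('cat')    # how many jokes the API reports for that keyword
print(total, jokes[0]['joke'] if jokes else 'no matches')
```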
#### File: flask_app/models/user.py
```python
from flask_app import app
from flask_app.config.mysqlconnection import connectToMySQL
from flask import flash
import re
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
from flask_bcrypt import Bcrypt
bcrypt = Bcrypt(app)
# class for User
class User:
def __init__(self, data):
self.id = data['id']
self.first_name = data['first_name']
self.last_name = data['last_name']
self.email = data['email']
self.password = data['password']
self.bg_color = data['bg_color']
self.created_at = data['created_at']
self.updated_at = data['updated_at']
# staticmethods
# =============================================
@staticmethod
def validate_user(data):
is_valid = True
if len(data['first_name']) < 2:
flash('The first name minimum length is 3 characters!', 'firstname')
is_valid = False
if len(data['last_name']) < 2:
flash('The last name minimum length is 3 characters!', 'lastname')
is_valid = False
if len(data['email']) < 6:
flash('The email minimum must be at least 7 characters long to be valid!', 'email')
is_valid = False
if not EMAIL_REGEX.match(data['email']):
flash('Email is not valid! make sure it has an @ symbol and a .com/net/org etc!')
is_valid = False
if len(data['password']) < 7:
flash('The password minimum must be at least 8 characters long!', 'password')
is_valid = False
if data['conpass'] != data['password']:
flash('The passwords must match!', 'password2')
is_valid = False
return is_valid
# classmethods
# ==========================================================
# check if email already exists
@classmethod
def get_by_email(cls, data):
query = 'SELECT * FROM users WHERE email = %(email)s;'
results = connectToMySQL('dadjoke_clone').query_db(query, data)
# didnt find a matching user
if len(results) < 1:
return False
return cls(results[0])
# ==========================================================
# save registration into a user
@classmethod
def save(cls, data):
query = "INSERT INTO users (first_name, last_name, email, password, updated_at) VALUES (%(first_name)s, %(last_name)s, %(email)s, %(password)s, NOW());"
results = connectToMySQL("dadjoke_clone").query_db(query, data)
return results
# ==========================================================
# retrieve user info
@classmethod
def user_info(cls, data):
query = "SELECT * FROM users WHERE id = %(id)s"
results = connectToMySQL('dadjoke_clone').query_db(query, data)
return cls(results[0])
# ==========================================================
# update user info
@classmethod
def updateUser(cls, data):
query = "UPDATE users SET bg_color = %(keycolor)s, updated_at = NOW() WHERE id = %(id)s;"
results = connectToMySQL('dadjoke_clone').query_db(query, data)
return results
# ==========================================================
# delete user info
@classmethod
def deleteUser(cls, data):
query = "DELETE FROM users WHERE id = %(id)s;"
results = connectToMySQL('dadjoke_clone').query_db(query, data)
return results
``` |
{
"source": "jmcroft7/randomAlgo",
"score": 4
} |
#### File: randomAlgo/basics-py/Basics.py
```python
def oneTo255():
for i in range(1, 256, 1):
print(i);
oneTo255();
print("=====");
# Print odds 1-255
def oddsTo255():
for i in range(1, 256, 2):
print(i);
oddsTo255();
print("=====");
# Sigma of 255
def sigma255():
sum = 0
for i in range(1, 256, 1):
sum += i;
print(sum);
sigma255();
print("=====");
# Iterating through an array
def IterateArray(arr):
length = len(arr);
for i in range(length):
print(arr[i]);
arr1 = [1,2,3,4,5,6,7];
IterateArray(arr1);
print("=====");
# Find max value in array
def findMax(arr):
length = len(arr);
maxVal = arr[0];
    for i in range(1, length):
        if (arr[i] > maxVal):
            maxVal = arr[i];
print(maxVal);
findMax(arr1);
print("=====");
# Create an array consisting of only odds.
def oddArray(num):
oddArr = []
    for i in range(1, num + 1, 2):
oddArr += [i]
print(oddArr);
oddArray(10);
print("=====");
``` |
{
"source": "jmcrumb/open-virtual-assistant",
"score": 3
} |
#### File: nova-core/nlp/nlp.py
```python
import io
from threading import Semaphore
import gtts
import speech_recognition as sr
from playsound import playsound
UNKNOWN_VALUE_ERROR = sr.UnknownValueError
class SpeechRecognition:
'''Singleton implmentation of Speech Recognition utility functions'''
_instance = None
def __new__(cls):
if cls._instance is None:
cls._instance = super(SpeechRecognition, cls).__new__(cls)
cls._instance._initialization_routine()
return cls._instance
def _initialization_routine(self) -> None:
self.recognizer = sr.Recognizer()
# self.tts_engine = pyttsx3.init()
self._mutex = Semaphore()
# pygame.mixer.init()
# def set_voice(self, voice_index: int) -> None:
# voices = self.tts_engine.getProperty('voices')
# self.tts_engine.setProperty('voice', voices[voice_index].id)
    # NOTE: this overload is immediately shadowed by the definition below;
    # Python keeps only the last method bound to a given name.
    def speech_to_text(self, input_: str) -> str:
        '''Speech to text which accepts str'''
        return input_
def speech_to_text(self, timeout=None) -> str:
text: str = ''
with sr.Microphone() as source:
self._mutex.acquire()
try:
self.recognizer.adjust_for_ambient_noise(source, duration=3)
print('[NLP] Listening')
audio_data = self.recognizer.listen(source, timeout=timeout)
print('[NLP] Processing')
text = self.recognizer.recognize_google(audio_data)
print(f'[NLP] Input recognized: {text}')
finally:
self._mutex.release()
return text
def text_to_speech(self, input_: str, language='en', tld='com') -> any:
with io.BytesIO() as f:
self._mutex.acquire()
# self.tts_engine.say(input_)
# self.tts_engine.runAndWait()
gtts.gTTS(input_, lang=language).save('.temp_tts_output.mp3')
playsound('.temp_tts_output.mp3')
# gtts.gTTS(input_, lang=language, tld=tld).write_to_fp(f)
# f.seek(0)
# song = AudioSegment.from_file(f, format="mp3")
# play(song)
self._mutex.release()
def is_nova_invocation(self, keyword='nova', timeout=2) -> bool:
text = self.speech_to_text(timeout=timeout).lower()
if keyword in text:
print(f'[NLP] Invocation recognized')
return True
return False
```
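A hedged sketch of driving the singleton; it requires a working microphone plus the speech_recognition, gTTS and playsound stack, and the import path is assumed from the file layout above.
```python
from nlp.nlp import SpeechRecognition
sr_util = SpeechRecognition()   # always returns the same instance (singleton)
if sr_util.is_nova_invocation(keyword='nova', timeout=2):
    sr_util.text_to_speech('Yes?')
    command = sr_util.speech_to_text(timeout=5)
    print('heard:', command)
```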
#### File: nova-core/plugins/command_not_found_plugin.py
```python
from core.abstract_plugin import NovaPlugin
class CommandNotFoundPlugin(NovaPlugin):
def get_keywords(self) -> list:
return []
def execute(self, command: str) -> str:
return 'I\'m sorry, I don\'t understand'
def help_command(self, command: str) -> str:
return 'This command is used by the core to respond to users when there is an error'
```
#### File: nova-core/plugins/hello_world_plugin.py
```python
from core.abstract_plugin import NovaPlugin
class HelloWorldPlugin(NovaPlugin):
def get_keywords(self) -> list:
return ['hello', 'hi', 'hey']
def execute(self, command: str) -> str:
        if ('hello' in command or 'hi' in command or 'hey' in command):
return 'Hello! My name is Nova. How can I help you?'
return None
def help_command(self, command: str) -> str:
return 'Helping the world by helping you.'
``` |
{
"source": "jmcs/environmental",
"score": 2
} |
#### File: jmcs/environmental/setup.py
```python
import platform
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
VERSION_MAJOR = 1
VERSION_MINOR = 3
VERSION = '{VERSION_MAJOR}.{VERSION_MINOR}'.format_map(locals())
python_version_major, python_version_minor = (int(version) for version in platform.python_version_tuple()[:-1])
if python_version_major < 3:
print("Environmental doesn't support Python 2")
requires = []
if python_version_major == 3 and python_version_minor < 4: # if version is below 3.4
requires.append('pathlib')
if python_version_major == 3 and python_version_minor < 5: # if version is below 3.5
requires.append('typing')
class PyTest(TestCommand):
def initialize_options(self):
TestCommand.initialize_options(self)
self.cov = None
self.pytest_args = ['--cov', 'environmental', '--cov-report', 'term-missing']
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='environmental',
packages=find_packages(),
version=VERSION,
description='Map a python configuration from environment variables',
long_description=open('README.rst').read(),
author='<NAME>',
url='https://github.com/zalando/environmental',
license='Apache License Version 2.0',
install_requires=requires,
tests_require=['pytest-cov', 'pytest'],
cmdclass={'test': PyTest},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
],
)
``` |
{
"source": "jmcshane/experimental",
"score": 2
} |
#### File: tekton_pipeline/api/tekton_watch.py
```python
import time
from kubernetes import client
from kubernetes import watch as k8s_watch
from table_logger import TableLogger
from tekton_pipeline.constants import constants
from tekton_pipeline.utils import utils
def watch(name, plural, namespace=None, timeout_seconds=600, version=constants.TEKTON_VERSION):
"""Watch the created or patched tekton objects in the specified namespace"""
if namespace is None:
namespace = utils.get_default_target_namespace()
tbl = TableLogger(
columns='NAME,SUCCEEDED,REASON,STARTED,COMPLETED',
colwidth={'NAME': 20, 'SUCCEEDED': 20, 'REASON': 20, 'STARTED': 20, 'COMPLETED': 20},
border=False)
stream = k8s_watch.Watch().stream(
client.CustomObjectsApi().list_namespaced_custom_object,
constants.TEKTON_GROUP,
version,
namespace,
plural,
timeout_seconds=timeout_seconds)
for event in stream:
tekton = event['object']
tekton_name = tekton['metadata']['name']
if name and name != tekton_name:
continue
else:
if tekton.get('status', ''):
status = ''
reason = ''
startTime = tekton['status'].get('startTime','')
completionTime = tekton['status'].get('completionTime','')
for condition in tekton['status'].get('conditions', {}):
status = condition.get('status', '')
reason = condition.get('reason', '')
tbl(tekton_name, status, reason, startTime, completionTime)
else:
tbl(tekton_name, '', '', '', '')
                # Sleep for 2 seconds in case the status section has not been generated yet.
time.sleep(2)
continue
if name == tekton_name and status != 'Unknown':
break
```
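A brief sketch of calling the watcher; `plural` must be a Tekton custom-resource plural such as `taskruns` or `pipelineruns`, the import path is assumed from the file layout, and the run name here is invented.
```python
from tekton_pipeline.api.tekton_watch import watch
# Print a status row per event until the named TaskRun leaves the 'Unknown' state
# (or the timeout expires).
watch('my-taskrun', 'taskruns', namespace='default', timeout_seconds=300)
```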
#### File: tekton_pipeline/models/v1beta1_embedded_task.py
```python
import pprint
import re # noqa: F401
import six
from tekton_pipeline.configuration import Configuration
class V1beta1EmbeddedTask(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'description': 'str',
'metadata': 'V1beta1PipelineTaskMetadata',
'params': 'list[V1beta1ParamSpec]',
'resources': 'V1beta1TaskResources',
'results': 'list[V1beta1TaskResult]',
'sidecars': 'list[V1beta1Sidecar]',
'step_template': 'V1Container',
'steps': 'list[V1beta1Step]',
'volumes': 'list[V1Volume]',
'workspaces': 'list[V1beta1WorkspaceDeclaration]'
}
attribute_map = {
'description': 'description',
'metadata': 'metadata',
'params': 'params',
'resources': 'resources',
'results': 'results',
'sidecars': 'sidecars',
'step_template': 'stepTemplate',
'steps': 'steps',
'volumes': 'volumes',
'workspaces': 'workspaces'
}
def __init__(self, description=None, metadata=None, params=None, resources=None, results=None, sidecars=None, step_template=None, steps=None, volumes=None, workspaces=None, local_vars_configuration=None): # noqa: E501
"""V1beta1EmbeddedTask - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._description = None
self._metadata = None
self._params = None
self._resources = None
self._results = None
self._sidecars = None
self._step_template = None
self._steps = None
self._volumes = None
self._workspaces = None
self.discriminator = None
if description is not None:
self.description = description
if metadata is not None:
self.metadata = metadata
if params is not None:
self.params = params
if resources is not None:
self.resources = resources
if results is not None:
self.results = results
if sidecars is not None:
self.sidecars = sidecars
if step_template is not None:
self.step_template = step_template
if steps is not None:
self.steps = steps
if volumes is not None:
self.volumes = volumes
if workspaces is not None:
self.workspaces = workspaces
@property
def description(self):
"""Gets the description of this V1beta1EmbeddedTask. # noqa: E501
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:return: The description of this V1beta1EmbeddedTask. # noqa: E501
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this V1beta1EmbeddedTask.
Description is a user-facing description of the task that may be used to populate a UI. # noqa: E501
:param description: The description of this V1beta1EmbeddedTask. # noqa: E501
:type: str
"""
self._description = description
@property
def metadata(self):
"""Gets the metadata of this V1beta1EmbeddedTask. # noqa: E501
:return: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1PipelineTaskMetadata
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1EmbeddedTask.
:param metadata: The metadata of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1PipelineTaskMetadata
"""
self._metadata = metadata
@property
def params(self):
"""Gets the params of this V1beta1EmbeddedTask. # noqa: E501
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:return: The params of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1ParamSpec]
"""
return self._params
@params.setter
def params(self, params):
"""Sets the params of this V1beta1EmbeddedTask.
Params is a list of input parameters required to run the task. Params must be supplied as inputs in TaskRuns unless they declare a default value. # noqa: E501
:param params: The params of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1ParamSpec]
"""
self._params = params
@property
def resources(self):
"""Gets the resources of this V1beta1EmbeddedTask. # noqa: E501
:return: The resources of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1beta1TaskResources
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this V1beta1EmbeddedTask.
:param resources: The resources of this V1beta1EmbeddedTask. # noqa: E501
:type: V1beta1TaskResources
"""
self._resources = resources
@property
def results(self):
"""Gets the results of this V1beta1EmbeddedTask. # noqa: E501
Results are values that this Task can output # noqa: E501
:return: The results of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1TaskResult]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this V1beta1EmbeddedTask.
Results are values that this Task can output # noqa: E501
:param results: The results of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1TaskResult]
"""
self._results = results
@property
def sidecars(self):
"""Gets the sidecars of this V1beta1EmbeddedTask. # noqa: E501
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:return: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Sidecar]
"""
return self._sidecars
@sidecars.setter
def sidecars(self, sidecars):
"""Sets the sidecars of this V1beta1EmbeddedTask.
Sidecars are run alongside the Task's step containers. They begin before the steps start and end after the steps complete. # noqa: E501
:param sidecars: The sidecars of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Sidecar]
"""
self._sidecars = sidecars
@property
def step_template(self):
"""Gets the step_template of this V1beta1EmbeddedTask. # noqa: E501
:return: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:rtype: V1Container
"""
return self._step_template
@step_template.setter
def step_template(self, step_template):
"""Sets the step_template of this V1beta1EmbeddedTask.
:param step_template: The step_template of this V1beta1EmbeddedTask. # noqa: E501
:type: V1Container
"""
self._step_template = step_template
@property
def steps(self):
"""Gets the steps of this V1beta1EmbeddedTask. # noqa: E501
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:return: The steps of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1Step]
"""
return self._steps
@steps.setter
def steps(self, steps):
"""Sets the steps of this V1beta1EmbeddedTask.
Steps are the steps of the build; each step is run sequentially with the source mounted into /workspace. # noqa: E501
:param steps: The steps of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1Step]
"""
self._steps = steps
@property
def volumes(self):
"""Gets the volumes of this V1beta1EmbeddedTask. # noqa: E501
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:return: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1Volume]
"""
return self._volumes
@volumes.setter
def volumes(self, volumes):
"""Sets the volumes of this V1beta1EmbeddedTask.
Volumes is a collection of volumes that are available to mount into the steps of the build. # noqa: E501
:param volumes: The volumes of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1Volume]
"""
self._volumes = volumes
@property
def workspaces(self):
"""Gets the workspaces of this V1beta1EmbeddedTask. # noqa: E501
Workspaces are the volumes that this Task requires. # noqa: E501
:return: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:rtype: list[V1beta1WorkspaceDeclaration]
"""
return self._workspaces
@workspaces.setter
def workspaces(self, workspaces):
"""Sets the workspaces of this V1beta1EmbeddedTask.
Workspaces are the volumes that this Task requires. # noqa: E501
:param workspaces: The workspaces of this V1beta1EmbeddedTask. # noqa: E501
:type: list[V1beta1WorkspaceDeclaration]
"""
self._workspaces = workspaces
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1EmbeddedTask):
return True
return self.to_dict() != other.to_dict()
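# Minimal usage sketch (hypothetical values), assuming the generated module's
# omitted header provides its usual imports (pprint, six, Configuration):
# build an embedded task and round-trip it through to_dict().
if __name__ == "__main__":
    demo_task = V1beta1EmbeddedTask(description="compile the sources")
    print(demo_task.to_dict()["description"])  # -> "compile the sources"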
```
{
"source": "jmcs/ponto",
"score": 2
}
#### File: ponto/ponto/paths.py
```python
from pathlib import Path
from os.path import expanduser
HOME = Path(expanduser("~"))
BASE_DIR = HOME / '.ponto' # type: Path
CONFIG_PATH = BASE_DIR / 'ponto.yaml'
DRIVE_DIR = BASE_DIR / 'drive'
DOTFILES_PATH = BASE_DIR / 'home' # path to store dotfiles
def relative_to_home(path) -> Path:
path = Path(path).absolute()
if HOME in path.parents:
path = '~' / path.relative_to(HOME)
return path
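# Minimal usage sketch (hypothetical paths): anything under the home directory is
# rewritten relative to "~", everything else comes back as an absolute Path.
if __name__ == "__main__":
    print(relative_to_home(HOME / "projects" / "notes.txt"))  # ~/projects/notes.txt
    print(relative_to_home("/etc/hosts"))                     # /etc/hosts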
```
{
"source": "jmcs/smart-format",
"score": 2
}
#### File: jmcs/smart-format/setup.py
```python
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
version = '0.0.1'
class PyTest(TestCommand):
def initialize_options(self):
TestCommand.initialize_options(self)
self.cov = None
self.pytest_args = ['--doctest-modules', '--cov', 'smart_format', '--cov-report', 'term-missing',
'smart_format']
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
setup(
name='smart-format',
packages=find_packages(),
version=version,
description='Smart Formatter',
long_description='',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/jmcs/smart-format',
license='MIT License',
tests_require=['pytest-cov', 'pytest'],
cmdclass={'test': PyTest},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
],
)
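# With the PyTest cmdclass registered above, `python setup.py test` runs the
# doctest + coverage invocation defined in initialize_options().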
```
{
"source": "jmctsm/AoC",
"score": 4
}
#### File: 2021/Puzzle_03/puz03.py
```python
from typing import List, Dict
def readfile_input() -> List:
return_list = []
with open("input.txt", "r", encoding="utf-8") as input_file:
lines = input_file.readlines()
for line in lines:
stripped_line = line.strip()
line_list = []
line_list = list(stripped_line)
return_list.append(line_list)
return return_list
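# Part 1 below builds gamma from the most common bit in each column and epsilon
# from the least common one; part 2 repeatedly filters the report with the same
# bit criteria to isolate the oxygen generator and CO2 scrubber ratings.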
def puz_part01():
input_list = readfile_input()
gamma_list = []
epsilon_list = []
times_change = 0
line_length = 12
for entry in input_list:
if len(entry) > line_length:
line_length = len(entry)
times_change += 1
if times_change != 0:
print("Need to validate inputs. Length Changed")
exit()
line_pos = 0
while line_pos < line_length:
match_dict = {
"0": 0,
"1": 0,
}
for entry in input_list:
if entry[line_pos] == "0":
match_dict["0"] += 1
elif entry[line_pos] == "1":
match_dict["1"] += 1
else:
print("Character is wrong. Exiting")
print(f"{entry[line_pos] =}")
exit()
if match_dict["0"] > match_dict["1"]:
gamma_list.append("0")
epsilon_list.append("1")
elif match_dict["1"] > match_dict["0"]:
gamma_list.append("1")
epsilon_list.append("0")
else:
print("Numbers are not right. Check them")
exit()
line_pos += 1
num = ""
gamma_num = int(num.join(gamma_list), 2)
num = ""
epsilon_num = int(num.join(epsilon_list), 2)
print("Part 01")
print(f"gamma x epsilon = {gamma_num * epsilon_num}")
print("\n\n")
def find_number(
sort_list: List,
sig_number: str,
line_length: int,
) -> int:
while True:
line_pos = 0
while line_pos < line_length - 1:
match_dict: Dict = {
"0": [],
"1": [],
}
for entry in sort_list:
if entry[line_pos] == "0":
match_dict["0"].append(entry)
elif entry[line_pos] == "1":
match_dict["1"].append(entry)
else:
print("Character is wrong. Exiting")
print(f"{entry[line_pos] =}")
exit()
print(f"0s are {len(match_dict['0'])} and 1s are {len(match_dict['1'])}")
if len(match_dict["0"]) > len(match_dict["1"]):
if sig_number == "1":
sort_list = match_dict["0"]
elif sig_number == "0":
sort_list = match_dict["1"]
elif len(match_dict["0"]) < len(match_dict["1"]):
if sig_number == "1":
sort_list = match_dict["1"]
elif sig_number == "0":
sort_list = match_dict["0"]
elif len(match_dict["0"]) == len(match_dict["1"]):
sort_list = match_dict[str(sig_number)]
else:
print("Numbers are not right. Check them")
exit()
line_pos += 1
print(f"sort_list length is {len(sort_list)}")
if len(sort_list) == 1:
break
if len(sort_list) == 1:
print(f"length of sort_list = {len(sort_list[0])}")
break
print(sort_list)
num = ""
return_number = int(num.join(sort_list[0]), 2)
print(return_number)
return return_number
def puz_part02():
input_list = readfile_input()
times_change = 0
line_length = 12
for entry in input_list:
if len(entry) > line_length:
line_length = len(entry)
times_change += 1
if times_change != 0:
print("Need to validate inputs. Length Changed")
exit()
oxy_gen_rating = find_number(input_list, "1", line_length=line_length)
car_gen_rating = find_number(input_list, "0", line_length=line_length)
print("Part 02")
print(f"Life Support Rating = {oxy_gen_rating * car_gen_rating}")
print("\n\n")
def main():
puz_part01()
puz_part02()
if __name__ == "__main__":
main()
```
{
"source": "jmctsm/NetworkScanner",
"score": 3
}
#### File: scan_mods/grabbing_mods/device_grabber.py
```python
import os
import sys
if "scan_mods" in os.listdir(os.getcwd()):
sys.path.append(os.getcwd())
else:
path = "../"
while True:
if "scan_mods" in os.listdir(path):
sys.path.append(path)
break
else:
path += "../"
from scan_mods.common_validation_checks.check_address import check_address
from scan_mods.common_validation_checks.check_username import check_username
from scan_mods.common_validation_checks.check_password import check_password
from scan_mods.common_validation_checks.check_enable_password import (
check_enable_password,
)
from scan_mods.grabbing_mods.device_specific_info_getter import device_info_getter
import ipaddress
import time
import getpass
import napalm
import paramiko
import json
import datetime
def check_ports(port_dictionary):
"""
This will run through the ports that are supposedly open and see if 22 is listed. If so, it will return the device type if it can be determined
or False if not open
Args:
port_dictionary (dict) : dictionary of either one port or multiple ports of format {<port_number>:<header>}
    Return
        tuple : (ssh port number, device type guessed from the header) if SSH is found
        tuple : (False, False) if port 22 or an SSH header is not found in the dictionary
"""
if not isinstance(port_dictionary, dict):
raise TypeError(
f"Port dictionary passed was of type {type(port_dictionary).__name__}. It needs to be a dictionary"
)
for key, value in port_dictionary.items():
if not isinstance(key, str):
try:
key = str(key)
except Exception as ex:
print(
f"Port could not be made into a string. It was a type of {type(key).__name__}"
)
print(ex)
raise
if not isinstance(value, dict):
raise TypeError(
f"The Port value was not a dict. It was a type of {type(value).__name__}"
)
if key == "22":
for key, value in port_dictionary[key].items():
if key == "ERROR":
continue
if "Cisco" in value:
return (22, "Cisco")
# This can be expanded as more and more are learned
elif "Ubuntu" in value:
return (22, "Linux")
else:
return (22, "Other")
for value in port_dictionary[key].values():
if "SSH" in value:
return (int(key), "Other")
return (False, False)
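# Illustrative call (hypothetical banner data): a port dictionary whose "22" entry
# carries an Ubuntu SSH banner makes check_ports() return (22, "Linux"), while a
# dictionary with no SSH banner at all falls through to (False, False).
#   check_ports({"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"}})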
def get_device_type(address, port, username, password, enable_password, header):
"""
Will attempt to connect to a device and determine the device type for napalm
Args:
address (str) : string of the IP to connect to
port (int) : integer of the port connecting to
username (str): string of the username
password (str): string of the password
enable_password (str): string of the enable password
        header (str): string of the type the device is thought to be
return:
str : string of either device type known or if it is unknown. Types will be used for napalm
"""
for item in [address, username, password, header]:
if not isinstance(item, str):
raise TypeError(f"{item} is not a string. It is a {type(item).__name__}")
try:
ipaddress.IPv4Address(address)
except ipaddress.AddressValueError:
raise ipaddress.AddressValueError(
f"{address} is not set up to be an IPv4 adddress."
)
if not isinstance(port, int):
raise TypeError(f"{port} is not a string. It is a {type(port).__name__}")
if port < 0 or port > 65535:
raise ValueError(f"Port number needs to be between 0 and 65535")
if enable_password is not None and not isinstance(enable_password, str):
raise TypeError(
f"{enable_password} is not a string. It is a {type(port).__name__}"
)
ssh_open = paramiko.SSHClient()
ssh_open.set_missing_host_key_policy(paramiko.AutoAddPolicy())
print(f"Attempting connection to {address}")
try:
# to work for Cisco added in the look_for_keys and allow_agent parameters
ssh_open.connect(
address, port, username, password, look_for_keys=False, allow_agent=False
)
except paramiko.AuthenticationException:
return_dict = {
"Version Info": f"[ERROR] paramiko.AuthenticationException: Authentication failed for device {address}"
}
return return_dict
except TimeoutError:
return_dict = {
"Version Info": f"[ERROR] TimeoutError: Connection timed out for device {address}"
}
return return_dict
except paramiko.ssh_exception.NoValidConnectionsError:
return_dict = {
"Version Info": f"[ERROR] paramiko.ssh_exception.NoValidConnectionsError: Unable to connect to port {port} on {address}"
}
return return_dict
if header == "Cisco":
if enable_password is None:
stdin_lines, stdout_lines, stderr_lines = ssh_open.exec_command(
"show version"
)
if stderr_lines.readlines():
raise ValueError(f"Something happened when connecting to {address}")
output_list = []
for line in stdout_lines.readlines():
if line.strip() == "":
continue
output_list.append(line.strip())
elif enable_password is not None:
# this means we have to invoke a shell and start issuing commands :)
shell_connection = ssh_open.invoke_shell()
shell_connection.send("enable\n")
time.sleep(0.5)
shell_connection.send(f"{enable_password}\n")
time.sleep(0.5)
shell_connection.send("term length 0\n")
time.sleep(0.5)
output_bytes = shell_connection.recv(65535)
shell_connection.send("show version\n")
time.sleep(0.5)
output_bytes = shell_connection.recv(65535)
output_list = []
temp_list = output_bytes.decode("utf-8").splitlines()
for item in temp_list:
if item == "":
continue
output_list.append(item)
else:
raise ValueError(
f"Enable password parameter has something jacked up with it. enable_password = {enable_password}"
)
return_dict = {"Version Info": output_list}
return_dict["OS Type"] = "unknown"
for line in output_list:
if "IOS-XE" in line:
return_dict["OS Type"] = "ios"
break
elif "IOS-XE" in line:
return_dict["OS Type"] = "ios"
break
elif "IOS-XR" in line:
return_dict["OS Type"] = "iosxr"
break
elif "Cisco Nexus" in line:
return_dict["OS Type"] = "nxos_ssh"
break
elif "IOSv" in line:
return_dict["OS Type"] = "ios"
break
ssh_open.close()
return return_dict
elif header == "Linux":
stdin_lines, stdout_lines, stderr_lines = ssh_open.exec_command("uname -a")
if stderr_lines.readlines():
raise ValueError(f"Something happened when connecting to {address}")
stdout_list = stdout_lines.readlines()
return_dict = {"Version Info": [stdout_list[0].strip()]}
return_dict["OS Type"] = "linux"
ssh_open.close()
return return_dict
elif header == "Other":
if enable_password is None:
stdin_lines, stdout_lines, stderr_lines = ssh_open.exec_command(
"show version"
)
if stderr_lines.readlines():
raise ValueError(f"Something happened when connecting to {address}")
output_list = []
for line in stdout_lines.readlines():
if line.strip() == "":
continue
output_list.append(line.strip())
if "not found" in output_list:
# Potentially a Linux box then
stdin_lines, stdout_lines, stderr_lines = ssh_open.exec_command(
"uname -a"
)
if stderr_lines.readlines():
raise ValueError(f"Something happened when connecting to {address}")
stdout_list = stdout_lines.readlines()
return_dict = {"Version Info": [stdout_list[0].strip()]}
return_dict["OS Type"] = "linux"
ssh_open.close()
return return_dict
elif enable_password is not None:
# this means we have to invoke a shell and start issuing commands :)
shell_connection = ssh_open.invoke_shell()
shell_connection.send("enable\n")
time.sleep(0.5)
shell_connection.send(f"{enable_password}\n")
time.sleep(0.5)
shell_connection.send("term length 0\n")
time.sleep(0.5)
output_bytes = shell_connection.recv(65535)
shell_connection.send("show version\n")
time.sleep(0.5)
output_bytes = shell_connection.recv(65535)
output_list = []
temp_list = output_bytes.decode("utf-8").splitlines()
for item in temp_list:
if item == "":
continue
output_list.append(item)
else:
raise ValueError(
f"Enable password parameter has something jacked up with it. enable_password = {<PASSWORD>}"
)
return_dict = {"Version Info": output_list}
return_dict["OS Type"] = "unknown"
for line in output_list:
if "IOS-XE" in line:
return_dict["OS Type"] = "ios"
break
elif "IOS-XE" in line:
return_dict["OS Type"] = "ios"
break
elif "IOS-XR" in line:
return_dict["OS Type"] = "iosxr"
break
elif "Cisco Nexus" in line:
return_dict["OS Type"] = "nxos_ssh"
break
elif "IOSv" in line:
return_dict["OS Type"] = "ios"
break
ssh_open.close()
return return_dict
ssh_open.close()
def get_config_napalm(
dev_driver=None,
host=None,
port=None,
usern=None,
passw=<PASSWORD>,
enable_password=<PASSWORD>,
):
"""
Will use napalm to connect to the device. It will return a dictionary of the configs cleaned with no new lines, etc
If full_config is True, it returns the full config
get_config_napalm(dev_driver=device_type["OS Type"], host=address, usern=username, passw=password)
Args:
dev_driver (str) : device driver to use with napalm for connecting
host (str) : ip address of the device connecting to
port (int) : port connecting to so can pass if not regular SSH
usern (str) : username to use to connect to the device
passw (str) : password to use to connect to the device
enable_password (str|None) : enable_password will be None if no enable password is needed else it will be a string of the enable password
"""
for item in [dev_driver, host, usern, passw]:
if not isinstance(item, str):
raise TypeError(f"{item} is not a string. It is a {type(item).__name__}")
if not isinstance(port, int):
raise TypeError(f"{port} is not an int. It is a {type(port).__name__}")
try:
ipaddress.IPv4Address(host)
except ipaddress.AddressValueError:
raise ipaddress.AddressValueError(
f"{host} is not set up to be an IPv4 adddress."
)
if enable_password is not None and not isinstance(enable_password, str):
raise TypeError(
f"{enable_password} is not a string. It is a {type(port).__name__}"
)
if dev_driver not in [
"eos",
"junos",
"iosxr",
"nxos",
"nxos_ssh",
"ios",
]:
raise ValueError(f"{dev_driver} is not an approved device driver")
if port < 0 or port > 65535:
raise ValueError(f"Port number needs to be between 0 and 65535")
device_driver = napalm.get_network_driver(dev_driver)
optional_args = None
if dev_driver == "ios" or dev_driver == "nxos_ssh":
if enable_password is not None:
if optional_args is None:
optional_args = {}
optional_args["secret"] = enable_password
try:
with device_driver(host, usern, passw, optional_args=optional_args) as device:
print("Attempting to get the device configuration...")
return_dict = {}
inputs_dict = {
"Device_Facts": device.get_facts,
"Device_Optics": device.get_optics,
"Device_Network_Instances": device.get_network_instances,
"Device_LLDP_Detail": device.get_lldp_neighbors_detail,
"Device_LLDP": device.get_lldp_neighbors,
"Device_Environment": device.get_environment,
"Device_Interfaces": device.get_interfaces,
"Device_Interfaces_IP": device.get_interfaces_ip,
"Device_SNMP_Information": device.get_snmp_information,
"Device_Users": device.get_users,
}
for key, value in inputs_dict.items():
try:
return_dict[key] = value()
except NotImplementedError:
return_dict[key] = {"Not_Implemented": "Not_Implemented"}
device_config = device.get_config()
device_config_full = device.get_config(full=True)
except ValueError as ex:
print(ex)
return {}
write_directory = directory_checker(host)
config_dict = {
"startup": "Device_Startup_Config",
"running": "Device_Running_Config",
"candidate": "Device_Candidate_Config",
}
for key, value in config_dict.items():
file_location = f"{write_directory}\\{host}_{key}.txt"
with open(file_location, "w") as output:
output.write(device_config[key])
return_dict[f"{value}_File_Location"] = file_location
for key, value in config_dict.items():
file_location = f"{write_directory}\\{host}_{key}_full.txt"
with open(file_location, "w") as output:
output.write(device_config_full[key])
return_dict[f"{value}_Full_File_Location"] = file_location
return return_dict
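# Files written per device by the loops above: <write_directory>/<host>_startup.txt,
# _running.txt and _candidate.txt, plus the *_full.txt variants produced by
# device.get_config(full=True); their paths are echoed back in return_dict.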
def directory_checker(address):
"""
Uses the address of the device to create a directory for storing configs.
First sees if the directory exists and then creates it if need be
Args:
address (str) : IP of the device connecting to for which configs will be stored
Return
str : directory path that will be written to
"""
if address is None:
raise ValueError(f"{address} needs to be a string and not None")
if not isinstance(address, str):
raise ValueError(f"{address} needs to be a string and not None")
write_directory = None
if "Output" in os.listdir(os.getcwd()):
write_directory = f"{os.getcwd()}/Output/Scans/{address}"
else:
path = "../"
while write_directory is None:
if "Output" in os.listdir(path):
write_directory = f"{path}/Output/Scans/{address}"
path += "../"
if not os.path.exists(write_directory):
os.makedirs(write_directory)
return write_directory
def device_grab(
address=None,
port_dict=None,
username=None,
password=<PASSWORD>,
enable_password_needed=False,
enable_password=None,
):
"""
    This will do a bunch of things. The main one is to iterate through the ports the user passes over
    and see if it is a single port, a list of ports, or a dictionary of ports.
    It will then connect to the machine at address and see if it can SSH to the device. Once there it will attempt to determine the version of the
    device if Cisco. If Linux, it will grab the uname information only.
    If the config is retrieved, it can also be written to a file
Args:
address (ipaddress.IPv4Address or string) : IPv4 address of ipaddress.IPv4Address type that will be used to connect to
port_dict (dict) : dictionary of either one port or multiple ports of format {<port_number>:<header>}
username (str) : string of the username. If not given, user will be asked
        password (str) : string of the password. If not given, user will be asked
enable_password_needed (bool) : if True, enable password needs to either be passed or will be asked for. If False, enable password check is skipped and password set to None
enable_password (str) : string of the enable password. If not given, user will be asked
Return:
dictionary of dictionaries : dictionary will contain full and not full config and full and not full will contain start, run, and candidate
"""
connect_address = check_address(address)
ssh_port, ssh_open = check_ports(port_dict)
# return_dict = {}
return_dict = {
"Version_Info": ["No Version information was available"],
"CONFIG": {
"Open_Close": False,
"Open_Close_Msg": "SSH is not open on the device for ports scanned",
"Device_Information": {},
},
}
if ssh_port is False and ssh_open is False:
"""
return_dict["CONFIG"] = {
"Open_Close": False,
"Open_Close_Msg": "SSH is not open on the device for ports scanned",
"Device_Information": {},
}
"""
return return_dict
ssh_username = check_username(username, connect_address)
ssh_password = check_password(password, connect_address)
if enable_password_needed is True:
ssh_enable_password = check_enable_password(enable_password, connect_address)
elif enable_password_needed is False:
ssh_enable_password = None
else:
raise ValueError(
f"You set the enable_password_needed option to something besides True or False. Not Cool man. enable_password_needed = {enable_password_needed}"
)
device_type = get_device_type(
connect_address,
ssh_port,
ssh_username,
ssh_password,
ssh_enable_password,
ssh_open,
)
if "[ERROR] " in device_type["Version Info"]:
return_dict = {
"Version_Info": device_type["Version Info"],
}
return return_dict
return_dict = {
"Version_Info": device_type["Version Info"],
"CONFIG": {
"Open_Close": False,
"Open_Close_Msg": f"Config for type device not yet supported ({device_type['OS Type']}).",
"Device_Information": {},
},
}
if device_type["OS Type"] in [
"eos",
"junos",
"iosxr",
"nxos",
"nxos_ssh",
"ios",
]:
return_dict = {
"Version_Info": device_type["Version Info"],
"CONFIG": {
"Open_Close": True,
"Open_Close_Msg": f"SSH is open and Device Type is known ({device_type['OS Type']}).",
},
}
device_information = get_config_napalm(
dev_driver=device_type["OS Type"],
host=connect_address,
port=ssh_port,
usern=ssh_username,
passw=<PASSWORD>,
enable_password=<PASSWORD>,
)
return_dict["CONFIG"]["Device_Information"] = device_information
return_dict["CONFIG"]["Show_Info"] = device_info_getter(
address=connect_address,
username=ssh_username,
password=<PASSWORD>,
device_type=device_type["OS Type"],
enable_password_needed=enable_password_<PASSWORD>,
enable_password=<PASSWORD>,
port_to_use=ssh_port,
)
return return_dict
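# Shape of the dictionary device_grab() hands back (keys taken from the assignments
# above; values here are placeholders):
#   {
#       "Version_Info": [...],
#       "CONFIG": {
#           "Open_Close": True,
#           "Open_Close_Msg": "SSH is open and Device Type is known (ios).",
#           "Device_Information": {...},  # get_config_napalm() results
#           "Show_Info": {...},           # device_info_getter() results
#       },
#   }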
if __name__ == "__main__":
from scan_mods.device_class import FoundDevice
start_time = time.time()
linux_testbox = "192.168.89.80"
cisco_iosxe_no_en = "192.168.89.254"
cisco_iosv_enable = "192.168.89.253"
cisco_iosl2_no_enable = "192.168.89.252"
cisco_nx0s7 = "192.168.89.251"
cisco_iosxe_enable = "192.168.89.247"
fake_testbox = "192.168.0.1"
response_time = (1.1, 1.35, 1.82)
linux_ports = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
}
}
cisco_ports = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-1.99-Cisco-1.25"},
}
}
nexus_ports = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_6.2 FIPS"},
}
}
fake_ports = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
}
}
linux_test_device = FoundDevice(linux_testbox, response_time)
cisco_iosxe_no_en_device = FoundDevice(cisco_iosxe_no_en, response_time)
cisco_iosv_enable_device = FoundDevice(cisco_iosv_enable, response_time)
cisco_iosl2_no_enable_device = FoundDevice(cisco_iosl2_no_enable, response_time)
cisco_nx0s7_device = FoundDevice(cisco_nx0s7, response_time)
cisco_iosxe_enable_device = FoundDevice(cisco_iosxe_enable, response_time)
fake_testbox_device = FoundDevice(fake_testbox, response_time)
linux_test_device.all_ports = linux_ports
cisco_iosxe_no_en_device.all_ports = cisco_ports
cisco_iosv_enable_device.all_ports = cisco_ports
cisco_iosl2_no_enable_device.all_ports = cisco_ports
cisco_nx0s7_device.all_ports = cisco_ports
cisco_iosxe_enable_device.all_ports = cisco_ports
fake_testbox_device.all_ports = fake_ports
username = "jmctsm"
password = "<PASSWORD>"
enable_password = None
no_enable_device_list = [
fake_testbox_device,
linux_test_device,
cisco_iosxe_no_en_device,
cisco_iosl2_no_enable_device,
cisco_nx0s7_device,
]
enable_device_list = [
cisco_iosv_enable_device,
cisco_iosxe_enable_device,
]
json_input_dict = {}
for device in no_enable_device_list:
device_grab_info = device_grab(
address=device.IP,
port_dict=device.open_tcp_ports,
username=username,
password=password,
enable_password_needed=False,
)
json_input_dict[str(device.IP)] = device_grab_info
enable_password = "<PASSWORD>"
for device in enable_device_list:
device_grab_info = device_grab(
address=device.IP,
port_dict=device.open_tcp_ports,
username=username,
password=password,
enable_password_needed=True,
enable_password=enable_password,
)
json_input_dict[str(device.IP)] = device_grab_info
print(json_input_dict)
json_output = json.dumps(json_input_dict, indent=4)
print("\n\n\n\n")
print(json_output)
with open(f"../../Output/test_output_{time.time()}.txt", "w") as file_output:
file_output.write(json_output)
duration = time.time() - start_time
print(f"Duration to run was {duration}")
```
#### File: scan_mods/grabbing_mods/device_specific_info_getter.py
```python
import os
import sys
from typing import Type
if "scan_mods" in os.listdir(os.getcwd()):
sys.path.append(os.getcwd())
else:
path = "../"
while True:
if "scan_mods" in os.listdir(path):
sys.path.append(path)
break
else:
path += "../"
from scan_mods.common_validation_checks.check_address import check_address
from scan_mods.common_validation_checks.check_username import check_username
from scan_mods.common_validation_checks.check_password import check_password
from scan_mods.common_validation_checks.check_enable_password import (
check_enable_password,
)
import time
import json
import netmiko
import textfsm
def check_device_type(device_type, address):
"""
Validate that the device type will return the correct string for NetMiko
Args:
device_type (str) : string of device types
address (str): string of the IP address
return:
str : string of the device type to use with NetMiko
"""
if device_type is None:
raise ValueError(
f"You did not enter a device type to search for at address {address}. Try again"
)
if device_type is not None:
if isinstance(device_type, str):
if device_type == "ios":
return ("cisco_ios", "cisco_ios")
elif device_type == "nxos_ssh":
return ("cisco_nxos_ssh", "cisco_nxos")
elif device_type == "iosxr":
return ("cisco_xr", "cisco_xr")
elif device_type == "linux":
return ("linux_ssh", "linux")
else:
raise ValueError(
f"Device Type of {device_type} is not known. Try again."
)
else:
raise TypeError(
f"You did not enter a string for the device type at address {address}. Try again"
)
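# Quick reference for the mapping above (addresses are placeholders):
#   check_device_type("ios", "10.0.0.1")      -> ("cisco_ios", "cisco_ios")
#   check_device_type("nxos_ssh", "10.0.0.1") -> ("cisco_nxos_ssh", "cisco_nxos")
#   check_device_type("linux", "10.0.0.1")    -> ("linux_ssh", "linux")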
def device_info_getter(
address=None,
username=None,
password=<PASSWORD>,
device_type=None,
enable_password_needed=False,
enable_password=None,
port_to_use=22,
):
"""
Will connect to a device using netmiko and pull information from the device
and format it using TEXTFSM settings into JSON for output later
Args:
address (str) : address of the device to get info from
username (str) : username of the device
password (str) : <PASSWORD>
device_type (str) : device type of the device in either napalm format or netmiko
enable_password_needed (bool) : boolean on if the enable password is needed or not
enable_password (str) : string of the enable password
port_to_use (int) : if the default ssh port is different
    Returns:
dict : dict of all info pulled from the device in JSON format
"""
if address is None:
raise ValueError(f"You did not tell me what to connect to Fix This")
valid_address = check_address(address)
valid_username = check_username(username, valid_address)
valid_password = check_password(password, valid_address)
valid_device_type, valid_textfsm_string = check_device_type(
device_type, valid_address
)
if enable_password_needed:
valid_enable_password = check_enable_password(enable_password, valid_address)
if port_to_use != 22:
if isinstance(port_to_use, int):
if port_to_use > 0 and port_to_use < 65535:
connect_port = port_to_use
else:
print(f"You gave me a non-valid port. Connecting on port 22.")
connect_port = 22
else:
print(f"You gave me a non-valid port. Connecting on port 22.")
connect_port = 22
else:
connect_port = 22
if enable_password_needed:
device_parameters = {
"device_type": valid_device_type,
"host": valid_address,
"username": valid_username,
"password": <PASSWORD>,
"port": connect_port,
"secret": valid_enable_password,
}
else:
device_parameters = {
"device_type": valid_device_type,
"host": valid_address,
"username": valid_username,
"password": <PASSWORD>,
"port": connect_port,
}
output_dict = {}
print(
f"Attempting connection to {valid_address} to get device specific information"
)
try:
device_connection = netmiko.ConnectHandler(**device_parameters)
except netmiko.ssh_exception.NetmikoTimeoutException:
output_dict["ERROR"] = {
"NetmikoTimeoutException": f"Device Connection Timed Out for {valid_address}"
}
return output_dict
except netmiko.ssh_exception.NetmikoAuthenticationException:
output_dict["ERROR"] = {
"NetmikoAuthenticationException": f"Authentication failed for {valid_address}"
}
return output_dict
except Exception as ex:
print(ex)
raise
command_dict = find_commands(valid_textfsm_string)
print(f"Sending commands to {valid_address} for device specific information")
for command_key, command_value in command_dict.items():
try:
output_string = device_connection.send_command(
command_value, use_textfsm=True
)
except textfsm.parser.TextFSMError:
output_string = "% Invalid input detected "
except OSError:
output_string = "% Invalid input detected "
except netmiko.ssh_exception.NetmikoTimeoutException:
output_string = "Timed-out reading channel, data not available."
except Exception as ex:
print(ex)
output_string = "% Invalid input detected "
raise
if (
"% Invalid input detected " in output_string
or "% Incomplete command" in output_string
):
continue
output_dict[command_key] = output_string
device_connection.disconnect()
return output_dict
def find_commands(device_type):
"""
    This will read through the list of available commands in the NTC templates and will return a dict of commands available for the device type
Args:
device_type (str) : string of the device_type to look for in the NTC templates
Return:
dict : dict of commands for the device type where key is the command name with underscores and value is the command
"""
if not isinstance(device_type, str):
raise TypeError(
f"Device Type should be a string. Not {type(device_type).__name__}"
)
commands_found = {}
print(f"Grabbing all commands for device type {device_type}")
ntc_template_path = None
if "NTC_Templates" in os.listdir(os.getcwd()):
ntc_template_path = (
f"{os.getcwd()}/NTC_Templates/ntc-templates/ntc_templates/templates/"
)
else:
path = "../"
while ntc_template_path is None:
if "NTC_Templates" in os.listdir(path):
ntc_template_path = (
f"{path}/NTC_Templates/ntc-templates/ntc_templates/templates/"
)
path += "../"
os.environ["NTC_TEMPLATES_DIR"] = ntc_template_path
for template_name in os.listdir(ntc_template_path):
if device_type in template_name:
command_name = template_name[len(device_type) + 1 : -8]
command_value = " ".join(command_name.split("_"))
commands_found[command_name] = command_value
return commands_found
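# Worked example of the filename parsing above (hypothetical template file):
#   "cisco_ios_show_version.textfsm" with device_type "cisco_ios"
#   -> command_name "show_version", command_value "show version",
#   so commands_found becomes {"show_version": "show version"}.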
if __name__ == "__main__":
start_time = time.time()
cisco_iosxe_no_en = {"192.168.89.254": "ios"}
cisco_iosv_enable = {"192.168.89.253": "ios"}
cisco_iosl2_no_enable = {"192.168.89.252": "ios"}
cisco_iosxe_enable = {"192.168.89.247": "ios"}
cisco_nx0s7 = {"192.168.89.251": "nxos_ssh"}
linux_ubuntu_server = {"192.168.89.80": "linux"}
username = "jmctsm"
password = "<PASSWORD>"
enable_password = None
no_enable_device_list = [
cisco_iosxe_no_en,
cisco_iosl2_no_enable,
cisco_nx0s7,
linux_ubuntu_server,
]
enable_device_list = [
cisco_iosv_enable,
cisco_iosxe_enable,
]
json_input_dict = {}
for device in no_enable_device_list:
for device_ip, device_type in device.items():
device_info = device_info_getter(
address=device_ip,
username=username,
password=password,
device_type=device_type,
enable_password_needed=False,
)
json_input_dict[device_ip] = device_info
enable_password = "<PASSWORD>"
for device in enable_device_list:
for device_ip, device_type in device.items():
device_info = device_info_getter(
address=device_ip,
username=username,
password=password,
device_type=device_type,
enable_password_needed=True,
enable_password=enable_password,
)
json_input_dict[device_ip] = device_info
print(json_input_dict)
json_output = json.dumps(json_input_dict, indent=4)
print("\n\n\n\n")
print(json_output)
with open(f"../../Output/test_output_{time.time()}.txt", "w") as file_output:
file_output.write(json_output)
duration = time.time() - start_time
print(f"Duration to run was {duration}")
```
#### File: NetworkScanner/scan_mods/mp_pinger.py
```python
import ipaddress
from pythonping import ping
import re
import multiprocessing
import time
def ping_address(address):
"""
    This will take an IP address as a string and ping it.
    It will return either a string that says it timed out or
    a tuple of the address and response times.
    This is pseudo private and should be called from the main pinger function.
    Tried to optimize it so it can be run in a multiprocessing environment.
Args:
address (str) : ipaddress in string format
Return:
tuple/str : tuple of ip address and response times if the system is up or a TIMEOUT string if not
"""
try:
ipaddress.ip_address(address)
except ValueError:
raise ValueError(f"{address} is not an IPv4 address")
print(f"Pinging {address}", end=" (")
result = ping(str(address), timeout=1, count=3)
timeout_pattern = "Round Trip Times min/avg/max is 1000/1000.0/1000 ms"
timeout_result = re.search(timeout_pattern, str(result), re.IGNORECASE)
active_result = False
if timeout_result:
print("Not Responding)")
return "TIMEOUT"
else:
active_pattern = "Round Trip Times min\/avg\/max is (.*)\/(.*)\/(.*) ms"
active_result = re.search(active_pattern, str(result), re.IGNORECASE)
if active_result:
up_result = (
float(active_result.group(1)),
float(active_result.group(2)),
float(active_result.group(3)),
)
print(f"{up_result[0]} ms, {up_result[1]} ms, {up_result[2]} ms)")
return (address, up_result)
def pinger(addresses):
"""
This will take a list of IP addresses in the IP objects and ping them.
It will return a dictionary of addresses that are reachable along with
the response times for each address.
Args:
addresses (list) : list of IP address strings to ping
Return:
dict : dictionary of IP address strings that are reachable and the
response time of each one
"""
# raise an error is an empty list is passed to the function
if len(addresses) <= 0:
raise ValueError(
"Looks like the network didn't work for getting IPs. Bye Bye!!"
)
if not isinstance(addresses, list):
raise TypeError(
"Looks like a list was not passed to pinger. Please try again."
)
for address in addresses:
try:
ipaddress.ip_address(address)
except ValueError:
raise ValueError(f"{address} is not an IPv4 address")
with multiprocessing.Pool() as pool:
ping_results = pool.map(ping_address, addresses)
active_dict = {}
if len(ping_results) == 0:
raise ValueError("Nothing was alive. Pick a subnet that has something alive")
for item in ping_results:
if item is None:
raise ValueError("The return value was not correct.")
if isinstance(item, str) and item == "TIMEOUT":
continue
if isinstance(item[1], tuple) and len(item[1]) == 3:
active_dict[item[0]] = {"ping_response_time": item[1]}
else:
print(item)
raise ValueError("The return value was not correct.")
if len(active_dict) > 0:
return active_dict
else:
raise Exception("Nothing was alive. Pick a subnet that has something alive")
if __name__ == "__main__":
start_time = time.time()
# test this out with a home network
    test_addresses = [f"192.168.89.{host}" for host in range(1, 255)]
    # host list to pass to the pinger function
active_hosts = pinger(test_addresses)
if len(active_hosts) > 0:
print(active_hosts)
duration = time.time() - start_time
print(f"Total time was {duration} seconds")
```
#### File: scan_mods/Not_Used/port_scanner.py
```python
import ipaddress
import socket
import time
import os
import sys
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
# Protocol Scanner imports
from scan_mods.protocol_scanners.http_scanner import http_scanner
from scan_mods.protocol_scanners.https_scanner import https_scanner
from scan_mods.protocol_scanners.dns_scanner import udp_dns_scanner
from scan_mods.protocol_scanners.dns_scanner import tcp_dns_scanner
"""
TODO: Might want to make different scanners for each of the different types of ports to make it easier to really get a good read on what they are
This will take a list of IPV4 addresses and do a basic port scan on them. The ports to see if they are open are below. If they are open, the header will
be grabbed and returned to the calling function.
Ports to scan
20: File Transfer Protocol (FTP) data channel.
21: File Transfer Protocol (FTP) control channel. The commands port.
22: Secure Shell (SSH). Remote management protocol OS.
23: Telnet, or terminal network, for protocol implementation text interface across a network.
25: Simple Mail Transfer Protocol (SMTP).
37: Time protocol.
43: WHOIS. Protocol for obtaining registration of ownership details for IP addresses and domain names.
53: Domain Name System (DNS).
67: Dynamic Host Configuration Protocol (DHCP). Dynamic IP.
69: Trivial File Transfer Protocol (TFTP).
79: Finger protocol.
80: Hypertext Transfer Protocol (HTTP).
88: Kerberos.
109: Post Office Protocol v2 (POP2). Protocol for receiving emails, version two.
110: Post Office Protocol v3 (POP3). Protocol for receiving emails, version three.
115: Secure File Transfer Protocol (SFTP). Protocol for secure transmission of data.
118: SQL Services.
123: Network Time Protocol (NTP)
143: Internet Message Access Protocol (IMAP). Protocol at the application level, for accessing emails.
161: Simple Network Management Protocol (SNMP). Protocol for device management.
162: Simple Network Management Protocol (SNMP) Trap.
179: Border Gateway Protocol (BGP).
194: Internet Relay Chat (IRC).
389: Lightweight Directory Access Protocol (LDAP). Application layer protocol.
443: Hypertext Transfer Protocol Secure (HTTPS). HTTP protocol, with support for encryption.
464: Kerberos reset password.
465: Simple Mail Transfer Protocol over SSL (SMTPS).
514: Syslog.
515: Line Printer Daemon (LPD). Protocol for remote printing.
530: Remote Procedure Call (RPC).
543: Kerberos login.
544: Real Time Stream Control Protocol (RTSP).
547: DHCPv6 server.
993: Internet Message Access Protocol over SSL (IMAPS). IMAP protocol with support for SSL encryption.
995: Post Office Protocol 3 over SSL (POP3S). POP3 protocol with support for SSL encryption.
1080: SOCKet Secure (SOCKS). Protocol for receiving secure and anonymous access.
3128: Proxy. Port often used for proxies.
3306: MySQL, for MySQL database.
3389: Remote Desktop Protocol (RDP), for Windows.
5432: Postgres Database (PostgreSQL).
5900: Virtual Network Computing (VNC). For desktop remote access.
5938: TeamViewer, for the remote-control system, to facilitate data computer and data exchange.
8080: HTTP/Web. An alternate HTTP protocol port.
"""
def __validate_for_scanners(address, port, domain):
"""
Validates that the address, port, and domain are of the correct types
Pulled here since the code was the same
Args:
address (str) : string type address
port (int) : port number that should be an int
domain (str) : domain name that should be a string
"""
if not isinstance(address, str):
raise TypeError(f"{address} is not of type str")
if not isinstance(port, int):
raise TypeError(f"{port} is not of type int")
if domain is not None and not isinstance(domain, str):
raise TypeError(f"{domain} is not a string")
return True
def tcp_scanner(address, port, domain_name=None):
"""
Scans the TCP port and returns the string to the main function
Args:
address (str) : string of the IPv4 address that is passed from the calling function
port (int) : int of the port to connect to
domain_name (str): optional variable to hold for domain_name testing in things like DNS
Return:
string of either the error message or the header from the port
"""
__validate_for_scanners(address, port, domain_name)
print(f"Scanning TCP port {port}")
if port == 53:
if domain_name is None:
scan_data = tcp_dns_scanner(dns_server=address)
else:
scan_data = tcp_dns_scanner(dns_server=address, domainname=domain_name)
print(f"TCP {port} = {scan_data}")
return scan_data
elif port == 80:
scan_data = http_scanner(address)
print(f"TCP {port} = {scan_data.strip()}")
return scan_data
elif port == 443:
scan_data = https_scanner(address)
print(f"TCP {port} = {scan_data.strip()}")
return scan_data
elif port == 8080:
scan_data = http_scanner(address)
print(f"TCP {port} = {scan_data.strip()}")
return scan_data
scan_socket = socket.socket()
try:
scan_socket.connect((address, port))
except ConnectionRefusedError:
output = "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
print(f"TCP {port} = {output}")
scan_socket.close()
return output
except TimeoutError:
output = f"TimeoutError -- A connection attempt failed because the connected party did not properly respond after a period of time"
output += ", or established connection failed because connected host has failed to respond"
print(f"TCP {port} = {output}")
scan_socket.close()
return output
MESSAGE = b"Hello, World!"
scan_socket.send(MESSAGE)
try:
scan_data = scan_socket.recv(1024).decode()
except UnicodeDecodeError:
scan_data = "UnicodeDecodeError -- 'utf-8' codec can't decode byte 0xff in position 0: invalid start byte"
print(f"TCP {port} = {scan_data}")
scan_socket.close()
return scan_data
else:
print(f"TCP {port} = {scan_data.strip()}")
scan_socket.close()
return scan_data.strip()
return None
def udp_scanner(address, port, domain_name=None):
"""
Scans the UDP port and returns the string to the main function
Args:
address (str) : string of the IPv4 address that is passed from the calling function
port (int) : int of the port to connect to
domain_name (str): optional variable to hold for domain_name testing in things like DNS
Return:
string of either the error message or the header from the port
"""
__validate_for_scanners(address, port, domain_name)
print(f"Scanning UDP port {port}")
if port == 53:
if domain_name is None:
scan_data = udp_dns_scanner(dns_server=address)
else:
scan_data = udp_dns_scanner(dns_server=address, domainname=domain_name)
print(f"UDP {port} = {scan_data}")
return scan_data
try:
MESSAGE = b"Hello, World!"
# socket.AF_INET is for the internet protocol and socket.sock_dgram is for UDP
scan_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
scan_socket.sendto(MESSAGE, (address, port))
scan_socket.settimeout(2)
scan_data, addr = scan_socket.recvfrom(1024) # buffer size is 1024 bytes
print(f"UDP {port} = {scan_data.strip()}")
scan_socket.close()
return scan_data.strip()
except socket.timeout:
scan_data = f"Socket Timed Out"
print(f"UDP {port} = {scan_data}")
scan_socket.close()
return scan_data
return None
def port_scanner(address, domain_name=None):
"""
This will scan an address for standard ports to see what is open. If it is open, it will then grab a header if applicable.
It returns a dictionary of ports and headers to the calling function
Args:
address (IPv4 address object) : IPv4 address object to scan
domain_name (str) : string of the domain name to test with other places like DNS
Return:
dict : dictionary of ports and headers that are open on the box
None : if no ports are open or responding
"""
TCP_PORTS = (53,)
UDP_PORTS = (53,)
# check to make sure that the address is correct first
if not isinstance(address, ipaddress.IPv4Address):
raise TypeError(f"{address} since it is not an IPv4Address")
if domain_name is not None and not isinstance(domain_name, str):
raise TypeError(f"{domain_name} is not a string")
return_dict = {
"TCP": {},
"UDP": {},
}
# Scan the TCP Ports
print(f"SCANNING TCP PORTS for {address}...")
for port in TCP_PORTS:
TCP_key = str(port)
if domain_name is None:
scan_result = tcp_scanner(str(address), port)
else:
scan_result = tcp_scanner(str(address), port, domain_name)
if len(scan_result) < 1:
scan_result = "Nothing returned from the server"
return_dict["TCP"][TCP_key] = scan_result
# Scan the UDP Ports
print(f"SCANNING UDP PORTS for {address}...")
for port in UDP_PORTS:
UDP_key = str(port)
if domain_name is None:
scan_result = udp_scanner(str(address), port)
else:
scan_result = udp_scanner(str(address), port, domain_name)
if len(scan_result) < 1:
scan_result = "***Nothing returned from the server***"
return_dict["UDP"][TCP_key] = scan_result
return return_dict
if __name__ == "__main__":
start_time = time.time()
# calling function for example
address_list = [
ipaddress.ip_address("192.168.1.65"),
ipaddress.ip_address("10.0.1.254"),
ipaddress.ip_address("192.168.89.80"),
]
test_domain_names = [
"test.local",
"www.google.com",
"google.com",
"test.test",
None,
]
dict_of_ports = {}
for address in address_list:
for test_domain_name in test_domain_names:
dict_of_ports[address] = port_scanner(address, test_domain_name)
print(dict_of_ports)
duration = time.time() - start_time
print(f"Total time was {duration} seconds")
```
#### File: tests/test_scan_mod/test_device_class.py
```python
from typing import Type
import unittest
import ipaddress
import os
import sys
import json
from unittest.mock import patch
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
grandparentdir = os.path.dirname(parentdir)
sys.path.append(grandparentdir)
from scan_mods.device_class import FoundDevice
class TestFoundDevice(unittest.TestCase):
"""
Tests that the device class works for FoundDevice
"""
test_ip01 = "192.168.1.65"
test_ip02 = "192.168.1.65"
test_ip03 = ipaddress.IPv4Address("192.168.1.65")
test_ip04 = ipaddress.IPv4Address("192.168.1.68")
test_time01 = (1.1, 1.35, 1.82)
test_time02 = (1.1, 1.35, 1.82)
test_ports01 = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
"23": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"53": {
"Domain_Name": "test.local",
"Server": "192.168.89.80",
"@": "[@ 604800 IN SOA @ root 2 604800 86400 2419200 604800][@ 604800 IN NS ns][@ 604800 IN A 192.168.89.80][@ 604800 IN AAAA ::1]",
"ns": "[ns 604800 IN A 192.168.89.80]",
"www": "[www 604800 IN A 192.168.89.80]",
},
"80": {
"Date": "Tue, 09 Mar 2021 13:09:42 GMT",
"Server": "Apache/2.4.41 (Ubuntu)",
"Last-Modified": "Tue, 23 Feb 2021 19:42:50 GMT",
"ETag": '"2ab2-5bc061fadc9e7-gzip"',
"Accept-Ranges": "bytes",
"Vary": "Accept-Encoding",
"Content-Encoding": "gzip",
"Content-Length": "3147",
"Keep-Alive": "timeout=5, max=100",
"Connection": "Keep-Alive",
"Content-Type": "text/html",
},
},
"UDP": {
"43": {"ERROR": "Socket Timed Out"},
"53": {
"Name": "test.local.",
"Record Type": "SOA",
"Record Class": "IN",
"nameserver": "192.168.89.80",
"port": "53",
"Answer": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"Canonical Name": "test.local.",
"Minimum TTL": "604800",
"CNAMES": [],
"DNS Record Set": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"expiration": "1615900227.7461846",
},
},
}
test_closed_TCP_ports01 = {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"23": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
}
test_closed_UDP_ports01 = {"43": {"ERROR": "Socket Timed Out"}}
test_open_UDP_ports01 = {
"53": {
"Name": "test.local.",
"Record Type": "SOA",
"Record Class": "IN",
"nameserver": "192.168.89.80",
"port": "53",
"Answer": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"Canonical Name": "test.local.",
"Minimum TTL": "604800",
"CNAMES": [],
"DNS Record Set": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"expiration": "1615900227.7461846",
},
}
test_open_TCP_ports01 = {
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
"53": {
"Domain_Name": "test.local",
"Server": "192.168.89.80",
"@": "[@ 604800 IN SOA @ root 2 604800 86400 2419200 604800][@ 604800 IN NS ns][@ 604800 IN A 192.168.89.80][@ 604800 IN AAAA ::1]",
"ns": "[ns 604800 IN A 192.168.89.80]",
"www": "[www 604800 IN A 192.168.89.80]",
},
"80": {
"Date": "Tue, 09 Mar 2021 13:09:42 GMT",
"Server": "Apache/2.4.41 (Ubuntu)",
"Last-Modified": "Tue, 23 Feb 2021 19:42:50 GMT",
"ETag": '"2ab2-5bc061fadc9e7-gzip"',
"Accept-Ranges": "bytes",
"Vary": "Accept-Encoding",
"Content-Encoding": "gzip",
"Content-Length": "3147",
"Keep-Alive": "timeout=5, max=100",
"Connection": "Keep-Alive",
"Content-Type": "text/html",
},
}
test_ports02 = {
"TCP": {
"37": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
},
"UDP": {
"69": {"ERROR": "Socket Timed Out"},
},
}
test_ports03 = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
"23": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"37": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"53": {
"Domain_Name": "test.local",
"Server": "192.168.89.80",
"@": "[@ 604800 IN SOA @ root 2 604800 86400 2419200 604800][@ 604800 IN NS ns][@ 604800 IN A 192.168.89.80][@ 604800 IN AAAA ::1]",
"ns": "[ns 604800 IN A 192.168.89.80]",
"www": "[www 604800 IN A 192.168.89.80]",
},
"80": {
"Date": "Tue, 09 Mar 2021 13:09:42 GMT",
"Server": "Apache/2.4.41 (Ubuntu)",
"Last-Modified": "Tue, 23 Feb 2021 19:42:50 GMT",
"ETag": '"2ab2-5bc061fadc9e7-gzip"',
"Accept-Ranges": "bytes",
"Vary": "Accept-Encoding",
"Content-Encoding": "gzip",
"Content-Length": "3147",
"Keep-Alive": "timeout=5, max=100",
"Connection": "Keep-Alive",
"Content-Type": "text/html",
},
},
"UDP": {
"43": {"ERROR": "Socket Timed Out"},
"53": {
"Name": "test.local.",
"Record Type": "SOA",
"Record Class": "IN",
"nameserver": "192.168.89.80",
"port": "53",
"Answer": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"Canonical Name": "test.local.",
"Minimum TTL": "604800",
"CNAMES": [],
"DNS Record Set": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"expiration": "1615900227.7461846",
},
"69": {"ERROR": "Socket Timed Out"},
},
}
def test_001_class_init_pass(self):
"""
Tests that a device can be created passing only the init variables
"""
print("\nTest 001 - Start testing that class init works...")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertIsInstance(test_class, FoundDevice)
self.assertIsInstance(test_class._IP, str)
self.assertEqual(test_class._IP, self.test_ip01)
self.assertIsInstance(test_class._response_time, tuple)
self.assertEqual(test_class._response_time, self.test_time01)
for test_item in [
test_class._all_ports,
test_class._username,
test_class._password,
test_class._enable_password,
test_class._domain_name,
test_class.device_info,
]:
self.assertIsNone(test_item)
for test_item in [
test_class._open_tcp_ports,
test_class._open_udp_ports,
test_class._closed_tcp_ports,
test_class._closed_udp_ports,
]:
self.assertEqual(len(test_item), 0)
self.assertIsInstance(test_item, dict)
self.assertEqual(test_item, {})
self.assertFalse(test_class._use_enable)
test_class = FoundDevice(
self.test_ip01,
self.test_time01,
"jmctsm",
"ciscocisco",
use_enable=True,
enable_password="<PASSWORD>",
domain_name="test.local",
)
self.assertIsInstance(test_class, FoundDevice)
self.assertIsInstance(test_class._IP, str)
self.assertEqual(test_class._IP, self.test_ip01)
self.assertIsInstance(test_class._response_time, tuple)
self.assertEqual(test_class._response_time, self.test_time01)
for test_item in [
test_class._all_ports,
test_class.device_info,
]:
self.assertIsNone(test_item)
for test_item in [
test_class._open_tcp_ports,
test_class._open_udp_ports,
test_class._closed_tcp_ports,
test_class._closed_udp_ports,
]:
self.assertEqual(len(test_item), 0)
self.assertIsInstance(test_item, dict)
self.assertEqual(test_item, {})
self.assertEqual(test_class._username, "jmctsm")
self.assertEqual(test_class._password, "<PASSWORD>")
self.assertTrue(test_class._use_enable)
self.assertEqual(test_class._enable_password, "<PASSWORD>")
self.assertEqual(test_class._domain_name, "test.local")
print("Test 001 - Finish testing that class init works\n")
def test_002_init_raises_correct_errors(self):
"""
Test that the init function raises the correct errors
"""
print("\nTest 002 - Starting test that init raises the correct errors...")
test_list = [
(1, self.test_time01, TypeError),
("1", self.test_time01, ValueError),
("192.168.0.254", [1.1, 1.1, 1.1], TypeError),
("192.168.0.254", (1.1, 1.1), ValueError),
("192.168.0.254", [1.1, 1.1, "a"], TypeError),
]
for test_tuple in test_list:
with self.assertRaises(test_tuple[2]):
test_class = FoundDevice(test_tuple[0], test_tuple[1])
print("Test 002 - Finished test that init raises the correct errrors\n")
def test_003_IP_getter_works(self):
"""
Tests that the IP getter for the class is working
"""
print("\nTest 003 - Start testing that class IP getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.IP, self.test_ip01)
print("Test 003 - Finish testing that class IP getter works works\n")
def test_004_set_IP_directly(self):
"""
Tests that you cannot set the IP directly
"""
print("\nTest 004 - Start testing that IP cannot be set be directly")
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.IP = 4
print(
"Test 004 - Finish testing that response_time cannot be set be directly\n"
)
def test_005_response_time_getter_works(self):
"""
Tests that the response time getter for the class is working
"""
print("\nTest 005 - Start testing that class response time getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.response_time, self.test_time01)
print("Test 005 - Finish testing that class response time getter works\n")
def test_006_set_response_time_directly(self):
"""
Tests that you cannot set the response_time directly
"""
print("\nTest 006 - Start testing that response_time cannot be set be directly")
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.response_time = 4
print(
"Test 006 - Finish testing that response_time cannot be set be directly\n"
)
def test_007_username_getter_works(self):
"""
Tests that the username getter for the class is working
"""
print("\nTest 007 - Start testing that class username getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.username, "Username has not been set yet")
test_class._username = "jmctsm"
self.assertEqual(test_class.username, "jmctsm")
print("Test 007 - Finish testing that class username getter works\n")
def test_008_username_directly(self):
"""
Tests that you cannot set the username directly
"""
print("\nTest 008 - Start testing that username cannot be set be directly")
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.username = 4
print("Test 008 - Finish testing that username cannot be set be directly\n")
def test_009_password_getter_works(self):
"""
Tests that the password getter for the class is working
"""
print("\nTest 009 - Start testing that class password getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.password, "Password for device has not been given")
test_class._password = "<PASSWORD>"
self.assertEqual(test_class.password, "Password for device has been given")
print("Test 009 - Finish testing that class password getter works\n")
def test_010_password_directly(self):
"""
Tests that you cannot set the password directly
"""
print("\nTest 010 - Start testing that password cannot be set be directly")
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.password = 4
print("Test 010 - Finish testing that password cannot be set be directly\n")
def test_011_enable_password_getter_works(self):
"""
Tests that the enable password getter for the class is working
"""
print("\nTest 011 - Start testing that class enable password getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(
test_class.enable_password, "Not using Enable password for this device"
)
test_class._use_enable = True
self.assertEqual(
test_class.enable_password, "Enable password for device has not been given"
)
test_class._enable_password = "<PASSWORD>"
self.assertEqual(
test_class.enable_password, "Enable password for device has been given"
)
print("Test 009 - Finish testing that class enable password getter works\n")
def test_012_enable_password_directly(self):
"""
Tests that you cannot set the enable password directly
"""
print(
"\nTest 012 - Start testing that enable password cannot be set be directly"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.enable_password = 4
print(
"Test 012 - Finish testing that enable password cannot be set be directly\n"
)
def test_013_domain_name_getter_works(self):
"""
Tests that the domain name getter for the class is working
"""
print("\nTest 013 - Start testing that class domain name getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.domain_name, "Domain name has not been set yet")
test_class._domain_name = "test.local"
self.assertEqual(test_class.domain_name, "test.local")
print("Test 013 - Finish testing that class domain name getter works\n")
def test_014_domain_name_directly(self):
"""
Tests that you cannot set the domain name directly
"""
print("\nTest 014 - Start testing that domain name cannot be set be directly")
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.domain_name = 4
print("Test 014 - Finish testing that domain name cannot be set be directly\n")
def test_015_all_ports_getter_works(self):
"""
Tests that the all_ports getter for the class is working
"""
print("\nTest 015 - Start testing that class ports getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertIsNone(test_class.all_ports)
test_class._all_ports = {"a": {22: "Open"}}
self.assertEqual(test_class.all_ports, {"a": {22: "Open"}})
print("Test 015 - Finish testing that class ports getter works\n")
def test_016_get_ports_works(self):
"""
Tests that method get_ports works
"""
print("\nTest 016 - Start testing that class get_ports works")
with patch("scan_mods.device_class.port_scanner") as mock_port_scanner:
mock_port_scanner.return_value = self.test_ports01
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.get_ports()
self.assertIsInstance(test_class.all_ports, dict)
self.assertEqual(test_class.all_ports, self.test_ports01)
print("Test 016 - Finish testing that class get_ports works\n")
def test_017_all_ports_setter(self):
"""
Tests that the all ports setter correctly functions
"""
print("\nTest 017 - Start testing that class all_ports setter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.all_ports = {
"TCP": {
"20": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
"23": {
"ERROR": "ConnectionRefusedError -- No connection could be made because the target machine actively refused it"
},
"53": {
"Domain_Name": "test.local",
"Server": "192.168.89.80",
"@": "[@ 604800 IN SOA @ root 2 604800 86400 2419200 604800][@ 604800 IN NS ns][@ 604800 IN A 192.168.89.80][@ 604800 IN AAAA ::1]",
"ns": "[ns 604800 IN A 192.168.89.80]",
"www": "[www 604800 IN A 192.168.89.80]",
},
"80": {
"Date": "Tue, 09 Mar 2021 13:09:42 GMT",
"Server": "Apache/2.4.41 (Ubuntu)",
"Last-Modified": "Tue, 23 Feb 2021 19:42:50 GMT",
"ETag": '"2ab2-5bc061fadc9e7-gzip"',
"Accept-Ranges": "bytes",
"Vary": "Accept-Encoding",
"Content-Encoding": "gzip",
"Content-Length": "3147",
"Keep-Alive": "timeout=5, max=100",
"Connection": "Keep-Alive",
"Content-Type": "text/html",
},
},
"UDP": {
"43": {"ERROR": "Socket Timed Out"},
"53": {
"Name": "test.local.",
"Record Type": "SOA",
"Record Class": "IN",
"nameserver": "192.168.89.80",
"port": "53",
"Answer": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"Canonical Name": "test.local.",
"Minimum TTL": "604800",
"CNAMES": [],
"DNS Record Set": "test.local. 604800 IN SOA test.local. root.test.local. 2 604800 86400 2419200 604800",
"expiration": "1615900227.7461846",
},
},
}
self.assertEqual(test_class._all_ports, self.test_ports01)
self.assertEqual(test_class._closed_tcp_ports, self.test_closed_TCP_ports01)
self.assertEqual(test_class._closed_udp_ports, self.test_closed_UDP_ports01)
self.assertEqual(test_class._open_tcp_ports, self.test_open_TCP_ports01)
self.assertEqual(test_class._open_udp_ports, self.test_open_UDP_ports01)
test_class.all_ports = self.test_ports02
self.assertEqual(test_class._all_ports, self.test_ports03)
print("Test 017 - Finish testing that class all_ports setter works\n")
def test_018_all_ports_errors(self):
"""
Tests that the all_ports setter produces correct errors
"""
print("\nTest 018 - Start testing that class get_ports produces correct errors")
with self.assertRaises(TypeError):
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.all_ports = 1
test_bad_port_list = [
({"1": {}}, KeyError),
({"TCP": 1}, TypeError),
]
for test_tuple in test_bad_port_list:
with self.assertRaises(test_tuple[1]):
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.all_ports = test_tuple[0]
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.all_ports = self.test_ports01
test_bad_port_list = [
({"1": {}}, KeyError),
({"TCP": 1}, TypeError),
({"UDP": {"69": 1}}, TypeError),
]
for test_tuple in test_bad_port_list:
with self.assertRaises(test_tuple[1]):
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.all_ports = test_tuple[0]
print(
"Test 018 - Finish testing that class all_ports produces correct errors\n"
)
def test_019_set_private_closed_open_ports(self):
"""
Tests that set_private_closed_open_ports correctly passes
"""
print(
"\nTest 019 - Start testing that class set_private_closed_open_ports works"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class._all_ports = self.test_ports01
self.assertEqual(test_class._all_ports, self.test_ports01)
test_class.set_private_closed_open_ports()
self.assertEqual(test_class._closed_tcp_ports, self.test_closed_TCP_ports01)
self.assertEqual(test_class._closed_udp_ports, self.test_closed_UDP_ports01)
self.assertEqual(test_class._open_tcp_ports, self.test_open_TCP_ports01)
self.assertEqual(test_class._open_udp_ports, self.test_open_UDP_ports01)
print(
"Test 019 - Finish testing that class set_private_closed_open_ports works\n"
)
def test_020_all_ports_errors(self):
"""
Tests that method set_private_closed_open_ports produces correct errors
"""
print(
"\nTest 020 - Start testing that class set_private_closed_open_ports produces correct errors"
)
test_bad_list = [
({"TCP": {"22": 1}}, TypeError),
({"UDP": {"22": 1}}, TypeError),
]
for test_tuple in test_bad_list:
with self.assertRaises(test_tuple[1]):
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.set_private_closed_open_ports(test_tuple[0])
print(
"Test 020 - Finish testing that class set_private_closed_open_ports produces correct errors\n"
)
def test_021_open_tcp_ports_getter(self):
"""
Tests that the open_tcp_ports getter for the class is working
"""
print("\nTest 021 - Start testing that class open_tcp_ports getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.open_tcp_ports, {})
test_class.all_ports = self.test_ports01
self.assertEqual(test_class.open_tcp_ports, self.test_open_TCP_ports01)
print("Test 021 - Finish testing that class open_tcp_ports getter works\n")
def test_022_open_tcp_ports_directly(self):
"""
Tests that you cannot set the open_tcp_ports directly
"""
print(
"\nTest 022 - Start testing that open_tcp_ports cannot be set be directly"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.open_tcp_ports = 4
print(
"Test 022 - Finish testing that open_tcp_ports cannot be set be directly\n"
)
def test_023_open_udp_ports_getter(self):
"""
Tests that the open_udp_ports getter for the class is working
"""
print("\nTest 023 - Start testing that class open_udp_ports getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.open_udp_ports, {})
test_class.all_ports = self.test_ports01
self.assertEqual(test_class.open_udp_ports, self.test_open_UDP_ports01)
print("Test 023 - Finish testing that class open_udp_ports getter works\n")
def test_024_open_udp_ports_directly(self):
"""
Tests that you cannot set the open_udp_ports directly
"""
print(
"\nTest 024 - Start testing that open_udp_ports cannot be set be directly"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.open_udp_ports = 4
print(
"Test 024 - Finish testing that open_udp_ports cannot be set be directly\n"
)
def test_025_closed_tcp_ports_getter(self):
"""
Tests that the closed_tcp_ports getter for the class is working
"""
print("\nTest 025 - Start testing that class closed_tcp_ports getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.closed_tcp_ports, {})
test_class.all_ports = self.test_ports01
self.assertEqual(test_class.closed_tcp_ports, self.test_closed_TCP_ports01)
print("Test 025 - Finish testing that class closed_tcp_ports getter works\n")
def test_026_closed_tcp_ports_directly(self):
"""
Tests that you cannot set the closed_tcp_ports directly
"""
print(
"\nTest 026 - Start testing that closed_tcp_ports cannot be set be directly"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.closed_tcp_ports = 4
print(
"Test 026 - Finish testing that closed_tcp_ports cannot be set be directly\n"
)
def test_027_closed_udp_ports_getter(self):
"""
Tests that the closed_udp_ports getter for the class is working
"""
print("\nTest 027 - Start testing that class closed_udp_ports getter works")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(test_class.closed_udp_ports, {})
test_class.all_ports = self.test_ports01
self.assertEqual(test_class.closed_udp_ports, self.test_closed_UDP_ports01)
print("Test 027 - Finish testing that class closed_udp_ports getter works\n")
def test_028_closed_udp_ports_directly(self):
"""
Tests that you cannot set the closed_udp_ports directly
"""
print(
"\nTest 028 - Start testing that closed_udp_ports cannot be set be directly"
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
with self.assertRaises(AttributeError):
test_class.closed_udp_ports = 4
print(
"Test 028 - Finish testing that closed_udp_ports cannot be set be directly\n"
)
def test_029_class_hash_value(self):
"""
Tests that the hash value returned is correct
"""
print("\nTest 029 - Start testing that the hash value returned is correct")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertEqual(hash(test_class), hash(self.test_ip01))
print("Test 029 - Finish testing that the hash value returned is correct\n")
def test_030_class_bool_value(self):
"""
Tests that the bool value returned is correct
"""
print("\nTest 030 - Start testing that the bool value returned is correct")
test_class = FoundDevice(self.test_ip01, self.test_time01)
self.assertTrue(bool(test_class))
print("Test 030 - Finish testing that the bool value returned is correct\n")
def test_031_class_eq_method(self):
"""
Tests the class __eq__ method
"""
print("\nTest 031 - Start testing that the __eq__ method...")
test_class01 = FoundDevice(self.test_ip01, self.test_time01)
self.assertTrue(test_class01 == self.test_ip01)
test_class02 = FoundDevice(self.test_ip02, self.test_time02)
self.assertTrue(test_class01 == test_class02)
test_class01.all_ports = self.test_ports01
test_class02.all_ports = self.test_ports01
self.assertTrue(test_class01 == test_class02)
self.assertTrue(test_class01 == self.test_ip03)
# Now to test the False Ones
self.assertFalse(test_class01 == "a")
self.assertFalse(test_class01 == "192.168.1.66")
test_class04 = FoundDevice("192.168.1.64", self.test_time01)
test_class05 = FoundDevice("192.168.1.65", self.test_time01)
self.assertFalse(test_class04 == test_class05)
test_class06 = FoundDevice("192.168.1.67", self.test_time01)
test_class07 = FoundDevice("192.168.1.67", (1.7, 1.7, 1.7))
self.assertFalse(test_class06 == test_class07)
self.assertFalse(test_class01 == test_class07)
self.assertFalse(test_class01 == self.test_ip04)
print("Test 031 - Finish testing that the __eq__ method\n")
def test_032_device_info_grabber_method(self):
"""
Test that the device_info_grabber method is called correctly
"""
print(
"\nTest 032 - Start testing that the device_info_grabber method can be called..."
)
with patch("scan_mods.device_class.port_scanner") as mock_port_scanner:
mock_port_scanner.return_value = self.test_ports01
with patch("scan_mods.device_class.device_grab") as mock_dev_info_grab:
mock_dev_info_grab.return_value = {
"Worked": "Method called. Tested in its own file"
}
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_class.get_ports()
self.assertIsNone(test_class.device_info)
test_class.device_info_grabber()
self.assertIsInstance(test_class.device_info, dict)
self.assertEqual(
test_class.device_info,
{"Worked": "Method called. Tested in its own file"},
)
print(
"Test 032 - Finish testing that the device_info_grabber method can be called\n"
)
def test_033_class_repr(self):
"""
Tests that the class __repr__ method is correct
"""
print("\nTest 033 - Start testing that the __repr__ method is correct...")
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_string = f"{self.test_ip01} : "
test_string += f"\n\tresponse times are {self.test_time01[0]} ms, {self.test_time01[1]} ms, {self.test_time01[2]} ms"
test_string += f"\n\tusername is Username has not been set yet"
test_string += f"\n\tpassword Password for device has not been given"
test_string += f"\n\tenable password Not using Enable password for this device"
test_string += f"\n\tdomain name is Domain name has not been set yet"
self.assertEqual(repr(test_class), test_string)
test_class.all_ports = self.test_ports01
test_string += "\n\tOpen TCP Ports:"
for key in self.test_open_TCP_ports01.keys():
test_string += f"\n\t\t{key} : {self.test_open_TCP_ports01[key]}"
test_string += "\n\tOpen UDP Ports:"
for key in self.test_open_UDP_ports01.keys():
test_string += f"\n\t\t{key} : {self.test_open_UDP_ports01[key]}"
test_string += "\n\tClosed TCP Ports:"
for key in self.test_closed_TCP_ports01.keys():
test_string += f"\n\t\t{key} : {self.test_closed_TCP_ports01[key]}"
test_string += "\n\tClosed UDP Ports:"
for key in self.test_closed_UDP_ports01.keys():
test_string += f"\n\t\t{key} : {self.test_closed_UDP_ports01[key]}"
self.assertEqual(repr(test_class), test_string)
print("Test 033 - Finish testing that the __repr__ method is correct\n")
def test_034_class_str(self):
"""
Tests that the class __str__ method is correct
"""
print("\nTest 034 - Start testing that the __str__ method is correct...")
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_string = f"{self.test_ip01} : "
test_string += f"\n\tresponse times are {self.test_time01[0]} ms, {self.test_time01[1]} ms, {self.test_time01[2]} ms"
test_string += f"\n\tusername is Username has not been set yet"
test_string += f"\n\tpassword Password for device has not been given"
test_string += f"\n\tenable password Not using Enable password for this device"
test_string += f"\n\tdomain name is Domain name has not been set yet"
self.assertEqual(str(test_class), test_string)
test_class.all_ports = self.test_ports01
test_string += "\n\tOpen TCP Ports:"
for key in self.test_open_TCP_ports01.keys():
test_string += f"\n\t\t{key} = {self.test_open_TCP_ports01[key]}"
test_string += "\n\tOpen UDP Ports:"
for key in self.test_open_UDP_ports01.keys():
test_string += f"\n\t\t{key} = {self.test_open_UDP_ports01[key]}"
test_string += "\n\tClosed TCP Ports:"
for key in self.test_closed_TCP_ports01.keys():
test_string += f"\n\t\t{key}"
test_string += "\n\tClosed UDP Ports:"
for key in self.test_closed_UDP_ports01.keys():
test_string += f"\n\t\t{key}"
self.assertEqual(str(test_class), test_string)
print("Test 034 - Finish testing that the __str__ method is correct\n")
def test_035_print_json_short(self):
"""
Tests that the print_json_short function works correctly
"""
print(
"\nTest 035 - Start testing that the print_json_short function works correctly..."
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_dict = {
self.test_ip01: {
"ping_response_times": self.test_time01,
"username": "Username has not been set yet",
"password": "<PASSWORD>",
"enable_password": "<PASSWORD> using Enable password for this device",
"domain_name": "Domain name has not been set yet",
}
}
self.assertEqual(test_class.print_json_short(), json.dumps(test_dict, indent=4))
test_class.all_ports = self.test_ports01
test_dict[self.test_ip01]["Open_TCP_Ports_List"] = list(
self.test_open_TCP_ports01.keys()
)
test_dict[self.test_ip01]["Open_UDP_Ports_List"] = list(
self.test_open_UDP_ports01.keys()
)
test_dict[self.test_ip01]["Closed_TCP_Ports_List"] = list(
self.test_closed_TCP_ports01.keys()
)
test_dict[self.test_ip01]["Closed_UDP_Ports_List"] = list(
self.test_closed_UDP_ports01.keys()
)
self.assertEqual(test_class.print_json_short(), json.dumps(test_dict, indent=4))
with patch("scan_mods.device_class.port_scanner") as mock_port_scanner:
mock_port_scanner.return_value = self.test_ports01
with patch("scan_mods.device_class.device_grab") as mock_dev_info_grab:
mock_dev_info_grab.return_value = {
"Version_Info": "Method called. Tested in its own file"
}
test_class.device_info_grabber()
test_dict[self.test_ip01][
"Device_Info"
] = "Method called. Tested in its own file"
self.assertEqual(test_class.print_json_short(), json.dumps(test_dict, indent=4))
print(
"Test 035 - Finish testing that the print_json_short function works correctly\n"
)
def test_036_print_json_long(self):
"""
Tests that the print_json_long function works correctly
"""
print(
"\nTest 036 - Start testing that the print_json_long function works correctly..."
)
test_class = FoundDevice(self.test_ip01, self.test_time01)
test_dict = {
self.test_ip01: {
"ping_response_times": self.test_time01,
"username": "Username has not been set yet",
"password": "<PASSWORD>",
"enable_password": "<PASSWORD> for <PASSWORD> device",
"domain_name": "Domain name has not been set yet",
}
}
self.assertEqual(test_class.print_json_long(), json.dumps(test_dict, indent=4))
test_class.all_ports = self.test_ports01
test_dict[self.test_ip01]["Open_TCP_Ports_List"] = self.test_open_TCP_ports01
test_dict[self.test_ip01]["Open_UDP_Ports_List"] = self.test_open_UDP_ports01
test_dict[self.test_ip01][
"Closed_TCP_Ports_List"
] = self.test_closed_TCP_ports01
test_dict[self.test_ip01][
"Closed_UDP_Ports_List"
] = self.test_closed_UDP_ports01
self.assertEqual(test_class.print_json_long(), json.dumps(test_dict, indent=4))
with patch("scan_mods.device_class.port_scanner") as mock_port_scanner:
mock_port_scanner.return_value = self.test_ports01
with patch("scan_mods.device_class.device_grab") as mock_dev_info_grab:
mock_dev_info_grab.return_value = {
"Version_Info": "Method called. Tested in its own file"
}
test_class.device_info_grabber()
test_dict[self.test_ip01]["Device_Info"] = {
"Version_Info": "Method called. Tested in its own file"
}
self.assertEqual(test_class.print_json_long(), json.dumps(test_dict, indent=4))
print(
"Test 036 - Finish testing that the print_json_long function works correctly\n"
)
if __name__ == "__main__":
unittest.main()
```
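A recurring pattern in the tests above is that attributes such as `IP`, `response_time`, and the four port dictionaries can be read but never assigned (assignment is expected to raise `AttributeError`). Below is a minimal sketch of the getter-only `@property` pattern and the input checks implied by tests 001, 002, and 004; the class name and validation details are assumptions for illustration, not the actual `scan_mods.device_class.FoundDevice` implementation.
```python
import ipaddress


class ReadOnlyDeviceSketch:
    """Illustrates the getter-only property pattern the tests rely on."""

    def __init__(self, ip, response_time):
        # Input checks shaped like test_002: the IP must be a string that parses
        # as an address, and response_time must be a tuple of three values.
        if not isinstance(ip, str):
            raise TypeError("IP must be a string")
        ipaddress.IPv4Address(ip)  # AddressValueError (a ValueError) on bad input
        if not isinstance(response_time, tuple):
            raise TypeError("response_time must be a tuple")
        if len(response_time) != 3:
            raise ValueError("response_time must hold three values")
        self._IP = ip
        self._response_time = response_time

    @property
    def IP(self):
        # Getter only: "instance.IP = ..." raises AttributeError, as test_004 expects.
        return self._IP

    @property
    def response_time(self):
        return self._response_time


if __name__ == "__main__":
    sketch = ReadOnlyDeviceSketch("192.168.1.65", (1.1, 1.35, 1.82))
    print(sketch.IP)
    try:
        sketch.IP = "10.0.0.1"
    except AttributeError as err:
        print("blocked:", err)
```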
#### File: test_scan_mod/test_grabbing_mods/test_device_grabber.py
```python
from typing import Type
import unittest
import ipaddress
import os
import sys
import json
from unittest import result
from unittest.mock import MagicMock, patch
import napalm
import paramiko
import paramiko.ssh_exception
if "scan_mods" in os.listdir(os.getcwd()):
sys.path.append(os.getcwd())
else:
path = "../"
while True:
if "scan_mods" in os.listdir(path):
sys.path.append(path)
break
else:
path += "../"
import scan_mods.grabbing_mods.device_grabber
class TestFoundDevice(unittest.TestCase):
"""
Tests that the device grabber python file works as expected
"""
linux_testbox = "192.168.89.80"
cisco_iosxe_no_en = "192.168.89.254"
cisco_iosv_enable = "192.168.89.253"
cisco_iosl2_no_enable = "192.168.89.252"
cisco_nx0s7 = "192.168.89.251"
cisco_iosxe_enable = "192.168.89.247"
fake_testbox = "192.168.0.254"
username = "jmctsm"
password = "<PASSWORD>"
enable_password = "<PASSWORD>"
linux_ports = {
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"},
}
cisco_ports = {
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-1.99-Cisco-1.25"},
}
nexus_ports = {
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
"22": {"Return Information": "SSH-2.0-OpenSSH_6.2 FIPS"},
}
fake_ports = {
"21": {"Return Information": "220 (vsFTPd 3.0.3)"},
}
no_enable_device_list = [
(fake_testbox, fake_ports),
(linux_testbox, linux_ports),
(cisco_iosxe_no_en, cisco_ports),
(cisco_iosl2_no_enable, cisco_ports),
(cisco_nx0s7, nexus_ports),
]
enable_device_list = [
(cisco_iosv_enable, cisco_ports),
(cisco_iosxe_enable, cisco_ports),
]
test_get_device_type_list = [
(linux_testbox, 22, username, password, None, "Linux", "linux"),
(cisco_iosxe_no_en, 22, username, password, None, "Cisco", "ios"),
(cisco_iosv_enable, 22, username, password, enable_password, "Cisco", "ios"),
(cisco_iosl2_no_enable, 22, username, password, None, "Cisco", "ios"),
(cisco_nx0s7, 22, username, password, None, "Other", "nxos_ssh"),
(cisco_iosxe_enable, 22, username, password, enable_password, "Cisco", "ios"),
]
test_get_config_napalm_list = [
("ios", cisco_iosxe_no_en, 22, username, password, None),
("ios", cisco_iosv_enable, 22, username, password, enable_password),
("ios", cisco_iosl2_no_enable, 22, username, password, None),
("nxos_ssh", cisco_nx0s7, 22, username, password, None),
("ios", cisco_iosxe_enable, 22, username, password, enable_password),
]
def test_01_check_all_pass(self):
print("\nTest 01 - Starting test to ensure that all pass...")
for test_tuple in self.no_enable_device_list:
device_ip, device_open_ports = test_tuple
results = scan_mods.grabbing_mods.device_grabber.device_grab(
address=device_ip,
port_dict=device_open_ports,
username=self.username,
password=<PASSWORD>,
enable_password_needed=False,
)
self.assertIsInstance(results, dict)
self.assertGreaterEqual(len(results), 1)
for key in results.keys():
self.assertTrue(key in ["Version_Info", "CONFIG"])
for test_tuple in self.enable_device_list:
device_ip, device_open_ports = test_tuple
results = scan_mods.grabbing_mods.device_grabber.device_grab(
address=device_ip,
port_dict=device_open_ports,
username=self.username,
password=<PASSWORD>,
enable_password_needed=True,
enable_password=self.enable_password,
)
self.assertIsInstance(results, dict)
self.assertGreaterEqual(len(results), 1)
for key in results.keys():
self.assertTrue(key in ["Version_Info", "CONFIG"])
print("Test 01 - Finish test that all pass")
def test_02_check_ports_pass(self):
"""
Tests that the check ports function works as expected
"""
print(
"\nTest 02 - Starting the test that the check ports function works as expected..."
)
test_list = [
(self.linux_ports, (22, "Linux")),
(self.cisco_ports, (22, "Cisco")),
(self.nexus_ports, (22, "Other")),
(self.fake_ports, (False, False)),
(
{
"2222": {
"Return Information": "SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.1"
}
},
(2222, "Other"),
),
]
for test_tuple in test_list:
test_ports, correct_value = test_tuple
results = scan_mods.grabbing_mods.device_grabber.check_ports(test_ports)
self.assertIsInstance(results, tuple)
self.assertEqual(len(results), 2)
self.assertEqual(results, correct_value)
print(
"Test 02 - Finished the test that the check ports function works as expected...\n"
)
def test_03_check_ports_fail(self):
"""
Tests that the check ports function fails as expected
"""
print(
"\nTest 03 - Starting the test that the check ports function fails as expected..."
)
with self.assertRaises(TypeError):
scan_mods.grabbing_mods.device_grabber.check_address(22)
# This does not seem to be working for raising an exception
with patch("builtins.str") as mock_str:
mock_str.side_effect = Exception
with self.assertRaises(Exception):
scan_mods.grabbing_mods.device_grabber.check_address(22)
with self.assertRaises(TypeError):
scan_mods.grabbing_mods.device_grabber.check_address({"2222": {22}})
print(
"Test 03 - Finished the test that the check ports function fails as expected...\n"
)
def test_04_get_device_type_pass(self):
"""
Tests that the get_device_type passes correctly
"""
print(
"\nTest 04 - Starting the test that get_device_type function works as expected..."
)
for test_tuple in self.test_get_device_type_list:
results = scan_mods.grabbing_mods.device_grabber.get_device_type(
test_tuple[0],
test_tuple[1],
test_tuple[2],
test_tuple[3],
test_tuple[4],
test_tuple[5],
)
self.assertIsNotNone(results)
self.assertIsInstance(results, dict)
self.assertGreaterEqual(len(results), 1)
self.assertEqual(results["OS Type"], test_tuple[6])
print(
"Test 04 - Finished the test that get_device_type function works as expected...\n"
)
def test_05_get_device_type_fail(self):
"""
Tests that the get_device_type fails correctly
"""
print(
"\nTest 05 - Starting the test that get_device_type function fails as expected..."
)
test_fail_get_device_type_list = [
(1, 22, self.username, self.password, None, "Linux", "linux"),
(self.linux_testbox, 22, 1, self.password, None, "Linux", "linux"),
(self.linux_testbox, 22, self.username, 1, None, "Linux", "linux"),
(self.linux_testbox, 22, self.username, self.password, None, 1, "linux"),
]
for test_tuple in test_fail_get_device_type_list:
# Raise an error if these things are not a string
with self.assertRaises(TypeError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
test_tuple[0],
test_tuple[1],
test_tuple[2],
test_tuple[3],
test_tuple[4],
test_tuple[5],
)
# error if the IP address is not a true IP address
with self.assertRaises(ipaddress.AddressValueError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
"a",
22,
self.username,
self.password,
None,
"Linux",
)
# raise an error if the port is not an int
with self.assertRaises(TypeError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
self.linux_testbox,
"a",
self.username,
self.password,
None,
"Linux",
)
# raise an error if the port is too low or too high
for test_num in [-1, 1_000_000]:
with self.assertRaises(ValueError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
self.linux_testbox,
test_num,
self.username,
self.password,
None,
"Linux",
)
# raise an error if the enable password is not a string
with self.assertRaises(TypeError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
self.linux_testbox,
60_000,
self.username,
self.password,
1,
"Linux",
)
test_list = ["Cisco", "Linux", "Other"]
for test_header in test_list:
with patch(
"scan_mods.grabbing_mods.device_grabber.paramiko.SSHClient"
) as mock_ssh_client:
stderr = MagicMock()
stderr.readlines.return_value = "file1\nfile2\nfile3\n"
with self.assertRaises(ValueError):
scan_mods.grabbing_mods.device_grabber.get_device_type(
self.linux_testbox,
22,
self.username,
self.password,
None,
test_header,
)
print(
"Test 05 - Finished the test that get_device_type function fails as expected\n"
)
def test_06_get_device_type_error_returns(self):
"""
Tests that the get_device_type returns upon certain errors
"""
print(
"\nTest 06 - Starting the test that get_device_type function returns as expected for certain errors..."
)
# cannot test for paramiko.ssh_exception.NoValidConnectionsError because it wraps multiple other
# errors.
test_list = [
(
paramiko.AuthenticationException,
{
"Version Info": "[ERROR] paramiko.AuthenticationException: Authentication failed for device 192.168.89.80"
},
),
(
TimeoutError,
{
"Version Info": "[ERROR] TimeoutError: Connection timed out for device 192.168.89.80"
},
),
]
for test_tuple in test_list:
with patch(
"scan_mods.grabbing_mods.device_grabber.paramiko.SSHClient"
) as mock_ssh_client:
mock_ssh_client.return_value.connect.side_effect = test_tuple[0]
result = scan_mods.grabbing_mods.device_grabber.get_device_type(
self.linux_testbox,
22,
self.username,
self.password,
None,
"Linux",
)
self.assertIsInstance(result, dict)
self.assertEqual(result, test_tuple[1])
print(
"Test 06 - Finished the test that get_device_type function returns as expected for certain errors\n"
)
def test_07_get_config_napalm_pass(self):
"""
Tests that the get_config_napalm passes
"""
print(
"\nTest 07 - Starting the test that get_config_napalm function passes correctly..."
)
for test_tuple in self.test_get_config_napalm_list:
results = scan_mods.grabbing_mods.device_grabber.get_config_napalm(
dev_driver=test_tuple[0],
host=test_tuple[1],
port=test_tuple[2],
usern=test_tuple[3],
passw=<PASSWORD>[4],
enable_password=<PASSWORD>[5],
)
self.assertIsInstance(results, dict)
self.assertGreaterEqual(len(results), 1)
for key in results.keys():
self.assertIn(
key,
[
"Device_Facts",
"Device_Optics",
"Device_Network_Instances",
"Device_LLDP_Detail",
"Device_LLDP",
"Device_Environment",
"Device_Interfaces",
"Device_Interfaces_IP",
"Device_SNMP_Information",
"Device_Users",
"Device_Startup_Config_File_Location",
"Device_Running_Config_File_Location",
"Device_Candidate_Config_File_Location",
"Device_Startup_Config_Full_File_Location",
"Device_Running_Config_Full_File_Location",
"Device_Candidate_Config_Full_File_Location",
],
)
print(
"Test 07 - Finished the test that get_config_napalm function passes correctly\n"
)
def test_08_get_config_napalm_raise_errors(self):
"""
Test that get_config_napalm raises the correct errors
"""
print(
"\nTest 08 - Starting the test that get_config_napalm raises the correct errors..."
)
# Tests that initial type checkers work correctly
bad_test_list = [
(1, "a", 1, "a", "a", None, TypeError),
("a", 1, 1, "a", "a", None, TypeError),
("a", "a", 1, 1, "a", None, TypeError),
("a", "a", 1, "a", 1, None, TypeError),
("a", "a", "a", "a", "a", None, TypeError),
("a", "192.168.0.254", 1, "a", "a", 1, TypeError),
("a", "a", 1, "a", "a", "a", ipaddress.AddressValueError),
("a", "192.168.0.254", 1, "a", "a", "a", ValueError),
("ios", "192.168.0.254", 1_000_000, "a", "a", "a", ValueError),
("ios", "192.168.0.254", -1, "a", "a", "a", ValueError),
]
for test_tuple in bad_test_list:
with self.assertRaises(test_tuple[6]):
scan_mods.grabbing_mods.device_grabber.get_config_napalm(
dev_driver=test_tuple[0],
host=test_tuple[1],
port=test_tuple[2],
usern=test_tuple[3],
passw=test_tuple[4],
enable_password=test_tuple[5],
)
with patch(
"scan_mods.grabbing_mods.device_grabber.napalm.get_network_driver"
) as mock_driver:
mock_driver.return_value.side_effect = ValueError
result = scan_mods.grabbing_mods.device_grabber.get_config_napalm(
"ios",
"192.168.89.254",
22,
"jmctsm",
"ciscocisco",
)
self.assertIsInstance(result, dict)
self.assertEqual(len(result), 0)
print(
"Test 08 - Finished the test that get_config_napalm raises the correct errors\n"
)
def test_09_directory_checker_pass(self):
"""
Tests that the directory_checker passes
"""
print(
"\nTest 09 - Starting the test that directory_checker function passes correctly..."
)
test_addresses = [None, 1]
with self.assertRaises(ValueError):
scan_mods.grabbing_mods.device_grabber.directory_checker(None)
with patch("scan_mods.grabbing_mods.device_grabber.os") as mock_os:
mock_os.getcwd.return_value = "/root/test/"
mock_os.listdir.return_value = ["Output"]
mock_os.path.exists.return_value = True
result = scan_mods.grabbing_mods.device_grabber.directory_checker(
"192.168.0.254"
)
self.assertIsInstance(result, str)
self.assertEqual(result, "/root/test//Output/Scans/192.168.0.254")
with patch("scan_mods.grabbing_mods.device_grabber.os") as mock_os:
mock_os.getcwd.return_value = "/root/Output/test1/test2/test3/test"
mock_os.listdir.side_effect = ["test", "test3", "test2", "test1", "Output"]
mock_os.path.exists.return_value = True
result = scan_mods.grabbing_mods.device_grabber.directory_checker(
"192.168.0.254"
)
self.assertIsInstance(result, str)
self.assertEqual(result, "../../../..//Output/Scans/192.168.0.254")
print(
"Test 09 - Finished the test that directory_checker function passes correctly\n"
)
def test_10_device_grab_fails_correctly(self):
"""
Tests that the device_grab function fails correctly
"""
print(
"\nTest 10 - Starting the test that device_grab raises the correct errors..."
)
with patch(
"scan_mods.grabbing_mods.device_grabber.check_ports"
) as mock_check_ports:
mock_check_ports.return_value = (False, False)
result = scan_mods.grabbing_mods.device_grabber.device_grab(
"192.168.0.1",
self.linux_ports,
"jmctsm",
"ciscocisco",
)
self.assertIsInstance(result, dict)
self.assertEqual(
result,
{
"Version_Info": ["No Version information was available"],
"CONFIG": {
"Open_Close": False,
"Open_Close_Msg": "SSH is not open on the device for ports scanned",
"Device_Information": {},
},
},
)
with self.assertRaises(ValueError):
scan_mods.grabbing_mods.device_grabber.device_grab(
"192.168.0.1",
self.linux_ports,
"jmctsm",
"ciscocisco",
enable_password_needed="a",
)
with patch(
"scan_mods.grabbing_mods.device_grabber.check_ports"
) as mock_check_ports:
mock_check_ports.return_value = (22, "Cisco")
with patch(
"scan_mods.grabbing_mods.device_grabber.get_device_type"
) as mock_get_device_type:
mock_get_device_type.return_value = {
"Version Info": "[ERROR] Error_TEST"
}
result = scan_mods.grabbing_mods.device_grabber.device_grab(
"192.168.0.1",
self.linux_ports,
"jmctsm",
"ciscocisco",
)
self.assertIsInstance(result, dict)
self.assertEqual(
result,
{"Version_Info": "[ERROR] Error_TEST"},
)
print(
"Test 10 - Finished the test that device_grab raises the correct errors\n"
)
if __name__ == "__main__":
unittest.main()
```
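Test 02 above pins down the contract of `check_ports`: it returns a `(ssh_port, vendor_guess)` tuple when an SSH banner is found among the scanned ports and `(False, False)` otherwise. The sketch below reproduces exactly the tuples the test expects; it is derived only from that test data, so the real `scan_mods.grabbing_mods.device_grabber.check_ports` may classify banners differently.
```python
def check_ports_sketch(port_dict):
    """Return (ssh_port, vendor_guess) in the shape test_02 expects."""
    for port, info in port_dict.items():
        banner = info.get("Return Information", "")
        if not banner.startswith("SSH"):
            continue
        port_number = int(port)
        if port_number != 22:
            # SSH on a non-standard port: reachable, but vendor left as "Other".
            return (port_number, "Other")
        if "Cisco" in banner:
            return (22, "Cisco")
        if "Ubuntu" in banner:
            return (22, "Linux")
        return (22, "Other")
    # No SSH banner on any scanned port.
    return (False, False)


if __name__ == "__main__":
    print(check_ports_sketch({"21": {"Return Information": "220 (vsFTPd 3.0.3)"}}))   # (False, False)
    print(check_ports_sketch({"22": {"Return Information": "SSH-1.99-Cisco-1.25"}}))  # (22, 'Cisco')
```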
#### File: tests/test_scan_mod/test_mp_pinger.py
```python
import unittest
import ipaddress
import os
import sys
from unittest.mock import patch
if "scan_mods" in os.listdir(os.getcwd()):
sys.path.append(os.getcwd())
else:
path = "../"
while True:
if "scan_mods" in os.listdir(path):
sys.path.append(path)
break
else:
path += "../"
import scan_mods.mp_pinger
class TestPinger(unittest.TestCase):
"""
Tests that pinger works
"""
mp_test_addresses = [
"192.168.0.8/29",
"192.168.89.0/24",
"192.168.0.254",
]
mp_test_networks = ["192.168.0.8/29", "192.168.89.0/24", "192.168.0.128/25"]
sp_test_addresses = [
"192.168.0.192/26",
]
def unpack_addresses(self, addresses_to_test):
"""
function to unpack addresses for all test functions
Args:
addresses_to_test (list) : address list of addresses or networks to test
return:
(list) : list of the individual addresses to run against
"""
if len(addresses_to_test) == 0:
raise ValueError(
"There were no arguments passed to the function. That is wrong. Closing"
)
return_addresses = []
for address in addresses_to_test:
if "/" in address:
try:
six_or_four = ipaddress.ip_network(address)
except ValueError:
print(f"{address} is not a valid subnet. Skipping.")
continue
for address_host in six_or_four.hosts():
return_addresses.append(str(address_host))
else:
try:
ipaddress.ip_address(address)
except ValueError:
print(f"{address} is not a valid address. Skipping.")
continue
return_addresses.append(str(address))
for address in return_addresses:
try:
ipaddress.ip_address(address)
except ValueError:
raise ValueError(f"{address} is not an IPv4/v6 address. Shutting Down")
if len(return_addresses) > 0:
return return_addresses
else:
raise ValueError("No usable addresses to scan")
def test_01_all_pass_pinger(self):
print("\nStart testing that all pass and return time values")
hosts_lists = self.unpack_addresses(self.mp_test_addresses)
active_hosts = scan_mods.mp_pinger.pinger(hosts_lists)
self.assertGreaterEqual(len(active_hosts), 0)
self.assertIsInstance(active_hosts, dict)
for key in active_hosts.keys():
self.assertIn(key, hosts_lists)
self.assertIsInstance(active_hosts[key], dict)
self.assertIsInstance(active_hosts[key]["ping_response_time"], tuple)
self.assertEqual(len(active_hosts[key]["ping_response_time"]), 3)
print("Finish testing that all pass\n")
def test_02_all_pass_ping_address(self):
print("\nStart testing that all pass for ping_address")
hosts_lists = self.unpack_addresses(self.sp_test_addresses)
for host in hosts_lists:
return_value = scan_mods.mp_pinger.ping_address(host)
self.assertTrue(
isinstance(return_value, tuple) or isinstance(return_value, str)
)
if isinstance(return_value, str):
self.assertEqual(return_value, "TIMEOUT")
if isinstance(return_value, tuple):
self.assertIsInstance(return_value[0], str)
self.assertEqual(return_value[0], host)
self.assertIsInstance(return_value[1], tuple)
self.assertEqual(len(return_value[1]), 3)
self.assertIsInstance(return_value[1][0], float)
self.assertIsInstance(return_value[1][1], float)
self.assertIsInstance(return_value[1][2], float)
print("Finish testing that all pass\n")
def test_03_pinger_fail_as_expected(self):
print("\nStart testing that all fail due to empty list passed")
test_list = []
with self.assertRaises(ValueError):
scan_mods.mp_pinger.pinger(test_list)
print("Finish testing that all fail due to empty list passed\n")
print("\nStart testing that all fail due to not a list passed")
test_list = [(1, 1), "a", 1.1]
for entry in test_list:
with self.assertRaises(TypeError):
scan_mods.mp_pinger.pinger(tuple(entry))
print("Finish testing that all fail due to not a list passed\n")
print("\nStart testing that all fail due to no subnets. All host bits")
with self.assertRaises(ValueError):
test_02_addresses = [
ipaddress.ip_network("192.168.1.65/29"),
ipaddress.ip_network("10.0.1.248/32"),
]
for address in test_02_addresses:
scan_mods.mp_pinger.pinger(address)
print("Finish testing that all fail due to no subnets. All host bits\n")
def test_04_pass_due_to_no_active_hosts(self):
print("\nStart testing that passed due to no active hosts")
no_active_hosts_list = [
"192.168.1.0/29",
"10.0.1.16/29",
]
hosts_lists = self.unpack_addresses(no_active_hosts_list)
with self.assertRaises(Exception):
scan_mods.mp_pinger.pinger(hosts_lists)
print("Finish testing that passed due to no active hosts\n")
def test_05_pass_due_to_no_arguments_passed_to_pinger(self):
print("\nStart testing that all fail due to no arguments passed\n")
with self.assertRaises(TypeError):
scan_mods.mp_pinger.pinger()
print("\nFinish testing that all fail due to no arguments passed\n")
def test_06_fail_due_to_no_not_an_IP_to_pinger(self):
print("\nStart testing that pinger fails due to an IP not being in the list\n")
test_addresses = [
"10.0.1.254",
"192.168.1.65",
"abc",
123,
]
with self.assertRaises(ValueError):
scan_mods.mp_pinger.pinger(test_addresses)
print("\nFinish testing that pinger fails due to an IP not being in the list\n")
def test_07_fail_due_to_no_not_an_IP_to_ping_address(self):
print("\nStart testing that pinger fails due to an IP not being in the list\n")
test_addresses = [
"10.0.1.254",
"10.0.1.16/29",
"abc",
123,
]
with self.assertRaises(ValueError):
for address in test_addresses:
scan_mods.mp_pinger.ping_address(address)
print("\nFinish testing that pinger fails due to an IP not being in the list\n")
if __name__ == "__main__":
unittest.main()
```
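The assertions in tests 01 and 02 above imply the result shapes of the two functions: `pinger()` returns a dict keyed by address whose values carry a three-value `ping_response_time` tuple, and `ping_address()` returns either the string `"TIMEOUT"` or an `(address, times)` tuple. The snippet below is a hypothetical consumer of that output; the timing values are invented for illustration only.
```python
# Hypothetical pinger() result shaped like the assertions in test_01 above.
example_pinger_result = {
    "192.168.89.80": {"ping_response_time": (0.8, 1.1, 1.6)},
    "192.168.89.254": {"ping_response_time": (1.0, 1.2, 1.9)},
}

for ip, info in example_pinger_result.items():
    low, avg, high = info["ping_response_time"]
    print(f"{ip}: min={low} ms avg={avg} ms max={high} ms")

# ping_address() per test_02 returns "TIMEOUT" or (address, (t1, t2, t3)),
# which pinger() aggregates into the per-host dict shown above.
```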
#### File: test_scan_mod/test_protocol_scanners/test_scanner_http_scanner.py
```python
from unittest.mock import patch
import requests
import unittest
import os
import sys
import ipaddress
import json
if "scan_mods" in os.listdir(os.getcwd()):
sys.path.append(os.getcwd())
else:
path = "../"
while True:
if "scan_mods" in os.listdir(path):
sys.path.append(path)
break
else:
path += "../"
import scan_mods.protocol_scanners.http_scanner
class TestPortScanner(unittest.TestCase):
"""
Tests that HTTP port scanner works
"""
good_ports = [80, 8080, None]
good_servers = [
"192.168.0.254",
"192.168.89.80",
"192.168.89.254",
"192.168.89.251",
]
def test_01_all_pass_using_http_scanner(self):
"""
Tests that the HTTP port scanner passes initial tests
"""
print("\nStarting test for all pass with http_scanner")
for address in self.good_servers:
for port in self.good_ports:
print(f"Scanning {address}:{port}")
result = scan_mods.protocol_scanners.http_scanner.http_scanner(
address, port
)
self.assertIsNotNone(result)
self.assertGreaterEqual(len(result), 1)
self.assertIsInstance(result, dict)
for key in result.keys():
self.assertIsInstance(key, str)
print("Finished test for all pass with http_scanner\n")
def test_02_pass_non_string_and_fail(self):
"""
Tests that the HTTP port scanner fails when passed a non-string value
"""
print("\nStarting test for http_scanner failing for using a non-string")
for address in [
1,
[1, 2],
(1, 2),
1.1,
{"test": "test"},
]:
with self.assertRaises(TypeError):
scan_mods.protocol_scanners.http_scanner.http_scanner(address)
print("Finished test for http_scanner failing for using a non-string\n")
def test_03_pass_non_IPv4able_arg_and_fail(self):
"""
Tests that the HTTP port scanner fails when passed a non-IPv4-able string
"""
print(
"\nStarting test for http_scanner failing for using a non_IP-able address"
)
for address in ["1.1.1", "1", "a"]:
with self.assertRaises(ipaddress.AddressValueError):
scan_mods.protocol_scanners.http_scanner.http_scanner(address)
print(
"Finished test for http_scanner failing for using a non_IP-able address\n"
)
def test_04_pass_bad_port_value_and_fail(self):
"""
Tests that the HTTP port scanner fails when passed bad port value
"""
print("\nStarting test for http_scanner failing when use a bad port value")
for address in self.good_servers:
for port in [
"a",
]:
with self.assertRaises(ValueError):
scan_mods.protocol_scanners.http_scanner.http_scanner(address, port)
for port in [
1.1,
(1, 1),
[1, 1],
]:
with self.assertRaises(TypeError):
scan_mods.protocol_scanners.http_scanner.http_scanner(address, port)
for port in [
-1,
1_000_000,
65536,
]:
with self.assertRaises(ValueError):
scan_mods.protocol_scanners.http_scanner.http_scanner(address, port)
print("Finished test for http_scanner failing when use a bad port value\n")
def test_05_patch(self):
""" Tests that the all exceptions are caught correctly"""
print("\nStarting test for http_scanner correctly catching exceptions")
test_list = [
(requests.exceptions.HTTPError, "HTTPError -- "),
(requests.exceptions.ConnectionError, "ConnectionError -- "),
(Exception, "OtherError -- "),
]
for test_tuple in test_list:
test_exception, test_string = test_tuple
with patch(
"scan_mods.protocol_scanners.https_scanner.requests.Session.get",
side_effect=test_exception,
):
result = scan_mods.protocol_scanners.http_scanner.http_scanner(
"192.168.89.80",
port=443,
)
self.assertIsNotNone(result)
self.assertGreaterEqual(len(result), 1)
self.assertIsInstance(result, dict)
for key in result.keys():
self.assertIsInstance(key, str)
self.assertEqual(key, "ERROR")
self.assertEqual(result[key], test_string)
print("Finished test for http_scanner correctly catching exceptions")
def test_06_can_create_valid_json(self):
"""
Tests that the HTTP port scanner can create valid json
"""
print("\nStarting test http_scanner can create valid JSON")
dict_of_results = {}
for address in self.good_servers:
dict_of_results[
address
] = scan_mods.protocol_scanners.http_scanner.http_scanner(address)
json_output = json.dumps(dict_of_results)
self.assertIsNotNone(json_output)
self.assertGreaterEqual(len(json_output), 1)
self.assertIsInstance(json_output, str)
print("Finished test http_scanner can create valid JSON\n")
if __name__ == "__main__":
unittest.main()
``` |
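Test 05 above fixes the error contract of the scanner: any exception raised while fetching the page is converted into a single-key dictionary of the form `{"ERROR": "<ExceptionName> -- <details>"}`. The sketch below shows one way that behaviour could be implemented with `requests`; it illustrates the contract the test asserts and is not the actual `scan_mods.protocol_scanners.http_scanner` code.
```python
import requests


def http_scanner_sketch(address, port=80):
    """Return response headers on success, or the {"ERROR": ...} dict test_05 expects."""
    url = f"http://{address}:{port}"
    try:
        with requests.Session() as session:
            response = session.get(url, timeout=5)
        return dict(response.headers)
    except requests.exceptions.HTTPError as err:
        return {"ERROR": f"HTTPError -- {err}"}
    except requests.exceptions.ConnectionError as err:
        return {"ERROR": f"ConnectionError -- {err}"}
    except Exception as err:  # any other failure is reported the same way
        return {"ERROR": f"OtherError -- {err}"}
```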
{
"source": "jmcusick/currencyhft",
"score": 3
} |
#### File: currencyhft/database/dbapi.py
```python
import MySQLdb as sql
import json
#Class that handles all SQL calls from the python code
class API:
defaultCredsFile = "../../credentials.txt"
formats = None
db = None
cur = None
def __init__(self):
#gets the database table formats from a file
formatFile = open("tableColumns.json",'r')
self.formats = json.loads(formatFile.read())
formatFile.close()
#open database connection
def connect(self, credsFile = None):
if credsFile == None:
credsFile = self.defaultCredsFile
file = open(credsFile, 'r')
creds = file.read().strip()
file.close()
creds = creds.split("\n")
try:
self.db = sql.connect(host=creds[0],user=creds[1],passwd=<PASSWORD>[2],db=creds[3])
self.cur = self.db.cursor()
except Exception as e:
print "ERROR: database connection error\n"+str(e)
#close database connection
def close(self):
self.db.close()
#takes an array of rows and calls commit after
def bulkInsert(self, table, data):
for row in data:
self.insert(table, row, True)
return self.commit()
#insert new rows into database
#data is a dictionary
#bulk, if true, disables the commit statement. This speeds up insertions, but
#the data will not be live until the programmer manually calls commit
def insert(self, table, data, bulk = False, requireData = True):
tableCols = ""
values = ""
update = ""
cols = self.formats[table]["cols"]
keys = self.formats[table]["keys"]
for c in cols:
# allows for user to not specify columns in data argument
if not c in data:
continue
# allows for missing data columns
if data[c] == None:
continue
tableCols += c+","
if c not in keys:
update += c + " = "
val = data[c]
if isinstance(val,basestring):
values += "\'"+val+"\',"
if c not in keys:
update += "\'"+val+"\',"
else:
values += str(val)+","
if c not in keys:
update += str(val)+","
tableCols = tableCols.strip(",")
values = values.strip(",")
update = update.strip(",")
insertion = "INSERT INTO {0} ({1}) VALUES ({2})".format(table,tableCols,values)
# allows for only a key to be inserted (with an error on duplicate)
# IF requireData is also passed as false
if update == "":
if requireData:
return
else:
insertion += " ON DUPLICATE KEY UPDATE {0}".format(update)
#execute the insertion query
try:
self.cur.execute(insertion)
if not bulk:
self.db.commit()
except Exception as e:
if not bulk:
self.db.rollback()
print "ERROR: unable to add or update row to table" + str(e)
else:
print "WARNING: unable to execute call: ",insertion
print str(e)
#gets every row from the specified table in the database
def getAllData(self, table):
insertion = "SELECT * from {0}".format(table)
try:
print insertion
self.cur.execute(insertion)
rows = self.cur.fetchall()
return list(rows)
except Exception as e:
print e
if table not in self.formats.keys():
print "ERROR: the inputted table name does not exist"
else:
print "ERROR: could not execute call"
print str(e)
return None
#commits the previous execute calls to the database
#rolls back changes if there is an error
def commit(self):
try:
self.db.commit()
except Exception as e:
self.db.rollback()
print "ERROR: unable to commit data to table" + str(e)
return False
return True
#gets the name of the forex history table based on the interval
def getTableNameForDataInterval(self, dataInterval):
table = None
if dataInterval == "1d":
table = "forexDashDaily"
elif dataInterval == "1m":
table = "forexDashMinute"
else:
print "ERROR: unknown data interval: ", dataInterval
return table
#used for testing
if __name__ == '__main__':
db = API()
db.connect()
db.cur.execute("describe forex")
data = {"ticker" : "USDEUR=X"
, "current_rate" : 120.03
, "last_rate" : 121.03
, "change_day" : 1.03}
db.insert('forex',data)
db.close()
```
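`API.__init__` above loads `tableColumns.json`, and the class only ever reads `formats[table]["cols"]` and `formats[table]["keys"]`, so the file is presumably a per-table map of column names plus the key columns used for the `ON DUPLICATE KEY UPDATE` clause. The literal below shows that assumed structure; the column names are taken from the `__main__` test data and the history loader in this repository and may not match the real schema.
```python
# Assumed shape of tableColumns.json as read by API.insert(); illustrative only.
example_table_columns = {
    "forex": {
        "cols": ["ticker", "current_rate", "last_rate", "change_day"],
        "keys": ["ticker"],
    },
    "forexDashDaily": {
        "cols": ["ticker", "timestamp", "open", "close", "high", "low", "volume"],
        "keys": ["ticker", "timestamp"],
    },
}
```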
#### File: currencyhft/database/forexDiskToDB.py
```python
import json
import glob
from pprint import pprint
import dbapi
def main():
data = {}
db = dbapi.API()
db.connect()
progressCounter = 1
progressFinish = 56.0 # number of currency pairs
for folder in glob.glob('data/*'):
print "Currency Pair "+ str(progressCounter)+ " of " + str(int(progressFinish)) + ": " + str(int(round(progressCounter/progressFinish*100))) + "%"
print "foldername: ", folder
ticker = folder.split('/')[1]
print "ticker: ", ticker
#create ticker in forex table
obj = {}
obj["ticker"] = ticker
for filename in glob.glob(folder+'/*.txt'):
#print "\tfilename: ", filename
dataInterval = filename[-6:][0:2] #file name ends in ..._[dataInterval].txt
print "\tdataInterval: ", dataInterval
#check dataInterval and set table
table = db.getTableNameForDataInterval(dataInterval)
if table is None:
print "ERROR: Bad table, skipping file: ",filename
continue
with open(filename, 'r') as f:
data = json.load(f)
#pprint(data)
bulkData = []
for i in range(len(data["timestamp"])):
#print "i: " ,i
#construct the object for a mysql insert
obj = {}
obj["ticker"] = ticker
obj["timestamp"] = data["timestamp"][i]
obj["volume"] = data["quote"]["volume"][i]
obj["close"] = data["quote"]["close"][i]
obj["open"] = data["quote"]["open"][i]
# YAHOO DATA HIGH AND LOW ARE BACKWARDS
obj["high"] = data["quote"]["low"][i]
obj["low"] = data["quote"]["high"][i]
#print obj
bulkData.append(obj)
# insert into database
retVal = db.bulkInsert(table, bulkData)
if retVal == False:
print "ERROR: bulk insert failed for file: ", filename
progressCounter+=1
print "Completed. Goodbye."
db.close()
if __name__ == '__main__':
main()
``` |
{
"source": "jmcvetta/cspace",
"score": 2
} |
#### File: cspaceapps/filetransfer/Ui_FileReceiverWindow.py
```python
import sys
from PyQt4 import QtCore, QtGui
class Ui_FileReceiverWindow(object):
def setupUi(self, FileReceiverWindow):
FileReceiverWindow.setObjectName("FileReceiverWindow")
FileReceiverWindow.resize(QtCore.QSize(QtCore.QRect(0,0,306,322).size()).expandedTo(FileReceiverWindow.minimumSizeHint()))
FileReceiverWindow.setWindowIcon(QtGui.QIcon(":/images/cspace32.png"))
self.vboxlayout = QtGui.QVBoxLayout(FileReceiverWindow)
self.vboxlayout.setMargin(9)
self.vboxlayout.setSpacing(6)
self.vboxlayout.setObjectName("vboxlayout")
self.status = QtGui.QLabel(FileReceiverWindow)
self.status.setObjectName("status")
self.vboxlayout.addWidget(self.status)
self.fileList = QtGui.QListWidget(FileReceiverWindow)
self.fileList.setObjectName("fileList")
self.vboxlayout.addWidget(self.fileList)
self.hboxlayout = QtGui.QHBoxLayout()
self.hboxlayout.setMargin(0)
self.hboxlayout.setSpacing(6)
self.hboxlayout.setObjectName("hboxlayout")
self.acceptButton = QtGui.QPushButton(FileReceiverWindow)
self.acceptButton.setEnabled(False)
self.acceptButton.setObjectName("acceptButton")
self.hboxlayout.addWidget(self.acceptButton)
self.cancelButton = QtGui.QPushButton(FileReceiverWindow)
self.cancelButton.setObjectName("cancelButton")
self.hboxlayout.addWidget(self.cancelButton)
self.vboxlayout.addLayout(self.hboxlayout)
self.retranslateUi(FileReceiverWindow)
QtCore.QMetaObject.connectSlotsByName(FileReceiverWindow)
def retranslateUi(self, FileReceiverWindow):
FileReceiverWindow.setWindowTitle(QtGui.QApplication.translate("FileReceiverWindow", "Receive Files", None, QtGui.QApplication.UnicodeUTF8))
self.status.setText(QtGui.QApplication.translate("FileReceiverWindow", "Status...", None, QtGui.QApplication.UnicodeUTF8))
self.acceptButton.setText(QtGui.QApplication.translate("FileReceiverWindow", "&Accept", None, QtGui.QApplication.UnicodeUTF8))
self.cancelButton.setText(QtGui.QApplication.translate("FileReceiverWindow", "&Cancel", None, QtGui.QApplication.UnicodeUTF8))
```
#### File: cspaceapps/im/Ui_IMWindow.py
```python
import sys
from PyQt4 import QtCore, QtGui
class Ui_IMWindow(object):
def setupUi(self, IMWindow):
IMWindow.setObjectName("IMWindow")
IMWindow.resize(QtCore.QSize(QtCore.QRect(0,0,401,308).size()).expandedTo(IMWindow.minimumSizeHint()))
IMWindow.setWindowIcon(QtGui.QIcon(":/images/cspace32.png"))
self.vboxlayout = QtGui.QVBoxLayout(IMWindow)
self.vboxlayout.setMargin(9)
self.vboxlayout.setSpacing(6)
self.vboxlayout.setObjectName("vboxlayout")
self.chatLogView = QtGui.QTextEdit(IMWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Policy(7),QtGui.QSizePolicy.Policy(7))
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(4)
sizePolicy.setHeightForWidth(self.chatLogView.sizePolicy().hasHeightForWidth())
self.chatLogView.setSizePolicy(sizePolicy)
font = QtGui.QFont(self.chatLogView.font())
font.setFamily("MS Shell Dlg")
font.setPointSize(10)
font.setWeight(50)
font.setItalic(False)
font.setUnderline(False)
font.setStrikeOut(False)
font.setBold(False)
self.chatLogView.setFont(font)
self.chatLogView.setFocusPolicy(QtCore.Qt.ClickFocus)
self.chatLogView.setReadOnly(True)
self.chatLogView.setObjectName("chatLogView")
self.vboxlayout.addWidget(self.chatLogView)
self.chatInputEdit = QtGui.QTextEdit(IMWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Policy(7),QtGui.QSizePolicy.Policy(7))
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.chatInputEdit.sizePolicy().hasHeightForWidth())
self.chatInputEdit.setSizePolicy(sizePolicy)
font = QtGui.QFont(self.chatInputEdit.font())
font.setFamily("MS Shell Dlg")
font.setPointSize(10)
font.setWeight(50)
font.setItalic(False)
font.setUnderline(False)
font.setStrikeOut(False)
font.setBold(False)
self.chatInputEdit.setFont(font)
self.chatInputEdit.setAcceptRichText(False)
self.chatInputEdit.setObjectName("chatInputEdit")
self.vboxlayout.addWidget(self.chatInputEdit)
self.statusLabel = QtGui.QLabel(IMWindow)
self.statusLabel.setObjectName("statusLabel")
self.vboxlayout.addWidget(self.statusLabel)
self.retranslateUi(IMWindow)
QtCore.QMetaObject.connectSlotsByName(IMWindow)
IMWindow.setTabOrder(self.chatInputEdit,self.chatLogView)
def retranslateUi(self, IMWindow):
IMWindow.setWindowTitle(QtGui.QApplication.translate("IMWindow", "CSpace IM", None, QtGui.QApplication.UnicodeUTF8))
```
#### File: cspace/dht/proto.py
```python
from cspace.dht.params import DHT_ID_LENGTH, DHT_K
from cspace.dht.util import checkAddr
class ProtocolError(Exception) : pass
def _assert( cond, msg=None ) :
if not cond :
if msg is None : msg = 'invalid message data'
raise ProtocolError, msg
def validatePingRequest( msg ) :
_assert( len(msg) == 0 )
return msg
def validatePingResponse( msg ) :
_assert( type(msg) is str )
_assert( len(msg) == 0 )
return msg
def validateGetAddrRequest( msg ) :
_assert( len(msg) == 0 )
return msg
def validateGetAddrResponse( msg ) :
_assert( type(msg) is list )
msg = tuple(msg)
_assert( checkAddr(msg) )
return msg
def validateGetKeyRequest( msg ) :
_assert( len(msg) == 1 )
_assert( type(msg[0]) is str )
return msg
def validateGetKeyResponse( msg ) :
_assert( type(msg) is list )
_assert( len(msg) == 4 )
result,data,updateLevel,signature = msg
_assert( type(result) is int )
_assert( type(data) is str )
_assert( type(updateLevel) is int )
_assert( updateLevel >= 0 )
_assert( type(signature) is str )
return msg
def validatePutKeyRequest( msg ) :
_assert( len(msg) == 4 )
publicKeyData,data,updateLevel,signature = msg
_assert( type(publicKeyData) is str )
_assert( type(data) is str )
_assert( type(updateLevel) is int )
_assert( updateLevel > 0 )
_assert( type(signature) is str )
return msg
def validatePutKeyResponse( msg ) :
_assert( type(msg) is list )
_assert( len(msg) == 4 )
result,data,updateLevel,signature = msg
_assert( type(result) is int )
_assert( type(data) is str )
_assert( type(updateLevel) is int )
_assert( updateLevel >= 0 )
_assert( type(signature) is str )
return msg
def validateFindNodesRequest( msg ) :
_assert( len(msg) == 1 )
_assert( type(msg[0]) is str )
_assert( len(msg[0]) == DHT_ID_LENGTH )
return msg
def validateFindNodesResponse( msg ) :
_assert( type(msg) is list )
msg = msg[:DHT_K]
out = []
for x in msg :
_assert( type(x) is list )
x = tuple(x)
_assert( checkAddr(x) )
out.append( x )
return out
def validateFirewallCheckRequest( msg ) :
_assert( len(msg) == 2 )
addr = tuple( msg )
_assert( checkAddr(addr) )
return addr
def validateFirewallCheckResponse( msg ) :
_assert( type(msg) is list )
_assert( len(msg) == 2 )
fwResult,token = msg
_assert( type(fwResult) is int )
_assert( type(token) is str )
return msg
MESSAGES = ( 'Ping', 'GetAddr', 'GetKey', 'PutKey', 'FindNodes', 'FirewallCheck' )
requestValidators = {}
responseValidators = {}
def _initTables() :
g = globals()
for m in MESSAGES :
requestValidators[m] = g[ 'validate' + m + 'Request' ]
responseValidators[m] = g[ 'validate' + m + 'Response' ]
_initTables()
def validateRequest( msg ) :
_assert( type(msg) is list )
_assert( len(msg) >= 2 )
cmd, useSourceAddr = msg[0], msg[1]
del msg[:2]
_assert( type(cmd) is str )
_assert( type(useSourceAddr) is int )
_assert( useSourceAddr in (0,1) )
validator = requestValidators.get( cmd )
_assert( validator is not None )
msg = validator( msg )
return (cmd, useSourceAddr, msg)
def validateResponse( cmd, msg ) :
validator = responseValidators[cmd]
return validator( msg )
```
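A short sketch of driving the request validator above: the decoded message list leads with the command name and the use-source-address flag, and anything malformed comes back as a ProtocolError:
```python
from cspace.dht.proto import ProtocolError, validateRequest

# a well-formed Ping request: command, useSourceAddr flag, empty payload
assert validateRequest(['Ping', 0]) == ('Ping', 0, [])

# unknown commands (and malformed payloads) are rejected uniformly
try:
    validateRequest(['Bogus', 0])
except ProtocolError:
    pass
```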
#### File: dht/test/testdhtnode.py
```python
import sys, logging
from socket import socket, AF_INET, SOCK_DGRAM
from nitro.selectreactor import SelectReactor
from cspace.dht import rpc
_requestCount = 0
_oldRequestMethod = rpc.RPCSocket.request
def _newRequestMethod( *args, **kwargs ) :
global _requestCount
_requestCount += 1
return _oldRequestMethod( *args, **kwargs )
rpc.RPCSocket.request = _newRequestMethod
from cspace.dht.util import checkPort
from cspace.dht.rpc import RPCSocket
from cspace.dht.node import DHTNode
def startNode( reactor, nodeAddr, knownNodes ) :
print 'starting node: %s' % str(nodeAddr)
sock = socket( AF_INET, SOCK_DGRAM )
sock.bind( nodeAddr )
rpcSocket = RPCSocket( sock, reactor )
node = DHTNode( rpcSocket, reactor, knownNodes )
return node
def main() :
logging.getLogger().addHandler( logging.StreamHandler() )
if len(sys.argv) == 2 :
initialPort = int(sys.argv[1])
assert checkPort( initialPort )
else : initialPort = 12345
reactor = SelectReactor()
seedNodeAddr = ('127.0.0.1',initialPort)
seedNode = startNode( reactor, seedNodeAddr, [] )
numNodes = 50
for i in range(1,numNodes) :
port = initialPort + i
nodeAddr = ('127.0.0.1',port)
node = startNode( reactor, nodeAddr, [seedNodeAddr] )
print 'started %d nodes' % numNodes
reactor.run()
print '_requestCount =', _requestCount
def profile_main() :
import hotshot
prof = hotshot.Profile( 'test.prof' )
prof.runcall( main )
prof.close()
if __name__ == '__main__' :
#profile_main()
main()
```
#### File: cspace/dht/util.py
```python
from socket import inet_aton
from struct import pack, unpack
from types import IntType, StringType, TupleType
from ncrypt.digest import DigestType, Digest
from ncrypt.rsa import RSAKey, RSAError
from nitro.bencode import encode
from cspace.dht.params import DHT_ID_LENGTH, DHT_ID_MAX
digestType = DigestType( 'SHA1' )
digestLength = digestType.size()
assert digestLength == DHT_ID_LENGTH
def toId( x ) :
return Digest(digestType).digest(x)
def idToNum( x ) :
return long(x.encode('hex'),16)
def numToId( numId ) :
s = hex( numId )
assert s.startswith('0x')
if s.endswith('L') : s = s[2:-1]
else : s = s[2:]
if len(s) < 2*DHT_ID_LENGTH :
s = ('0'*(2*DHT_ID_LENGTH-len(s))) + s
x = s.decode('hex')
assert len(x) == DHT_ID_LENGTH
return x
def checkIP( ip ) :
if type(ip) is not StringType : return False
if not ip : return False
try :
inet_aton( ip )
return True
except :
return False
def checkPort( port ) :
if type(port) is not IntType : return False
return 0 < port < 65536
def checkAddr( addr ) :
if type(addr) is not TupleType :
return False
if len(addr) != 2 : return False
if not checkIP(addr[0]) : return False
return checkPort( addr[1] )
def addrToStr( addr ) :
# '=L' pins the standard 4-byte size; a bare native 'L' is 8 bytes on most
# 64-bit platforms and would reject the 4 bytes returned by inet_aton()
ip = unpack( '=L', inet_aton(addr[0]) )[0]
port = addr[1]
return pack( '!LH', ip, port )
def addrToId( addr ) :
return toId( addrToStr(addr) )
def verifySignature( publicKey, data, updateLevel, signature ) :
payload = encode( ('DHT-DATA',data,updateLevel) )
if type(publicKey) is str :
k = RSAKey()
try :
k.fromDER_PublicKey( publicKey )
except RSAError :
return False
else :
k = publicKey
try :
digest = Digest(digestType).digest( payload )
k.verify( signature, digest, digestType )
return True
except RSAError :
return False
def computeSignature( rsaKey, data, updateLevel ) :
payload = encode( ('DHT-DATA',data,updateLevel) )
digest = Digest(digestType).digest( payload )
return rsaKey.sign( digest, digestType )
```
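The address helpers above are deliberately strict about types; a small sketch of what they accept (assuming ncrypt and the dht params module import cleanly):
```python
from cspace.dht.util import checkAddr, idToNum, numToId, toId

assert checkAddr(('127.0.0.1', 8080))      # (ip string, port int) tuple
assert not checkAddr(['127.0.0.1', 8080])  # lists are rejected, tuples only
assert not checkAddr(('127.0.0.1', 0))     # port must fall in 1..65535

# node ids round-trip between the 20-byte string form and the numeric form
nodeId = toId('some key')
assert numToId(idToNum(nodeId)) == nodeId
```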
#### File: cspace/main/incomingprompt.py
```python
from PyQt4 import QtCore, QtGui
from nitro.async import AsyncOp
from cspace.util.flashwin import FlashWindow
from cspace.util.delaygc import delaygc
from cspace.main.ui.Ui_IncomingPromptWindow import Ui_IncomingPromptWindow
class IncomingPromptWindow( QtGui.QWidget, FlashWindow ) :
def __init__( self, user, service, reactor, callback=None ) :
QtGui.QWidget.__init__( self )
self.ui = Ui_IncomingPromptWindow()
self.ui.setupUi( self )
FlashWindow.__init__( self, reactor )
msg = 'User <b>%s</b> is accessing service <b>%s</b>.<br/>' % (user,service)
msg += 'Allow this connection?'
self.ui.prompt.setText( msg )
self.connect( self.ui.allowButton, QtCore.SIGNAL('clicked()'), self._onAllow )
self.connect( self.ui.denyButton, QtCore.SIGNAL('clicked()'), self.close )
self.op = AsyncOp( callback, self._doCancel )
self.show()
self.flash()
def getOp( self ) : return self.op
def _onAllow( self ) :
op = self.op
self.op = None
op.notify( True )
self.close()
def _doCancel( self ) :
self.op = None
self.close()
def closeEvent( self, ev ) :
if self.op :
op = self.op
self.op = None
op.notify( False )
self.cancelFlash()
delaygc( self )
QtGui.QWidget.closeEvent( self, ev )
```
#### File: main/ui/Ui_MainWindow.py
```python
import sys
from PyQt4 import QtCore, QtGui
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(QtCore.QSize(QtCore.QRect(0,0,283,376).size()).expandedTo(MainWindow.minimumSizeHint()))
MainWindow.setWindowIcon(QtGui.QIcon(":/images/cspace32.png"))
MainWindow.setIconSize(QtCore.QSize(24,24))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.vboxlayout = QtGui.QVBoxLayout(self.centralwidget)
self.vboxlayout.setMargin(0)
self.vboxlayout.setSpacing(0)
self.vboxlayout.setObjectName("vboxlayout")
self.stack = QtGui.QStackedWidget(self.centralwidget)
self.stack.setObjectName("stack")
self.contactsPage = QtGui.QWidget()
self.contactsPage.setObjectName("contactsPage")
self.vboxlayout1 = QtGui.QVBoxLayout(self.contactsPage)
self.vboxlayout1.setMargin(0)
self.vboxlayout1.setSpacing(0)
self.vboxlayout1.setObjectName("vboxlayout1")
self.contacts = QtGui.QListWidget(self.contactsPage)
self.contacts.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.contacts.setIconSize(QtCore.QSize(24,24))
self.contacts.setResizeMode(QtGui.QListView.Adjust)
self.contacts.setObjectName("contacts")
self.vboxlayout1.addWidget(self.contacts)
self.stack.addWidget(self.contactsPage)
self.offlinePage = QtGui.QWidget()
self.offlinePage.setObjectName("offlinePage")
self.vboxlayout2 = QtGui.QVBoxLayout(self.offlinePage)
self.vboxlayout2.setMargin(0)
self.vboxlayout2.setSpacing(0)
self.vboxlayout2.setObjectName("vboxlayout2")
self.hboxlayout = QtGui.QHBoxLayout()
self.hboxlayout.setMargin(0)
self.hboxlayout.setSpacing(6)
self.hboxlayout.setObjectName("hboxlayout")
spacerItem = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout.addItem(spacerItem)
self.goOnlineButton = QtGui.QPushButton(self.offlinePage)
self.goOnlineButton.setObjectName("goOnlineButton")
self.hboxlayout.addWidget(self.goOnlineButton)
spacerItem1 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout.addItem(spacerItem1)
self.vboxlayout2.addLayout(self.hboxlayout)
self.hboxlayout1 = QtGui.QHBoxLayout()
self.hboxlayout1.setMargin(0)
self.hboxlayout1.setSpacing(6)
self.hboxlayout1.setObjectName("hboxlayout1")
spacerItem2 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout1.addItem(spacerItem2)
self.createKeyButton = QtGui.QPushButton(self.offlinePage)
self.createKeyButton.setObjectName("createKeyButton")
self.hboxlayout1.addWidget(self.createKeyButton)
spacerItem3 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout1.addItem(spacerItem3)
self.vboxlayout2.addLayout(self.hboxlayout1)
self.stack.addWidget(self.offlinePage)
self.offlineNoUsersPage = QtGui.QWidget()
self.offlineNoUsersPage.setObjectName("offlineNoUsersPage")
self.vboxlayout3 = QtGui.QVBoxLayout(self.offlineNoUsersPage)
self.vboxlayout3.setMargin(0)
self.vboxlayout3.setSpacing(0)
self.vboxlayout3.setObjectName("vboxlayout3")
self.hboxlayout2 = QtGui.QHBoxLayout()
self.hboxlayout2.setMargin(0)
self.hboxlayout2.setSpacing(0)
self.hboxlayout2.setObjectName("hboxlayout2")
spacerItem4 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout2.addItem(spacerItem4)
self.createKeyButton1 = QtGui.QPushButton(self.offlineNoUsersPage)
self.createKeyButton1.setObjectName("createKeyButton1")
self.hboxlayout2.addWidget(self.createKeyButton1)
spacerItem5 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout2.addItem(spacerItem5)
self.vboxlayout3.addLayout(self.hboxlayout2)
self.stack.addWidget(self.offlineNoUsersPage)
self.connectingPage = QtGui.QWidget()
self.connectingPage.setObjectName("connectingPage")
self.vboxlayout4 = QtGui.QVBoxLayout(self.connectingPage)
self.vboxlayout4.setMargin(9)
self.vboxlayout4.setSpacing(6)
self.vboxlayout4.setObjectName("vboxlayout4")
spacerItem6 = QtGui.QSpacerItem(20,40,QtGui.QSizePolicy.Minimum,QtGui.QSizePolicy.Expanding)
self.vboxlayout4.addItem(spacerItem6)
self.hboxlayout3 = QtGui.QHBoxLayout()
self.hboxlayout3.setMargin(0)
self.hboxlayout3.setSpacing(6)
self.hboxlayout3.setObjectName("hboxlayout3")
spacerItem7 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout3.addItem(spacerItem7)
self.vboxlayout5 = QtGui.QVBoxLayout()
self.vboxlayout5.setMargin(0)
self.vboxlayout5.setSpacing(6)
self.vboxlayout5.setObjectName("vboxlayout5")
self.connectStatus = QtGui.QLabel(self.connectingPage)
self.connectStatus.setAlignment(QtCore.Qt.AlignCenter)
self.connectStatus.setObjectName("connectStatus")
self.vboxlayout5.addWidget(self.connectStatus)
self.connectCancelButton = QtGui.QPushButton(self.connectingPage)
self.connectCancelButton.setObjectName("connectCancelButton")
self.vboxlayout5.addWidget(self.connectCancelButton)
self.hboxlayout3.addLayout(self.vboxlayout5)
spacerItem8 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout3.addItem(spacerItem8)
self.vboxlayout4.addLayout(self.hboxlayout3)
spacerItem9 = QtGui.QSpacerItem(20,40,QtGui.QSizePolicy.Minimum,QtGui.QSizePolicy.Expanding)
self.vboxlayout4.addItem(spacerItem9)
self.stack.addWidget(self.connectingPage)
self.vboxlayout.addWidget(self.stack)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0,0,283,21))
self.menubar.setObjectName("menubar")
self.menu_Help = QtGui.QMenu(self.menubar)
self.menu_Help.setObjectName("menu_Help")
self.menuC_ontacts = QtGui.QMenu(self.menubar)
self.menuC_ontacts.setObjectName("menuC_ontacts")
self.menu_CSpace = QtGui.QMenu(self.menubar)
self.menu_CSpace.setObjectName("menu_CSpace")
self.menuO_ptions = QtGui.QMenu(self.menubar)
self.menuO_ptions.setObjectName("menuO_ptions")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(MainWindow)
self.toolBar.setOrientation(QtCore.Qt.Horizontal)
self.toolBar.setIconSize(QtCore.QSize(32,32))
self.toolBar.setObjectName("toolBar")
MainWindow.addToolBar(self.toolBar)
self.actionCreateKey = QtGui.QAction(MainWindow)
self.actionCreateKey.setIcon(QtGui.QIcon(":/images/register32.png"))
self.actionCreateKey.setObjectName("actionCreateKey")
self.actionGoOnline = QtGui.QAction(MainWindow)
self.actionGoOnline.setIcon(QtGui.QIcon(":/images/connect32.png"))
self.actionGoOnline.setObjectName("actionGoOnline")
self.actionGoOffline = QtGui.QAction(MainWindow)
self.actionGoOffline.setIcon(QtGui.QIcon(":/images/disconnect32.png"))
self.actionGoOffline.setObjectName("actionGoOffline")
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setIcon(QtGui.QIcon(":/images/exit32.png"))
self.actionExit.setObjectName("actionExit")
self.actionAddContact = QtGui.QAction(MainWindow)
self.actionAddContact.setIcon(QtGui.QIcon(":/images/user_add32.png"))
self.actionAddContact.setObjectName("actionAddContact")
self.actionRefreshStatus = QtGui.QAction(MainWindow)
self.actionRefreshStatus.setIcon(QtGui.QIcon(":/images/refresh32.png"))
self.actionRefreshStatus.setObjectName("actionRefreshStatus")
self.actionCheckStatus = QtGui.QAction(MainWindow)
self.actionCheckStatus.setIcon(QtGui.QIcon(":/images/refresh32.png"))
self.actionCheckStatus.setObjectName("actionCheckStatus")
self.actionContactInfo = QtGui.QAction(MainWindow)
self.actionContactInfo.setIcon(QtGui.QIcon(":/images/contact_info32.png"))
self.actionContactInfo.setObjectName("actionContactInfo")
self.actionRemoveContact = QtGui.QAction(MainWindow)
self.actionRemoveContact.setIcon(QtGui.QIcon(":/images/user_remove32.png"))
self.actionRemoveContact.setObjectName("actionRemoveContact")
self.actionEditPermissions = QtGui.QAction(MainWindow)
self.actionEditPermissions.setIcon(QtGui.QIcon(":/images/edit_permissions32.png"))
self.actionEditPermissions.setObjectName("actionEditPermissions")
self.actionAboutCSpace = QtGui.QAction(MainWindow)
self.actionAboutCSpace.setIcon(QtGui.QIcon(":/images/cspace32.png"))
self.actionAboutCSpace.setObjectName("actionAboutCSpace")
self.actionKeyInfo = QtGui.QAction(MainWindow)
self.actionKeyInfo.setIcon(QtGui.QIcon(":/images/key_info32.png"))
self.actionKeyInfo.setObjectName("actionKeyInfo")
self.menu_Help.addAction(self.actionAboutCSpace)
self.menuC_ontacts.addAction(self.actionAddContact)
self.menuC_ontacts.addAction(self.actionRefreshStatus)
self.menu_CSpace.addAction(self.actionGoOnline)
self.menu_CSpace.addAction(self.actionGoOffline)
self.menu_CSpace.addAction(self.actionKeyInfo)
self.menu_CSpace.addSeparator()
self.menu_CSpace.addAction(self.actionCreateKey)
self.menu_CSpace.addSeparator()
self.menu_CSpace.addAction(self.actionExit)
self.menuO_ptions.addAction(self.actionEditPermissions)
self.menubar.addAction(self.menu_CSpace.menuAction())
self.menubar.addAction(self.menuC_ontacts.menuAction())
self.menubar.addAction(self.menuO_ptions.menuAction())
self.menubar.addAction(self.menu_Help.menuAction())
self.toolBar.addAction(self.actionGoOnline)
self.toolBar.addAction(self.actionCreateKey)
self.toolBar.addAction(self.actionGoOffline)
self.toolBar.addAction(self.actionExit)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionAddContact)
self.toolBar.addAction(self.actionRefreshStatus)
self.retranslateUi(MainWindow)
self.stack.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "CSpace", None, QtGui.QApplication.UnicodeUTF8))
self.contacts.clear()
item = QtGui.QListWidgetItem(self.contacts)
item.setText(QtGui.QApplication.translate("MainWindow", "Item 1", None, QtGui.QApplication.UnicodeUTF8))
item.setIcon(QtGui.QIcon(":/images/user_online.png"))
item1 = QtGui.QListWidgetItem(self.contacts)
item1.setText(QtGui.QApplication.translate("MainWindow", "Item 2", None, QtGui.QApplication.UnicodeUTF8))
item1.setIcon(QtGui.QIcon(":/images/user_offline.png"))
item2 = QtGui.QListWidgetItem(self.contacts)
item2.setText(QtGui.QApplication.translate("MainWindow", "Item 3", None, QtGui.QApplication.UnicodeUTF8))
item2.setIcon(QtGui.QIcon(":/images/user_online.png"))
item3 = QtGui.QListWidgetItem(self.contacts)
item3.setText(QtGui.QApplication.translate("MainWindow", "Item 4", None, QtGui.QApplication.UnicodeUTF8))
item3.setIcon(QtGui.QIcon(":/images/user_offline.png"))
self.goOnlineButton.setText(QtGui.QApplication.translate("MainWindow", "Go Online...", None, QtGui.QApplication.UnicodeUTF8))
self.createKeyButton.setText(QtGui.QApplication.translate("MainWindow", "Create Private Key...", None, QtGui.QApplication.UnicodeUTF8))
self.createKeyButton1.setText(QtGui.QApplication.translate("MainWindow", "Create Private Key...", None, QtGui.QApplication.UnicodeUTF8))
self.connectStatus.setText(QtGui.QApplication.translate("MainWindow", "<html><head><meta name=\"qrichtext\" content=\"1\" /></head><body style=\" white-space: pre-wrap; font-family:MS Shell Dlg; font-size:8.25pt; font-weight:400; font-style:normal; text-decoration:none;\"><p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt;\"><span style=\" font-weight:600;\">Connect failed.</span></p><p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; font-weight:600;\">Reconnecting in 30 second(s)...</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.connectCancelButton.setText(QtGui.QApplication.translate("MainWindow", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
self.menu_Help.setTitle(QtGui.QApplication.translate("MainWindow", "&Help", None, QtGui.QApplication.UnicodeUTF8))
self.menuC_ontacts.setTitle(QtGui.QApplication.translate("MainWindow", "C&ontacts", None, QtGui.QApplication.UnicodeUTF8))
self.menu_CSpace.setTitle(QtGui.QApplication.translate("MainWindow", "&CSpace", None, QtGui.QApplication.UnicodeUTF8))
self.menuO_ptions.setTitle(QtGui.QApplication.translate("MainWindow", "O&ptions", None, QtGui.QApplication.UnicodeUTF8))
self.actionCreateKey.setText(QtGui.QApplication.translate("MainWindow", "&Create Private Key...", None, QtGui.QApplication.UnicodeUTF8))
self.actionCreateKey.setIconText(QtGui.QApplication.translate("MainWindow", "Create Private Key", None, QtGui.QApplication.UnicodeUTF8))
self.actionCreateKey.setToolTip(QtGui.QApplication.translate("MainWindow", "Create Private Key", None, QtGui.QApplication.UnicodeUTF8))
self.actionGoOnline.setText(QtGui.QApplication.translate("MainWindow", "&Go Online...", None, QtGui.QApplication.UnicodeUTF8))
self.actionGoOffline.setText(QtGui.QApplication.translate("MainWindow", "Go &Offline", None, QtGui.QApplication.UnicodeUTF8))
self.actionExit.setText(QtGui.QApplication.translate("MainWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
self.actionAddContact.setText(QtGui.QApplication.translate("MainWindow", "&Add Contact...", None, QtGui.QApplication.UnicodeUTF8))
self.actionAddContact.setIconText(QtGui.QApplication.translate("MainWindow", "Add Contact", None, QtGui.QApplication.UnicodeUTF8))
self.actionAddContact.setToolTip(QtGui.QApplication.translate("MainWindow", "Add Contact", None, QtGui.QApplication.UnicodeUTF8))
self.actionRefreshStatus.setText(QtGui.QApplication.translate("MainWindow", "Refresh &Status", None, QtGui.QApplication.UnicodeUTF8))
self.actionCheckStatus.setText(QtGui.QApplication.translate("MainWindow", "&Check Status", None, QtGui.QApplication.UnicodeUTF8))
self.actionContactInfo.setText(QtGui.QApplication.translate("MainWindow", "Contact &Information...", None, QtGui.QApplication.UnicodeUTF8))
self.actionRemoveContact.setText(QtGui.QApplication.translate("MainWindow", "Remove Contact", None, QtGui.QApplication.UnicodeUTF8))
self.actionEditPermissions.setText(QtGui.QApplication.translate("MainWindow", "&Edit Permissions...", None, QtGui.QApplication.UnicodeUTF8))
self.actionAboutCSpace.setText(QtGui.QApplication.translate("MainWindow", "&About CSpace...", None, QtGui.QApplication.UnicodeUTF8))
self.actionKeyInfo.setText(QtGui.QApplication.translate("MainWindow", "Key Information...", None, QtGui.QApplication.UnicodeUTF8))
```
#### File: main/ui/Ui_UpdateNotifyWindow.py
```python
import sys
from PyQt4 import QtCore, QtGui
class Ui_UpdateNotifyWindow(object):
def setupUi(self, UpdateNotifyWindow):
UpdateNotifyWindow.setObjectName("UpdateNotifyWindow")
UpdateNotifyWindow.resize(QtCore.QSize(QtCore.QRect(0,0,364,110).size()).expandedTo(UpdateNotifyWindow.minimumSizeHint()))
UpdateNotifyWindow.setWindowIcon(QtGui.QIcon(":/images/cspace32.png"))
self.vboxlayout = QtGui.QVBoxLayout(UpdateNotifyWindow)
self.vboxlayout.setMargin(9)
self.vboxlayout.setSpacing(6)
self.vboxlayout.setObjectName("vboxlayout")
self.hboxlayout = QtGui.QHBoxLayout()
self.hboxlayout.setMargin(0)
self.hboxlayout.setSpacing(6)
self.hboxlayout.setObjectName("hboxlayout")
self.label = QtGui.QLabel(UpdateNotifyWindow)
self.label.setPixmap(QtGui.QPixmap(":/images/cspace48.png"))
self.label.setObjectName("label")
self.hboxlayout.addWidget(self.label)
self.label_2 = QtGui.QLabel(UpdateNotifyWindow)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Policy(5),QtGui.QSizePolicy.Policy(5))
sizePolicy.setHorizontalStretch(1)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_2.sizePolicy().hasHeightForWidth())
self.label_2.setSizePolicy(sizePolicy)
self.label_2.setWordWrap(True)
self.label_2.setObjectName("label_2")
self.hboxlayout.addWidget(self.label_2)
self.vboxlayout.addLayout(self.hboxlayout)
self.hboxlayout1 = QtGui.QHBoxLayout()
self.hboxlayout1.setMargin(0)
self.hboxlayout1.setSpacing(6)
self.hboxlayout1.setObjectName("hboxlayout1")
spacerItem = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout1.addItem(spacerItem)
self.installUpdateButton = QtGui.QPushButton(UpdateNotifyWindow)
self.installUpdateButton.setObjectName("installUpdateButton")
self.hboxlayout1.addWidget(self.installUpdateButton)
spacerItem1 = QtGui.QSpacerItem(40,20,QtGui.QSizePolicy.Expanding,QtGui.QSizePolicy.Minimum)
self.hboxlayout1.addItem(spacerItem1)
self.vboxlayout.addLayout(self.hboxlayout1)
self.retranslateUi(UpdateNotifyWindow)
QtCore.QMetaObject.connectSlotsByName(UpdateNotifyWindow)
def retranslateUi(self, UpdateNotifyWindow):
UpdateNotifyWindow.setWindowTitle(QtGui.QApplication.translate("UpdateNotifyWindow", "CSpace Update", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("UpdateNotifyWindow", "<html><head><meta name=\"qrichtext\" content=\"1\" /></head><body style=\" white-space: pre-wrap; font-family:MS Shell Dlg; font-size:8.25pt; font-weight:400; font-style:normal; text-decoration:none;\"><p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-weight:600;\">A new version of CSpace has been downloaded and is ready to install.</span><br /><span style=\" font-weight:600;\">Click the button below after closing all CSpace windows</span></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.installUpdateButton.setText(QtGui.QApplication.translate("UpdateNotifyWindow", "Stop CSpace, Install Update, and Restart CSpace", None, QtGui.QApplication.UnicodeUTF8))
```
#### File: cspace/main/upnpmapper.py
```python
from time import time
from random import randint
from nitro.async import AsyncOp
from nitro.upnp import UPnpActions
from cspace.util.statemachine import StateMachine
class UPnpMapper( object ) :
DEFAULT = 0
INITIALIZING = 1
READY = 2
CLEANINGUP = 3
CLOSED = 4
def __init__( self, reactor ) :
self.reactor = reactor
self.sm = StateMachine( self.DEFAULT )
self.device = None
self.externalIP = None
self.cleaningupCount = 0
self.sm.appendCallback( self._onCleanup, dest=self.CLEANINGUP, single=True )
self._initialize()
def shutdown( self, callback=None ) :
if self.sm.current() in (self.CLEANINGUP,self.CLOSED) :
return None
if self.sm.current() == self.READY :
self.sm.change( self.CLEANINGUP )
else :
self.sm.change( self.CLOSED )
if self.sm.current() == self.CLOSED :
return None
def onClosed() : op.notify()
def doCancel() : self.sm.removeCallback( callbackId )
callbackId = self.sm.appendCallback( onClosed,
dest=self.CLOSED, single=True )
op = AsyncOp( callback, doCancel )
return op
def _onCleanup( self ) :
if self.cleaningupCount == 0 :
self.sm.change( self.CLOSED )
def _initialize( self ) :
class Dummy : pass
obj = Dummy()
def onError() :
self.sm.removeCallback( callbackId )
self.sm.change( self.CLOSED )
def onDiscover( device ) :
if device is None :
onError()
return
self.device = device
obj.op = UPnpActions.getExternalIP( device, self.reactor,
onExternalIP )
def onExternalIP( externalIP ) :
if externalIP is None :
onError()
return
self.externalIP = externalIP
self.sm.removeCallback( callbackId )
self.sm.change( self.READY )
self.sm.change( self.INITIALIZING )
obj.op = UPnpActions.findDevice( self.reactor, onDiscover )
callbackId = self.sm.insertCallback( lambda : obj.op.cancel(),
src=self.INITIALIZING, single=True )
def addMapping( self, localIP, localPort, callback=None ) :
class Dummy : pass
obj = Dummy()
if self.sm.current() not in (self.INITIALIZING,self.READY) :
timerOp = self.reactor.callLater( 0, lambda : obj.op.notify(None) )
obj.op = AsyncOp( callback, timerOp.cancel )
return obj.op
def doReady() :
obj.attempt = 0
doAttempt()
def doAttempt() :
if obj.attempt == 3 :
obj.op.notify( None )
return
obj.attempt += 1
obj.externalPort = randint( 10000, 20000 )
desc = 'CSpace_t%d' % int(time())
obj.addOp = UPnpActions.addMapping( self.device, obj.externalPort,
'TCP', localPort, localIP, desc, self.reactor, onAdd )
obj.callbackId = self.sm.insertCallback( onAbort, src=self.READY,
single=True )
obj.op.setCanceler( onCancel )
def onCancel() :
obj.addOp.cancel()
self.sm.removeCallback( obj.callbackId )
def onAbort() :
obj.addOp.cancel()
obj.op.notify( None )
def onAdd( result ) :
self.sm.removeCallback( obj.callbackId )
if not result :
doAttempt()
return
mapping = (self.externalIP,obj.externalPort)
obj.op.notify( mapping )
self.sm.insertCallback( onCleanup, dest=self.CLEANINGUP, single=True )
def onCleanup() :
self.cleaningupCount += 1
UPnpActions.delMapping( self.device, obj.externalPort, 'TCP',
self.reactor, onDelMapping )
def onDelMapping( result ) :
self.cleaningupCount -= 1
self._onCleanup()
if self.sm.current() == self.INITIALIZING :
def checkReady() :
if self.sm.current() == self.READY :
obj.op.setCanceler( None )
doReady()
return
obj.op.notify( None )
obj.callbackId = self.sm.insertCallback( checkReady,
src=self.INITIALIZING, single=True )
obj.op = AsyncOp( callback,
lambda : self.sm.removeCallback(obj.callbackId) )
return obj.op
obj.op = AsyncOp( callback, None )
doReady()
return obj.op
```
#### File: cspace/network/locationcache.py
```python
from time import time
from nitro.async import AsyncOp
from cspace.network.location import lookupUser
# cache entry states
(ES_DEFAULT,ES_LOOKINGUP,ES_NOTIFYING) = range(3)
class _Entry( object ) :
def __init__( self, publicKey ) :
self.publicKey = publicKey
self.location = None
self.timestamp = time()
self.state = ES_DEFAULT
self.lookupOp = None
self.notifyOps = None
class LocationCache( object ) :
def __init__( self, dhtClient, nodeTable, reactor ) :
self.dhtClient = dhtClient
self.nodeTable = nodeTable
self.reactor = reactor
self.d = {}
def close( self ) :
for entry in self.d.values() :
assert entry.state != ES_NOTIFYING
if entry.state == ES_LOOKINGUP :
entry.lookupOp.cancel()
entry.notifyOps = None
self.d.clear()
def _getEntry( self, publicKey ) :
return self.d.get( publicKey.toDER_PublicKey() )
def getLocation( self, publicKey ) :
entry = self._getEntry( publicKey )
if entry is None :
return None
return entry.location
def _onLookupUser( self, entry, location ) :
entry.location = location
entry.timestamp = time()
entry.lookupOp = None
entry.state = ES_NOTIFYING
for op in list(entry.notifyOps) :
if op in entry.notifyOps :
entry.notifyOps.remove(op)
op.notify( location )
entry.notifyOps = None
entry.state = ES_DEFAULT
def refreshUser( self, publicKey, callback=None ) :
pubKeyData = publicKey.toDER_PublicKey()
entry = self.d.get( pubKeyData )
if entry is None :
entry = _Entry( publicKey )
self.d[pubKeyData] = entry
assert entry.state != ES_NOTIFYING
if entry.state == ES_DEFAULT :
assert entry.lookupOp is None
assert entry.notifyOps is None
def onLookupUser( location ) :
self._onLookupUser( entry, location )
entry.lookupOp = lookupUser( publicKey, self.dhtClient,
    self.nodeTable, onLookupUser )
entry.state = ES_LOOKINGUP
entry.notifyOps = set()
def doCancel() :
entry.notifyOps.remove( op )
op = AsyncOp( callback, doCancel )
entry.notifyOps.add( op )
return op
```
#### File: cspace/test/node_echoclient.py
```python
import logging
logging.getLogger().addHandler( logging.StreamHandler() )
from time import time
from nitro.selectreactor import SelectReactor
from nitro.tcp import TCPStream
from cspace.node.client import NodeClient
reactor = SelectReactor()
nodeAddr = ('127.0.0.1',13542)
nodeClient = None
class EchoClient(object) :
def __init__( self, sock, reactor ) :
self.sock = sock
self.reactor = reactor
self.stream = TCPStream( self.sock, self.reactor )
self.stream.setCloseCallback( self._onClose )
self.stream.setErrorCallback( self._onError )
self.stream.setInputCallback( self._onInput )
self.timerOp = self.reactor.addTimer( 1, self._onTimer )
self.stream.initiateRead( 8192 )
def _shutdown( self ) :
self.stream.shutdown()
self.sock.close()
self.timerOp.cancel()
self.reactor.stop()
def _onClose( self ) :
print 'closed'
self._shutdown()
def _onError( self, err, errMsg ) :
print 'error(%d): %s' % (err,errMsg)
self._shutdown()
def _onInput( self, data ) :
print 'received: %s' % data
def _onTimer( self ) :
msg = 'time() = %f' % time()
print 'sending: %s' % msg
self.stream.writeData( msg )
def onConnectTo( err, sock ) :
if err < 0 :
print 'unable to connect to node echo server'
reactor.stop()
return
print 'connected to node echo server'
EchoClient( sock, reactor )
def onGet( err, value ) :
if err < 0 :
print 'unable to locate node echo server'
reactor.stop()
return
routerId = value
if not routerId :
print 'node echo server not found'
reactor.stop()
return
print 'connecting to node echo server...'
nodeClient.connectTo( routerId, onConnectTo )
def onConnect( err ) :
if err < 0 :
print 'unable to connect to node'
reactor.stop()
return
print 'locating node echo server...'
nodeClient.callGet( 'Echo', onGet )
def onClose() :
print 'node connection closed'
reactor.stop()
def main() :
global nodeClient
nodeClient = NodeClient( reactor )
nodeClient.setCloseCallback( onClose )
print 'connecting to node...'
nodeClient.connect( nodeAddr, onConnect )
reactor.run()
if __name__ == '__main__' :
main()
```
#### File: cspace/util/delaygc.py
```python
_objects = []
_timerOp = None
_reactor = None
def initdelaygc( reactor ) :
global _reactor
_reactor = reactor
def delaygc( obj ) :
global _timerOp
_objects.append( obj )
if _timerOp is not None :
_timerOp.cancel()
_timerOp = _reactor.callLater( 1, _onTimer )
def _onTimer() :
global _timerOp
_timerOp = None
del _objects[:]
```
#### File: cspace/util/hexcode.py
```python
_hexChars = '0123456789abcdef'
_hexTab = {}
def _initHexTab() :
for (i,c) in enumerate(_hexChars) :
_hexTab[c] = i
_hexTab[c.upper()] = i
_initHexTab()
class HexDecodeError( Exception ) : pass
def hexByteEncode( c ) :
x = ord(c)
return _hexChars[x >> 4] + _hexChars[x&15]
def hexByteDecode( s, i=0 ) :
try :
return chr( _hexTab[s[i]]*16 + _hexTab[s[i+1]] )
except (KeyError,IndexError) :
raise HexDecodeError
def hexEncode( s ) :
return ''.join( [hexByteEncode(c) for c in s] )
def hexDecode( s ) :
return ''.join( [hexByteDecode(s,i) for i in range(0,len(s),2)] )
```
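A quick round-trip sketch of the hex helpers above; bad input surfaces as HexDecodeError:
```python
from cspace.util.hexcode import HexDecodeError, hexDecode, hexEncode

assert hexEncode('\x00\xffA') == '00ff41'
assert hexDecode('00FF41') == '\x00\xffA'   # upper-case digits are accepted

try:
    hexDecode('0g')                         # 'g' is not a hex digit
except HexDecodeError:
    pass
```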
#### File: distrib/Installer/CalcDigest.py
```python
import os, sys, StringIO, sha
def digestFile( f ) :
return sha.new(file(f,'rb').read()).hexdigest()
def digestList( fileList ) :
out = []
for f in fileList :
if not os.path.isfile(f) : continue
d = digestFile( f )
out.append( (f,d) )
return out
def main() :
fileList = sys.argv[1:]
for f,d in digestList(fileList) :
print '%s %s' % (f,d)
if __name__ == '__main__' :
main()
```
#### File: cspace/nitro/bencode.py
```python
from types import IntType, LongType, StringType, ListType, TupleType, DictType
def encodeInt( x, out ) :
assert type(x) in (IntType,LongType)
out.extend( ('i', str(x), 'e') )
def encodeString( x, out ) :
assert type(x) is StringType
out.extend( (str(len(x)), ':', x) )
def encodeList( x, out ) :
assert type(x) in (ListType,TupleType)
out.append( 'l' )
for i in x : encoderTable[type(i)]( i, out )
out.append( 'e' )
def encodeDict( x, out ) :
assert type(x) is DictType
out.append( 'd' )
items = x.items()
items.sort()
for a,b in items :
encodeString( a, out )
encoderTable[type(b)]( b, out )
out.append( 'e' )
encoderTable = {}
encoderTable[IntType] = encodeInt
encoderTable[LongType] = encodeInt
encoderTable[StringType] = encodeString
encoderTable[ListType] = encodeList
encoderTable[TupleType] = encodeList
encoderTable[DictType] = encodeDict
def encode( x ) :
out = []
encoderTable[type(x)]( x, out )
return ''.join( out )
class DecodeError( Exception ) : pass
def decodeInt( x, i ) :
assert x[i] == 'i'
e = x.index( 'e', i )
i += 1
ret = int(x[i:e])
if x[i] == '-' :
if x[i+1] == '0' : raise ValueError
elif x[i] == '0' :
if e != i+1 : raise ValueError
return (ret, e+1)
def decodeString( x, i ) :
e = x.index( ':', i )
count = int( x[i:e] )
if count < 0 : raise ValueError
if x[i] == '0' and e != i+1 : raise ValueError
e += 1
ret = x[e:e+count]
return (ret, e+count)
def decodeList( x, i ) :
assert x[i] == 'l'
ret, i = [], i+1
next = x[i]
while next != 'e' :
(v, i) = decoderTable[next]( x, i )
ret.append( v )
next = x[i]
return (ret, i+1)
def decodeDict( x, i ) :
assert x[i] == 'd'
ret, i = {}, i+1
prev = None
while x[i] != 'e' :
(a, i) = decodeString( x, i )
if a <= prev : raise ValueError
prev = a
(b, i) = decoderTable[x[i]]( x, i )
ret[a] = b
return (ret, i+1)
decoderTable = {}
decoderTable['i'] = decodeInt
for i in range(10) :
decoderTable[chr(ord('0')+i)] = decodeString
decoderTable['l'] = decodeList
decoderTable['d'] = decodeDict
def decode( x ) :
try :
(ret,i) = decoderTable[x[0]]( x, 0 )
except (KeyError, IndexError, ValueError) :
raise DecodeError, 'invalid bencoded data'
if i != len(x) :
raise DecodeError, 'invalid tail data'
return ret
```
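The encoder/decoder pair above round-trips ints, strings, lists and dicts; a quick sketch, including one of the canonical-form checks:
```python
from nitro.bencode import DecodeError, decode, encode

assert encode({'a': 1, 'b': ['x', 2]}) == 'd1:ai1e1:bl1:xi2eee'
assert decode('d1:ai1e1:bl1:xi2eee') == {'a': 1, 'b': ['x', 2]}

try:
    decode('i01e')   # integers with leading zeros are rejected
except DecodeError:
    pass
```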
#### File: cspace/nitro/selectreactor.py
```python
import sys, logging
from time import sleep
from select import select
from bisect import bisect, insort
from nitro.async import AsyncOp
from nitro.hirestimer import seconds
from nitro.reactor import Reactor
logger = logging.getLogger( 'nitro.selectreactor' )
def win32select( r, w, e, timeout ) :
if not r and not w and not e :
sleep( timeout )
return [], [], []
return select( r, w, e, timeout )
if sys.platform == 'win32' :
_select = win32select
else :
_select = select
def _add( which, sockfd, callback ) :
assert sockfd not in which
which[sockfd] = callback
def _remove( which, sockfd ) :
assert sockfd in which
del which[sockfd]
class TimerInfo(object) :
def __init__( self, timerId, timeout, callback, singleShot ) :
self.timerId = timerId
self.timeout = timeout
self.callback = callback
self.singleShot = singleShot
self.deadline = seconds() + timeout
class SelectStoppedError( Exception ) : pass
class SelectReactor( Reactor ) :
def __init__( self ) :
self.r, self.w, self.e = {}, {}, {}
self.timers = {}
self.deadlineList = []
self.nextTimerId = 0
self.stopped = False
self._failOnException = False
def addReadCallback( self, sockfd, callback ) :
_add( self.r, sockfd, callback )
def removeReadCallback( self, sockfd ) :
_remove( self.r, sockfd )
def addWriteCallback( self, sockfd, callback ) :
_add( self.w, sockfd, callback )
def removeWriteCallback( self, sockfd ) :
_remove( self.w, sockfd )
def addExceptionCallback( self, sockfd, callback ) :
_add( self.e, sockfd, callback )
def removeExceptionCallback( self, sockfd ) :
_remove( self.e, sockfd )
def addTimer( self, timeout, callback=None ) :
op = AsyncOp( callback, lambda : self._cancelTimer(timerId) )
timerId = self._addTimer( timeout, op.notify, False )
return op
def callLater( self, timeout, callback=None ) :
op = AsyncOp( callback, lambda : self._cancelTimer(timerId) )
timerId = self._addTimer( timeout, op.notify, True )
return op
def usesWSAAsyncSelect( self ) :
return False
def _addTimer( self, timeout, callback, singleShot ) :
timerId = self.nextTimerId
self.nextTimerId += 1
ti = TimerInfo( timerId, timeout, callback, singleShot )
self.timers[timerId] = ti
insort( self.deadlineList, (ti.deadline,ti) )
return timerId
def _cancelTimer( self, timerId ) :
ti = self.timers.get( timerId, None )
assert ti is not None
del self.timers[timerId]
i = bisect( self.deadlineList, (ti.deadline,ti) )
if (i > 0) and (self.deadlineList[i-1][1] is ti) :
del self.deadlineList[i-1]
def failOnException( self, fail ) :
self._failOnException = fail
def stop( self ) :
self.stopped = True
def runOnce( self ) :
timeout = 0.1
now = seconds()
dl = self.deadlineList
if dl and (timeout+now > dl[0][0]) :
timeout = dl[0][0] - now
if timeout < 0 : timeout = 0
if self.stopped :
raise SelectStoppedError
(rs, ws, es) = _select( self.r.keys(), self.w.keys(), self.e.keys(), timeout )
now = seconds()
fired = []
for (deadline,ti) in dl :
if deadline <= now :
fired.append(ti)
else : break
if fired : del dl[0:len(fired)]
for ti in fired :
if ti.timerId in self.timers :
if ti.singleShot :
del self.timers[ti.timerId]
try :
ti.callback()
except :
logger.exception( 'Error in timer callback' )
if self._failOnException : raise
if (not ti.singleShot) and (ti.timerId in self.timers) :
ti.deadline = now + ti.timeout
insort( dl, (ti.deadline,ti) )
for (fired,map) in ((rs,self.r), (ws,self.w), (es,self.e)) :
for sockfd in fired :
cb = map.get( sockfd, None )
if cb is not None :
try :
cb()
except :
logger.exception( 'Error in socket event handler' )
if self._failOnException : raise
if self.stopped :
raise SelectStoppedError
def run( self, timeout=None ) :
self.stopped = False
deadline = None
if timeout is not None :
deadline = seconds() + timeout
try :
while 1 :
if (deadline is not None) and (seconds() >= deadline) :
break
self.runOnce()
except SelectStoppedError :
return
except KeyboardInterrupt :
print 'Ctrl-C detected'
return
```
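A minimal sketch of the reactor's timer API (assuming nitro's AsyncOp invokes its callback with whatever is passed to notify(), as the other modules here rely on):
```python
from nitro.selectreactor import SelectReactor

reactor = SelectReactor()

def tick():
    print 'tick'

reactor.addTimer(1.0, tick)           # repeating timer: fires every second
reactor.callLater(5.0, reactor.stop)  # single shot: stop the loop after 5s
reactor.run()
```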
#### File: cspace/nitro/tcp.py
```python
import sys, os
from struct import pack, unpack
from socket import socket, AF_INET, SOCK_STREAM, SOL_SOCKET, SO_REUSEADDR, SO_ERROR
from socket import error as sock_error
from nitro.async import AsyncOp
from nitro.errors import *
def tcpListen( addr, reactor, callback ) :
l = TCPListener( reactor, callback )
l.listen( addr )
return l
class TCPListener(object) :
def __init__( self, reactor, callback ) :
self.reactor = reactor
self.callback = callback
self.initialized = False
self.enabled = False
def setCallback( self, callback ) :
self.callback = callback
def getSock( self ) :
assert self.initialized
return self.sock
def close( self ) :
assert self.initialized
self.enable( False )
self.sock.close()
self.initialized = False
def enable( self, flag ) :
assert self.initialized
if self.enabled == flag : return
self.enabled = flag
if flag :
self.reactor.addReadCallback( self.sock.fileno(), self._onRead )
else :
self.reactor.removeReadCallback( self.sock.fileno() )
def listen( self, addr ) :
self.sock = socket( AF_INET, SOCK_STREAM )
try :
self.sock.setsockopt( SOL_SOCKET, SO_REUSEADDR, 1 )
self.sock.bind( addr )
self.sock.listen( 5 )
except sock_error :
self.sock.close()
raise
self.initialized = True
self.enable( True )
def _onRead( self ) :
(newSock, address) = self.sock.accept()
newSock.setblocking( 0 )
self.callback( newSock )
def tcpConnect( addr, reactor, callback=None ) :
return TCPConnector().connect( addr, reactor, callback )
class TCPConnector( object ) :
def __init__( self ) :
self.errorCode = EINPROGRESS
self.errorMsg = 'In progress.'
self.sock = None
def getError( self ) : return self.errorCode
def getErrorMsg( self ) : return self.errorMsg
def getSock( self ) : return self.sock
def connect( self, addr, reactor, callback=None ) :
def doConnect() :
def doNotify( err=0, errMsg='' ) :
(self.errorCode, self.errorMsg) = (err,errMsg)
if err == 0 :
self.sock = sock
else :
sock.close()
op.notify( self )
def addEvents() :
reactor.addWriteCallback( sock.fileno(), onEvent )
if (sys.platform == 'win32') and (not reactor.usesWSAAsyncSelect()) :
reactor.addExceptionCallback( sock.fileno(), onEvent )
def removeEvents() :
reactor.removeWriteCallback( sock.fileno() )
if (sys.platform == 'win32') and (not reactor.usesWSAAsyncSelect()) :
reactor.removeExceptionCallback( sock.fileno() )
if pollTimer :
pollTimer.cancel()
def doCancel() :
removeEvents()
sock.close()
def onPollTimer() :
try :
err = sock.getsockopt( SOL_SOCKET, SO_ERROR )
if err not in (0,EWOULDBLOCK,EAGAIN,EINPROGRESS) :
onEvent()
except sock_error :
pass
def onEvent() :
removeEvents()
try :
err = sock.getsockopt( SOL_SOCKET, SO_ERROR )
doNotify( err, os.strerror(err) )
except sock_error, (err,errMsg) :
doNotify( err, errMsg )
pollTimer = None
sock = socket( AF_INET, SOCK_STREAM )
sock.setblocking( 0 )
try :
sock.connect( addr )
doNotify()
except sock_error, (err,errMsg) :
if err in (EWOULDBLOCK,EAGAIN,EINPROGRESS) :
addEvents()
if reactor.usesWSAAsyncSelect() :
pollTimer = reactor.addTimer( 1, onPollTimer )
op.setCanceler( doCancel )
return
doNotify( err, errMsg )
timerOp = reactor.callLater( 0, doConnect )
op = AsyncOp( callback, timerOp.cancel )
return op
class TCPWriter( object ) :
def __init__( self, sock, pendingWrite, reactor, timeout=60 ) :
self.sock = sock
self.pendingWrite = pendingWrite
self.reactor = reactor
self.timeout = timeout
self.maxWriteChunk = 8192
self.reactor.addWriteCallback( self.sock.fileno(), self._onWrite )
self.timerOp = self.reactor.callLater( self.timeout, self._onTimeout )
self.writeCompleteCallback = None
self.errorCallback = None
self.timeoutCallback = None
def setWriteCompleteCallback( self, writeCompleteCallback ) :
self.writeCompleteCallback = writeCompleteCallback
def setErrorCallback( self, errorCallback ) :
self.errorCallback = errorCallback
def setTimeoutCallback( self, timeoutCallback ) :
self.timeoutCallback = timeoutCallback
def shutdown( self ) :
self.timerOp.cancel()
self.reactor.removeWriteCallback( self.sock.fileno() )
def onWriteComplete( self ) :
if not self.writeCompleteCallback :
raise NotImplementedError
self.writeCompleteCallback()
def onError( self, err, errMsg ) :
if not self.errorCallback :
raise NotImplementedError
self.errorCallback( err, errMsg )
def onTimeout( self ) :
if not self.timeoutCallback :
raise NotImplementedError
self.timeoutCallback()
def _onWrite( self ) :
try :
while self.pendingWrite :
numWritten = self.sock.send( self.pendingWrite[:self.maxWriteChunk] )
self.pendingWrite = self.pendingWrite[numWritten:]
except sock_error, (err,errMsg) :
if err in (EWOULDBLOCK,EAGAIN) : return
self.reactor.removeWriteCallback( self.sock.fileno() )
self.timerOp.cancel()
self.onError( err, errMsg )
return
self.reactor.removeWriteCallback( self.sock.fileno() )
self.timerOp.cancel()
self.onWriteComplete()
def _onTimeout( self ) :
self.reactor.removeWriteCallback( self.sock.fileno() )
self.onTimeout()
class TCPCloser( TCPWriter ) :
def __init__( self, sock, pendingWrite, reactor, callback=None ) :
TCPWriter.__init__( self, sock, pendingWrite, reactor )
self.op = AsyncOp( callback, self.shutdown )
def getOp( self ) : return self.op
def _notify( self ) :
self.sock.close()
self.op.notify()
def onWriteComplete( self ) : self._notify()
def onError( self, err, errMsg ) : self._notify()
def onTimeout( self ) : self._notify()
class TCPStream( object ) :
def __init__( self, sock, reactor ) :
sock.setblocking( 0 )
self.sock = sock
self.reactor = reactor
self.readEnabled = False
self.maxReadSize = 0
self.writeEnabled = False
self.pendingWrite = ''
self.closeCallback = None
self.errorCallback = None
self.inputCallback = None
self.writeCompleteCallback = None
self.shutdownFlag = False
def getSock( self ) : return self.sock
def setCloseCallback( self, closeCallback ) :
self.closeCallback = closeCallback
def setErrorCallback( self, errorCallback ) :
self.errorCallback = errorCallback
def setInputCallback( self, inputCallback ) :
self.inputCallback = inputCallback
def setWriteCompleteCallback( self, writeCompleteCallback ) :
self.writeCompleteCallback = writeCompleteCallback
def shutdown( self ) :
assert not self.shutdownFlag
self.shutdownFlag = True
if self.readEnabled :
self.reactor.removeReadCallback( self.sock.fileno() )
self.readEnabled = False
if self.writeEnabled :
self.reactor.removeWriteCallback( self.sock.fileno() )
self.writeEnabled = False
self.pendingWrite = ''
def close( self, deferred=False, callback=None ) :
assert not self.shutdownFlag
if not deferred :
self.shutdown()
self.sock.close()
return
pendingWrite = self.pendingWrite
self.shutdown()
return TCPCloser( self.sock, pendingWrite, self.reactor, callback ).getOp()
def hasShutdown( self ) : return self.shutdownFlag
def initiateRead( self, maxReadSize ) :
self.maxReadSize = maxReadSize
if not self.readEnabled :
self.reactor.addReadCallback( self.sock.fileno(), self._onRead )
self.readEnabled = True
def cancelRead( self ) :
if self.readEnabled :
self.reactor.removeReadCallback( self.sock.fileno() )
self.readEnabled = False
def writeData( self, data ) :
if self.writeEnabled :
assert self.pendingWrite
self.pendingWrite += data
return
self.pendingWrite = data
self.writeEnabled = True
self.reactor.addWriteCallback( self.sock.fileno(), self._onWrite )
def getPendingWrite( self ) :
return self.pendingWrite
def _notifyClose( self ) :
if self.readEnabled :
self.reactor.removeReadCallback( self.sock.fileno() )
self.readEnabled = False
if self.closeCallback :
self.closeCallback()
else :
self.close()
def _notifyError( self, err, errMsg ) :
if self.errorCallback :
self.errorCallback( err, errMsg )
else :
self.close()
def _notifyInput( self, data ) :
if self.inputCallback :
self.inputCallback( data )
def _notifyWriteComplete( self ) :
if self.writeCompleteCallback :
self.writeCompleteCallback()
def _onRead( self ) :
assert self.readEnabled
try :
data = self.sock.recv( self.maxReadSize )
if not data :
self._notifyClose()
return
self._notifyInput( data )
except sock_error, (err,errMsg) :
if err in (EWOULDBLOCK,EAGAIN) : return
self._notifyError( err, errMsg )
def _onWrite( self ) :
assert self.writeEnabled
assert self.pendingWrite
try :
while self.pendingWrite :
numWritten = self.sock.send( self.pendingWrite )
self.pendingWrite = self.pendingWrite[numWritten:]
self.writeEnabled = False
self.reactor.removeWriteCallback( self.sock.fileno() )
self._notifyWriteComplete()
except sock_error, (err,errMsg) :
if err in (EWOULDBLOCK,EAGAIN) : return
self._notifyError( err, errMsg )
class TCPMessageStream( object ) :
def __init__( self, sock, reactor ) :
self.stream = TCPStream( sock, reactor )
self.readEnabled = False
self.maxMsgLength = 65536
self.readingLength = True
self.msgLength = 0
self.buffer = ''
self.stream.setInputCallback( self._onInput )
self.getSock = self.stream.getSock
self.shutdown = self.stream.shutdown
self.close = self.stream.close
self.hasShutdown = self.stream.hasShutdown
self.getPendingWrite = self.stream.getPendingWrite
self.setCloseCallback = self.stream.setCloseCallback
self.setErrorCallback = self.stream.setErrorCallback
self.setWriteCompleteCallback = self.stream.setWriteCompleteCallback
self.invalidMessageCallback = None
self.inputCallback = None
def setInvalidMessageCallback( self, invalidMessageCallback ) :
self.invalidMessageCallback = invalidMessageCallback
def setInputCallback( self, inputCallback ) :
self.inputCallback = inputCallback
def enableRead( self, enable ) :
if enable != self.readEnabled :
self.readEnabled = enable
if enable :
if self.readingLength :
assert len(self.buffer) < 4
self.stream.initiateRead( 4-len(self.buffer) )
else :
assert len(self.buffer) < self.msgLength
self.stream.initiateRead( self.msgLength - len(self.buffer) )
else :
self.stream.cancelRead()
def isReadEnabled( self ) : return self.readEnabled
def sendMessage( self, data ) :
msg = pack( '<i%ds' % len(data), len(data), data )
self.stream.writeData( msg )
def _onInput( self, data ) :
assert self.readEnabled
if self.readingLength :
assert len(data) + len(self.buffer) <= 4
self.buffer += data
if len(self.buffer) == 4 :
self.msgLength = unpack( '<i', self.buffer )[0]
self.readingLength = False
if (self.msgLength <= 0) or (self.msgLength > self.maxMsgLength) :
if self.invalidMessageCallback :
self.invalidMessageCallback()
else :
self.close()
return
self.buffer = ''
self.stream.initiateRead( self.msgLength )
else :
self.stream.initiateRead( 4-len(self.buffer) )
else :
assert len(data) + len(self.buffer) <= self.msgLength
self.buffer += data
if len(self.buffer) == self.msgLength :
data = self.buffer
self.buffer = ''
self.msgLength = 0
self.readingLength = True
self.stream.initiateRead( 4 )
if self.inputCallback :
self.inputCallback( data )
else :
self.stream.initiateRead( self.msgLength - len(self.buffer) )
``` |
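TCPMessageStream frames every message with a 4-byte little-endian length prefix; a sketch of just the wire format, no sockets involved:
```python
from struct import pack, unpack

# what TCPMessageStream.sendMessage('hi') actually writes to the stream
framed = pack('<i%ds' % len('hi'), len('hi'), 'hi')
assert framed == '\x02\x00\x00\x00hi'

# the reader first pulls 4 bytes for the length, then that many payload bytes
length = unpack('<i', framed[:4])[0]
assert framed[4:4 + length] == 'hi'
```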
{
"source": "jmcvetta/django-dynamic-choices",
"score": 2
} |
#### File: django-dynamic-choices/tests/test_admin.py
```python
from __future__ import unicode_literals
import json
import os
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase, TestCase
from django.test.client import Client
from django.test.utils import override_settings
from django.utils.encoding import force_text
from dynamic_choices.admin import DynamicAdmin
from dynamic_choices.forms import DynamicModelForm
from dynamic_choices.forms.fields import (
DynamicModelChoiceField, DynamicModelMultipleChoiceField,
)
from .admin import PuppetAdmin
from .models import ALIGNMENT_GOOD, Master, Puppet
MODULE_PATH = os.path.abspath(os.path.dirname(__file__))
@override_settings(
TEMPLATE_LOADERS=[
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
TEMPLATE_DIRS=[os.path.join(MODULE_PATH, 'templates')],
)
class ChangeFormTemplateTests(SimpleTestCase):
template_attr = 'change_form_template'
def test_doesnt_extend_change_form(self):
expected_message = (
"Make sure DoesntExtend.%s template extends "
"'admin/dynamic_choices/change_form.html' in order to enable DynamicAdmin"
) % self.template_attr
with self.assertRaisesMessage(ImproperlyConfigured, expected_message):
type(str('DoesntExtend'), (DynamicAdmin,), {
self.template_attr: 'dynamic_choices_tests/do_not_extends_change_form.html'
})
def test_extends_directly(self):
type(str('ExtendsDirectly'), (DynamicAdmin,), {
self.template_attr: 'dynamic_choices_tests/extends_change_form.html'
})
def test_extends_change_from_through_child(self):
type(str('ExtendsThroughChild'), (DynamicAdmin,), {
self.template_attr: 'dynamic_choices_tests/extends_change_form_twice.html'
})
class AddFormTemplateTests(ChangeFormTemplateTests):
template_attr = 'add_form_template'
class AdminTestBase(TestCase):
fixtures = ['dynamic_choices_test_data', 'dynamic_choices_admin_test_data']
def setUp(self):
self.client = Client()
self.client.login(username='superuser', password='<PASSWORD>')
class DynamicAdminFormTests(AdminTestBase):
def assertChoices(self, queryset, field, msg=None):
self.assertEqual(list(queryset), list(field.widget.choices.queryset), msg)
def assertEmptyChoices(self, field, msg=None):
return self.assertChoices((), field, msg=msg)
def test_GET_add(self):
response = self.client.get('/admin/dynamic_choices/puppet/add/', follow=True)
adminform = response.context['adminform']
form = adminform.form
enemies_inline = response.context['inline_admin_formsets'][0]
fields = form.fields
self.assertEqual(200, response.status_code, 'Cannot display add page')
self.assertIsInstance(form, DynamicModelForm, 'Form is not an instance of DynamicModelForm')
self.assertIsInstance(fields['master'], DynamicModelChoiceField,
'Field master is not an instance of DynamicChoicesField')
self.assertIsInstance(fields['friends'], DynamicModelMultipleChoiceField,
'Field friends is not an instance of DynamicModelMultipleChoiceField')
self.assertEmptyChoices(fields['master'], 'Since no alignment is defined master choices should be empty')
self.assertEmptyChoices(fields['friends'], 'Since no alignment is defined friends choices should be empty')
enemies_inline_form = enemies_inline.opts.form
self.assertTrue(issubclass(enemies_inline_form, DynamicModelForm) or
enemies_inline_form.__name__ == "Dynamic%s" % enemies_inline_form.__base__.__name__,
'Inline form is not a subclass of DynamicModelForm')
for form in enemies_inline.formset.forms:
fields = form.fields
self.assertEmptyChoices(fields['enemy'], 'Since no alignment is defined enemy choices should be empty')
self.assertEmptyChoices(
fields['because_of'], 'Since no enemy is defined because_of choices should be empty')
def test_GET_add_with_defined_alignment(self):
alignment = ALIGNMENT_GOOD
response = self.client.get('/admin/dynamic_choices/puppet/add/', {'alignment': alignment}, follow=True)
self.assertEqual(200, response.status_code, 'Cannot display add page')
adminform = response.context['adminform']
form = adminform.form
enemies_inline = response.context['inline_admin_formsets'][0]
fields = form.fields
self.assertIsInstance(form, DynamicModelForm, 'Form is not an instance of DynamicModelForm')
self.assertIsInstance(fields['master'], DynamicModelChoiceField,
'Field master is not an instance of DynamicChoicesField')
self.assertIsInstance(fields['friends'], DynamicModelMultipleChoiceField,
'Field friends is not an instance of DynamicModelMultipleChoiceField')
self.assertChoices(Master.objects.filter(alignment=alignment), fields['master'],
"Since puppet alignment is 'Good' only 'Good' master are valid choices for master field")
self.assertChoices(Puppet.objects.filter(alignment=alignment), fields['friends'],
"Since puppet alignment is 'Good' only 'Good' puppets are valid choices for friends field")
enemies_inline_form = enemies_inline.opts.form
self.assertTrue(issubclass(enemies_inline_form, DynamicModelForm) or
enemies_inline_form.__name__ == "Dynamic%s" % enemies_inline_form.__base__.__name__,
'Inline form is not a subclass of DynamicModelForm')
for form in enemies_inline.formset.forms:
fields = form.fields
self.assertChoices(
Puppet.objects.exclude(alignment=alignment), fields['enemy'],
"Since puppet alignment is 'Good' only not 'Good' puppets are valid choices for enemy field"
)
self.assertEmptyChoices(
fields['because_of'], 'Since no enemy is defined because_of choices should be empty')
def test_POST_add(self):
alignment = ALIGNMENT_GOOD
data = {
'alignment': alignment,
'master': 1,
'friends': [1],
'enemy_set-TOTAL_FORMS': 3,
'enemy_set-INITIAL_FORMS': 0,
}
response = self.client.post('/admin/dynamic_choices/puppet/add/', data)
self.assertEqual(302, response.status_code, 'Failed to validate')
# Attempt to save an empty enemy inline
# and make sure because_of has correct choices
def test_POST_add_because_of(self):
alignment = ALIGNMENT_GOOD
data = {
'alignment': alignment,
'master': 1,
'friends': [1],
'enemy_set-TOTAL_FORMS': 2,
'enemy_set-INITIAL_FORMS': 0,
'enemy_set-0-enemy': 2,
}
response = self.client.post('/admin/dynamic_choices/puppet/add/', data)
self.assertNotEqual(302, response.status_code, 'Empty inline should not validate')
self.assertChoices(
Master.objects.filter(alignment=Puppet.objects.get(id=2).alignment),
response.context['inline_admin_formsets'][0].formset.forms[0].fields['because_of'],
'Since enemy is specified because_of choices must have the same alignment'
)
self.assertChoices(
Puppet.objects.exclude(alignment=ALIGNMENT_GOOD),
response.context['inline_admin_formsets'][0].formset.forms[0].fields['enemy'],
'Since puppet alignment is specified only non-good puppets should be allowed to be enemies'
)
self.assertEmptyChoices(
response.context['inline_admin_formsets'][0].formset.forms[1].fields['because_of'],
'Enemy is only specified for the first inline, second one because_of should be empty'
)
self.assertChoices(
Puppet.objects.exclude(alignment=ALIGNMENT_GOOD),
response.context['inline_admin_formsets'][0].formset.forms[1].fields['enemy'],
'Since puppet alignment is specified only non-good puppets should be allowed to be enemies'
)
# TODO: Add test_(GET & POST)_edit testcases
def test_user_defined_forms(self):
self.assertTrue(issubclass(PuppetAdmin.form, DynamicModelForm),
'User defined forms should be subclassed from DynamicModelForm by metaclass')
self.assertTrue(
issubclass(PuppetAdmin.inlines[0].form, DynamicModelForm),
'User defined inline forms should be subclassed from DynamicModelForm by dynamic_inline_factory'
)
class AdminChoicesTests(AdminTestBase):
def _get_choices(self, data=None):
default = {
'enemy_set-TOTAL_FORMS': 0,
'enemy_set-INITIAL_FORMS': 0,
}
if data:
default.update(data)
return self.client.get('/admin/dynamic_choices/puppet/1/choices/', default)
def test_medias_presence(self):
"""Make sure extra js files are present in the response"""
response = self.client.get('/admin/dynamic_choices/puppet/1/')
self.assertContains(response, 'js/dynamic-choices.js')
self.assertContains(response, 'js/dynamic-choices-admin.js')
def test_fk_as_empty_string(self):
"""Make sure fk specified as empty string are parsed correctly"""
data = {'alignment': ''}
response = self._get_choices(data)
self.assertEqual(200, response.status_code, "Empty string fk shouldn't be cast as int")
def test_empty_string_value_overrides_default(self):
"""Make sure specified empty string overrides instance field"""
data = {
'DYNAMIC_CHOICES_FIELDS': 'enemy_set-0-because_of',
'enemy_set-0-id': 1,
'enemy_set-0-enemy': '',
'enemy_set-TOTAL_FORMS': 3,
'enemy_set-INITIAL_FORMS': 1,
}
response = self._get_choices(data)
self.assertEqual(response.status_code, 200)
data = json.loads(force_text(response.content))
self.assertEqual(data['enemy_set-0-because_of']['value'], [['', '---------']])
def test_empty_form(self):
"""Make sure data is provided for an empty form"""
data = {
'DYNAMIC_CHOICES_FIELDS': 'enemy_set-__prefix__-enemy',
'alignment': 1,
}
response = self._get_choices(data)
self.assertEqual(response.status_code, 200)
data = json.loads(force_text(response.content))
self.assertEqual(data['enemy_set-__prefix__-enemy']['value'], [
['', '---------'],
['Evil', [[2, 'Evil puppet (2)'], ]],
['Neutral', []],
])
``` |
{
"source": "jmcvey3/dolfyn",
"score": 3
} |
#### File: dolfyn/adp/clean.py
```python
import numpy as np
from scipy.signal import medfilt
import xarray as xr
from ..tools.misc import medfiltnan
from ..rotate.api import rotate2
from ..rotate.base import _make_model, quaternion2orient
def set_range_offset(ds, h_deploy):
"""
Adds an instrument's height above seafloor (for an up-facing instrument)
or depth below water surface (for a down-facing instrument) to the range
of depth bins
Parameters
----------
ds : xarray.Dataset
        The adcp dataset to adjust 'range' on
h_deploy : numeric
Deployment location in the water column, in [m]
Returns
-------
ds : xarray.Dataset
The adcp dataset with 'range' adjusted
Notes
-----
`Center of bin 1 = h_deploy + blank_dist + cell_size`
    Nortek instruments do not record `h_deploy`, so the range DOLfYN
    calculates is the distance from the ADCP transducers. TRDI asks for
    `h_deploy` in its deployment software, so DOLfYN already knows it.
If the ADCP is mounted on a tripod on the seafloor, `h_deploy` will be
the height of the tripod +/- any extra distance to the transducer faces.
If the instrument is vessel-mounted, `h_deploy` is the distance between
the surface and downward-facing ADCP's transducers.
"""
r = [s for s in ds.dims if 'range' in s]
for val in r:
ds = ds.assign_coords({val: ds[val].values + h_deploy})
ds[val].attrs['units'] = 'm'
ds.attrs['h_deploy'] = h_deploy
return ds
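# Illustrative usage (hypothetical value): for an up-facing ADCP on a 0.5 m
# tall mount, `ds = set_range_offset(ds, 0.5)` shifts every 'range*'
# coordinate up by 0.5 m so bin centers are referenced to the seafloor
# rather than to the transducer faces.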
def find_surface(ds, thresh=10, nfilt=None):
"""
Find the surface (water level or seafloor) from amplitude data
Parameters
----------
ds : xarray.Dataset
The full adcp dataset
thresh : int
Specifies the threshold used in detecting the surface.
(The amount that amplitude must increase by near the surface for it to
be considered a surface hit)
nfilt : int
Specifies the width of the median filter applied, must be odd
Returns
-------
ds : xarray.Dataset
The full adcp dataset with `depth` added
"""
# This finds the maximum of the echo profile:
inds = np.argmax(ds.amp.values, axis=1)
# This finds the first point that increases (away from the profiler) in
# the echo profile
edf = np.diff(ds.amp.values.astype(np.int16), axis=1)
inds2 = np.max((edf < 0) *
np.arange(ds.vel.shape[1] - 1,
dtype=np.uint8)[None, :, None], axis=1) + 1
# Calculate the depth of these quantities
d1 = ds.range.values[inds]
d2 = ds.range.values[inds2]
# Combine them:
D = np.vstack((d1, d2))
# Take the median value as the estimate of the surface:
d = np.median(D, axis=0)
# Throw out values that do not increase near the surface by *thresh*
for ip in range(ds.vel.shape[1]):
itmp = np.min(inds[:, ip])
if (edf[itmp:, :, ip] < thresh).all():
d[ip] = np.NaN
if nfilt:
dfilt = medfiltnan(d, nfilt, thresh=.4)
dfilt[dfilt == 0] = np.NaN
d = dfilt
ds['depth'] = xr.DataArray(d, dims=['time'], attrs={'units': 'm'})
return ds
def surface_from_P(ds, salinity=35):
"""
Approximates distance to water surface above ADCP from the pressure sensor.
Parameters
----------
ds : xarray.Dataset
The full adcp dataset
salinity: numeric
Water salinity in psu
Returns
-------
ds : xarray.Dataset
The full adcp dataset with `depth` added
Notes
-----
Requires that the instrument's pressure sensor was calibrated/zeroed
before deployment to remove atmospheric pressure.
"""
    # hydrostatic depth: convert pressure from dbar to Pa, then divide by rho*g
rho = salinity + 1000
d = (ds.pressure*10000)/(9.81*rho)
if hasattr(ds, 'h_deploy'):
d += ds.h_deploy
ds['depth'] = xr.DataArray(d, dims=['time'], attrs={'units': 'm'})
return ds
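# Worked example (illustrative numbers): 10 dbar of gauge pressure with
# salinity=35 gives rho = 1035 kg/m^3 and d = (10*10000)/(9.81*1035)
# ~= 9.85 m of water above the transducer (plus h_deploy if it is set).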
def nan_beyond_surface(ds, val=np.nan):
"""
Mask the values of the data that are beyond the surface.
Parameters
----------
ds : xarray.Dataset
The adcp dataset to clean
val : nan or numeric
Specifies the value to set the bad values to (default np.nan).
Returns
-------
ds : xarray.Dataset
        The adcp dataset where range-dependent variables at ranges beyond
        `depth` are set to `val` (NaN by default)
Notes
-----
Surface interference expected to happen at `r > depth * cos(beam_angle)`
"""
var = [h for h in ds.keys() if any(s for s in ds[h].dims if 'range' in s)]
if 'nortek' in _make_model(ds):
beam_angle = 25 * (np.pi/180)
else: # TRDI
try:
beam_angle = ds.beam_angle
except:
beam_angle = 20 * (np.pi/180)
bds = ds.range > (ds.depth * np.cos(beam_angle) - ds.cell_size)
if 'echo' in var:
bds_echo = ds.range_echo > ds.depth
ds['echo'].values[..., bds_echo] = val
var.remove('echo')
for nm in var:
# workaround for xarray since it can't handle 2D boolean arrays
a = ds[nm].values
try:
a[..., bds] = val
except: # correlation
a[..., bds] = 0
ds[nm].values = a
return ds
def vel_exceeds_thresh(ds, thresh=5, val=np.nan):
"""
Find values of the velocity data that exceed a threshold value,
and assign NaN to the velocity data where the threshold is
exceeded.
Parameters
----------
ds : xr.Dataset
The adcp dataset to clean
thresh : numeric
The maximum value of velocity to screen
val : nan or numeric
Specifies the value to set the bad values to (default np.nan)
Returns
-------
ds : xarray.Dataset
        The adcp dataset with datapoints beyond `thresh` set to `val`
"""
bd = np.zeros(ds.vel.shape, dtype='bool')
bd |= (np.abs(ds.vel.values) > thresh)
ds.vel.values[bd] = val
return ds
def correlation_filter(ds, thresh=50, val=np.nan):
"""
Filters out velocity data where correlation is below a
threshold in the beam correlation data.
Parameters
----------
ds : xarray.Dataset
The adcp dataset to clean.
thresh : numeric
The maximum value of correlation to screen, in counts or %
val : numeric
Value to set masked correlation data to, default is nan
Returns
-------
ds : xarray.Dataset
The adcp dataset with low correlation values set to `val`
"""
# copy original ref frame
coord_sys_orig = ds.coord_sys
# correlation is always in beam coordinates
mask = (ds.corr.values <= thresh)
if hasattr(ds, 'vel_b5'):
mask_b5 = (ds.corr_b5.values <= thresh)
ds.vel_b5.values[mask_b5] = val
ds = rotate2(ds, 'beam')
ds.vel.values[mask] = val
ds = rotate2(ds, coord_sys_orig)
return ds
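# Illustrative usage: `ds = correlation_filter(ds, thresh=50)` rotates to beam
# coordinates, masks velocities wherever beam correlation is at or below 50,
# then rotates back to the dataset's original coordinate system.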
def medfilt_orient(ds, nfilt=7):
"""
Median filters the orientation data (heading-pitch-roll or quaternions)
Parameters
----------
ds : xarray.Dataset
The adcp dataset to clean
nfilt : numeric
The length of the median-filtering kernel
*nfilt* must be odd.
    Returns
    -------
ds : xarray.Dataset
The adcp dataset with the filtered orientation data
See Also
--------
scipy.signal.medfilt()
"""
if getattr(ds, 'has_imu'):
q_filt = np.zeros(ds.quaternion.shape)
for i in range(ds.quaternion.q.size):
q_filt[i] = medfilt(ds.quaternion[i].values, nfilt)
ds.quaternion.values = q_filt
ds['orientmat'] = quaternion2orient(ds.quaternion)
return ds
else:
# non Nortek AHRS-equipped instruments
do_these = ['pitch', 'roll', 'heading']
for nm in do_these:
ds[nm].values = medfilt(ds[nm].values, nfilt)
return ds.drop_vars('orientmat')
def fillgaps_time(ds, method='cubic', max_gap=None):
"""
Fill gaps (nan values) across time using the specified method
Parameters
----------
ds : xarray.Dataset
The adcp dataset to clean
method : string
Interpolation method to use
max_gap : numeric
        Max number of consecutive NaN's to interpolate across
Returns
-------
ds : xarray.Dataset
The adcp dataset with gaps in velocity interpolated across time
See Also
--------
xarray.DataArray.interpolate_na()
"""
ds['vel'] = ds.vel.interpolate_na(dim='time', method=method,
use_coordinate=True,
max_gap=max_gap)
if hasattr(ds, 'vel_b5'):
        ds['vel_b5'] = ds.vel_b5.interpolate_na(dim='time', method=method,
use_coordinate=True,
max_gap=max_gap)
return ds
def fillgaps_depth(ds, method='cubic', max_gap=None):
"""
Fill gaps (nan values) along the depth profile using the specified method
Parameters
----------
ds : xarray.Dataset
The adcp dataset to clean
method : string
Interpolation method to use
max_gap : numeric
        Max number of consecutive NaN's to interpolate across
Returns
-------
ds : xarray.Dataset
The adcp dataset with gaps in velocity interpolated across depth profiles
See Also
--------
xarray.DataArray.interpolate_na()
"""
ds['vel'] = ds.vel.interpolate_na(dim='range', method=method,
use_coordinate=False,
max_gap=max_gap)
if hasattr(ds, 'vel_b5'):
ds['vel_b5'] = ds.vel.interpolate_na(dim='range', method=method,
use_coordinate=True,
max_gap=max_gap)
return ds
```
#### File: dolfyn/io/api.py
```python
import numpy as np
import scipy.io as sio
import xarray as xr
import pkg_resources
from .nortek import read_nortek
from .nortek2 import read_signature
from .rdi import read_rdi
from .base import _create_dataset
from ..rotate.base import _set_coords
from ..time import epoch2date, date2epoch, date2matlab, matlab2date
# time variables stored as data variables (as opposed to coordinates)
t_additional = ['hdwtime_gps', ]
def read(fname, userdata=True, nens=None):
"""Read a binary Nortek (e.g., .VEC, .wpr, .ad2cp, etc.) or RDI
(.000, .PD0, .ENX, etc.) data file.
Parameters
----------
    fname : string
Filename of instrument file to read.
userdata : True, False, or string of userdata.json filename (default ``True``)
Whether to read the '<base-filename>.userdata.json' file.
nens : None (default: read entire file), int, or 2-element tuple (start, stop)
Number of pings or ensembles to read from the file
Returns
-------
ds : xarray.Dataset
An xarray dataset from instrument datafile.
"""
# Loop over binary readers until we find one that works.
for func in [read_nortek, read_signature, read_rdi]:
try:
ds = func(fname, userdata=userdata, nens=nens)
except:
continue
else:
return ds
raise Exception(
"Unable to find a suitable reader for file {}.".format(fname))
def read_example(name, **kwargs):
"""Read an ADCP or ADV datafile from the examples directory.
Parameters
----------
name : str
A few available files:
AWAC_test01.wpr
BenchFile01.ad2cp
RDI_test01.000
burst_mode01.VEC
vector_data01.VEC
vector_data_imu01.VEC
winriver01.PD0
winriver02.PD0
Returns
-------
ds : xarray.Dataset
An xarray dataset from the binary instrument data.
"""
filename = pkg_resources.resource_filename(
'dolfyn',
'example_data/' + name)
return read(filename, **kwargs)
def save(dataset, filename):
"""Save xarray dataset as netCDF (.nc).
Parameters
----------
dataset : xarray.Dataset
filename : str
Filename and/or path with the '.nc' extension
Notes
-----
Drops 'config' lines.
"""
if '.' in filename:
assert filename.endswith('nc'), 'File extension must be of the type nc'
else:
filename += '.nc'
# Dropping the detailed configuration stats because netcdf can't save it
for key in list(dataset.attrs.keys()):
if 'config' in key:
dataset.attrs.pop(key)
# Handling complex values for netCDF4
dataset.attrs['complex_vars'] = []
for var in dataset.data_vars:
if np.iscomplexobj(dataset[var]):
dataset[var+'_real'] = dataset[var].real
dataset[var+'_imag'] = dataset[var].imag
dataset = dataset.drop(var)
dataset.attrs['complex_vars'].append(var)
# Keeping time in raw file's time instance, unaware of timezone
t_list = [t for t in dataset.coords if 'time' in t]
for ky in t_list:
dt = epoch2date(dataset[ky])
dataset = dataset.assign_coords({ky: dt})
t_data = [t for t in dataset.data_vars if t in t_additional]
for ky in t_data:
dt = epoch2date(dataset[ky])
dataset = dataset.drop_vars(ky) # must do b/c of netcdf encoding error
dataset[ky] = xr.DataArray(dt, coords={'time_gps': dataset.time_gps})
dataset.to_netcdf(filename, format='NETCDF4', engine='netcdf4')
def load(filename):
"""Load xarray dataset from netCDF (.nc)
Parameters
----------
filename : str
Filename and/or path with the '.nc' extension
Returns
-------
ds : xarray.Dataset
An xarray dataset from the binary instrument data.
"""
if '.' in filename:
assert filename.endswith('nc'), 'File extension must be of the type nc'
else:
filename += '.nc'
ds = xr.load_dataset(filename, engine='netcdf4')
# Single item lists were saved as 'int' or 'str'
if hasattr(ds, 'rotate_vars') and len(ds.rotate_vars[0]) == 1:
ds.attrs['rotate_vars'] = [ds.rotate_vars]
# Python lists were saved as numpy arrays
if hasattr(ds, 'rotate_vars') and type(ds.rotate_vars) is not list:
ds.attrs['rotate_vars'] = list(ds.rotate_vars)
# Rejoin complex numbers
if hasattr(ds, 'complex_vars') and len(ds.complex_vars):
if len(ds.complex_vars[0]) == 1:
ds.attrs['complex_vars'] = [ds.complex_vars]
for var in ds.complex_vars:
ds[var] = ds[var+'_real'] + ds[var+'_imag'] * 1j
ds = ds.drop_vars([var+'_real', var+'_imag'])
ds.attrs.pop('complex_vars')
# Reload raw file's time instance since the timezone is unknown
t_list = [t for t in ds.coords if 'time' in t]
for ky in t_list:
dt = ds[ky].values.astype('datetime64[us]').tolist()
ds = ds.assign_coords({ky: date2epoch(dt)})
ds[ky].attrs['description'] = 'seconds since 1970-01-01 00:00:00'
# Time data variables
t_data = [t for t in ds.data_vars if t in t_additional]
for ky in t_data:
dt = ds[ky].values.astype('datetime64[us]').tolist()
ds[ky].data = date2epoch(dt)
ds[ky].attrs['description'] = 'seconds since 1970-01-01 00:00:00'
return ds
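# Illustrative round trip (hypothetical filename): `save(ds, 'adcp.nc')`
# splits complex variables into *_real/*_imag pairs and converts epoch time
# to datetimes; `load('adcp.nc')` reverses both steps when reading it back.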
def save_mat(dataset, filename, datenum=True):
"""Save xarray dataset as a MATLAB (.mat) file
Parameters
----------
dataset : xarray.Dataset
Data to save
filename : str
Filename and/or path with the '.mat' extension
datenum : bool
Converts epoch time into MATLAB datenum
Notes
-----
The xarray data format is saved as a MATLAB structure with the fields
'vars, coords, config, units'
See Also
--------
scipy.io.savemat()
"""
if '.' in filename:
assert filename.endswith(
'mat'), 'File extension must be of the type mat'
else:
filename += '.mat'
# Convert from epoch time to datenum
if datenum:
t_list = [t for t in dataset.coords if 'time' in t]
for ky in t_list:
dt = date2matlab(epoch2date(dataset[ky]))
dataset = dataset.assign_coords({ky: dt})
t_data = [t for t in dataset.data_vars if t in t_additional]
for ky in t_data:
dt = date2matlab(epoch2date(dataset[ky]))
dataset[ky].data = dt
# Save xarray structure with more descriptive structure names
matfile = {'vars': {}, 'coords': {}, 'config': {}, 'units': {}}
for key in dataset.data_vars:
matfile['vars'][key] = dataset[key].values
if hasattr(dataset[key], 'units'):
matfile['units'][key] = dataset[key].units
for key in dataset.coords:
matfile['coords'][key] = dataset[key].values
matfile['config'] = dataset.attrs
sio.savemat(filename, matfile)
def load_mat(filename, datenum=True):
"""Load xarray dataset from MATLAB (.mat) file, complimentary to `save_mat()`
A .mat file must contain the fields: {vars, coords, config, units},
    where 'coords' contains the dimensions of all variables in 'vars'.
Parameters
----------
filename : str
Filename and/or path with the '.mat' extension
datenum : bool
Converts MATLAB datenum into epoch time
Returns
-------
ds : xarray.Dataset
An xarray dataset from the binary instrument data.
See Also
--------
scipy.io.loadmat()
"""
if '.' in filename:
assert filename.endswith(
'mat'), 'File extension must be of the type mat'
else:
filename += '.mat'
data = sio.loadmat(filename, struct_as_record=False, squeeze_me=True)
ds_dict = {'vars': {}, 'coords': {}, 'config': {}, 'units': {}}
for nm in ds_dict:
key_list = data[nm]._fieldnames
for ky in key_list:
ds_dict[nm][ky] = getattr(data[nm], ky)
ds_dict['data_vars'] = ds_dict.pop('vars')
ds_dict['attrs'] = ds_dict.pop('config')
# Recreate dataset
ds = _create_dataset(ds_dict)
ds = _set_coords(ds, ds.coord_sys)
# Convert datenum time back into epoch time
if datenum:
t_list = [t for t in ds.coords if 'time' in t]
for ky in t_list:
dt = date2epoch(matlab2date(ds[ky].values))
ds = ds.assign_coords({ky: dt})
ds[ky].attrs['description'] = 'seconds since 1970-01-01 00:00:00'
t_data = [t for t in ds.data_vars if t in t_additional]
for ky in t_data:
dt = date2epoch(matlab2date(ds[ky].values))
ds[ky].data = dt
ds[ky].attrs['description'] = 'seconds since 1970-01-01 00:00:00'
    # Restore 'rotate_vars' to a proper list
if hasattr(ds, 'rotate_vars') and len(ds.rotate_vars[0]) == 1:
ds.attrs['rotate_vars'] = [ds.rotate_vars]
else:
ds.attrs['rotate_vars'] = [x.strip(' ') for x in list(ds.rotate_vars)]
return ds
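# Illustrative round trip (hypothetical filename): `save_mat(ds, 'adcp.mat')`
# writes the MATLAB structure {vars, coords, config, units};
# `load_mat('adcp.mat')` rebuilds the xarray.Dataset from those fields and
# converts MATLAB datenum back to epoch time.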
```
#### File: dolfyn/io/base.py
```python
import numpy as np
import xarray as xr
import six
import json
import os
import warnings
def _find_userdata(filename, userdata=True):
# This function finds the file to read
if userdata:
for basefile in [filename.rsplit('.', 1)[0],
filename]:
jsonfile = basefile + '.userdata.json'
if os.path.isfile(jsonfile):
return _read_userdata(jsonfile)
elif isinstance(userdata, (six.string_types)) or hasattr(userdata, 'read'):
return _read_userdata(userdata)
return {}
def _read_userdata(fname):
"""Reads a userdata.json file and returns the data it contains as a
dictionary.
"""
with open(fname) as data_file:
data = json.load(data_file)
for nm in ['body2head_rotmat', 'body2head_vec']:
if nm in data:
new_name = 'inst' + nm[4:]
warnings.warn(
f'{nm} has been deprecated, please change this to {new_name} \
in {fname}.')
data[new_name] = data.pop(nm)
if 'inst2head_rotmat' in data and \
data['inst2head_rotmat'] in ['identity', 'eye', 1, 1.]:
data['inst2head_rotmat'] = np.eye(3)
for nm in ['inst2head_rotmat', 'inst2head_vec']:
if nm in data:
data[nm] = np.array(data[nm])
if 'coord_sys' in data:
raise Exception("The instrument coordinate system "
"('coord_sys') should not be specified in "
"the .userdata.json file, remove this and "
"read the file again.")
return data
def _handle_nan(data):
"""Finds nan's that cause issues in running the rotation algorithms
and deletes them.
"""
nan = np.zeros(data['coords']['time'].shape, dtype=bool)
l = data['coords']['time'].size
if any(np.isnan(data['coords']['time'])):
nan += np.isnan(data['coords']['time'])
var = ['accel', 'angrt', 'mag']
for key in data['data_vars']:
if any(val in key for val in var):
shp = data['data_vars'][key].shape
if shp[-1] == l:
if len(shp) == 1:
if any(np.isnan(data['data_vars'][key])):
nan += np.isnan(data['data_vars'][key])
elif len(shp) == 2:
if any(np.isnan(data['data_vars'][key][-1])):
nan += np.isnan(data['data_vars'][key][-1])
if nan.sum() > 0:
data['coords']['time'] = data['coords']['time'][~nan]
for key in data['data_vars']:
if data['data_vars'][key].shape[-1] == l:
data['data_vars'][key] = data['data_vars'][key][..., ~nan]
return data
def _create_dataset(data):
"""Creates an xarray dataset from dictionary created from binary
readers.
Direction 'dir' coordinates get reset in `set_coords`
"""
ds = xr.Dataset()
inst = ['X', 'Y', 'Z']
earth = ['E', 'N', 'U']
beam = list(range(1, data['data_vars']['vel'].shape[0]+1))
tag = ['_b5', '_echo', '_bt', '_gps', '_ast']
for key in data['data_vars']:
# orientation matrices
if 'mat' in key:
try: # AHRS orientmat
if any(val in key for val in tag):
tg = '_' + key.rsplit('_')[-1]
else:
tg = ''
time = data['coords']['time'+tg]
if data['attrs']['inst_type'] == 'ADV':
coords = {'earth': earth, 'inst': inst, 'time'+tg: time}
dims = ['earth', 'inst', 'time'+tg]
else:
coords = {'inst': inst, 'earth': earth, 'time'+tg: time}
dims = ['inst', 'earth', 'time'+tg]
ds[key] = xr.DataArray(data['data_vars'][key], coords, dims)
except: # the other 2 (beam2inst & inst2head)
ds[key] = xr.DataArray(data['data_vars'][key],
coords={'beam': beam,
'x*': beam},
dims=['beam', 'x*'])
# quaternion units never change
elif 'quat' in key:
if any(val in key for val in tag):
tg = '_' + key.rsplit('_')[-1]
else:
tg = ''
ds[key] = xr.DataArray(data['data_vars'][key],
coords={'q': ['w', 'x', 'y', 'z'],
'time'+tg: data['coords']['time'+tg]},
dims=['q', 'time'+tg])
else:
ds[key] = xr.DataArray(data['data_vars'][key])
try: # not all variables have units
ds[key].attrs['units'] = data['units'][key]
except: # make sure ones with tags get units
tg = '_' + key.rsplit('_')[-1]
if any(val in key for val in tag):
ds[key].attrs['units'] = data['units'][key[:-len(tg)]]
else:
pass
shp = data['data_vars'][key].shape
vshp = data['data_vars']['vel'].shape
l = len(shp)
if l == 1: # 1D variables
if any(val in key for val in tag):
tg = '_' + key.rsplit('_')[-1]
else:
tg = ''
ds[key] = ds[key].rename({'dim_0': 'time'+tg})
ds[key] = ds[key].assign_coords(
{'time'+tg: data['coords']['time'+tg]})
elif l == 2: # 2D variables
if key == 'echo':
ds[key] = ds[key].rename({'dim_0': 'range_echo',
'dim_1': 'time_echo'})
ds[key] = ds[key].assign_coords({'range_echo': data['coords']['range_echo'],
'time_echo': data['coords']['time_echo']})
# 3- & 4-beam instrument vector data, bottom tracking
elif shp[0] == vshp[0] and not any(val in key for val in tag[:2]):
# b/c rdi time
if 'bt' in key and 'time_bt' in data['coords']:
tg = '_bt'
else:
tg = ''
ds[key] = ds[key].rename({'dim_0': 'dir',
'dim_1': 'time'+tg})
ds[key] = ds[key].assign_coords({'dir': beam,
'time'+tg: data['coords']['time'+tg]})
# 4-beam instrument IMU data
elif shp[0] == vshp[0]-1:
if not any(val in key for val in tag):
tg = ''
else:
tg = [val for val in tag if val in key]
tg = tg[0]
ds[key] = ds[key].rename({'dim_0': 'dirIMU',
'dim_1': 'time'+tg})
ds[key] = ds[key].assign_coords({'dirIMU': [1, 2, 3],
'time'+tg: data['coords']['time'+tg]})
# b5 and echo tagged variables
elif any(val in key for val in tag[:2]):
tg = [val for val in tag if val in key]
tg = tg[0]
ds[key] = ds[key].rename({'dim_0': 'range'+tg,
'dim_1': 'time'+tg})
ds[key] = ds[key].assign_coords({'range'+tg: data['coords']['range'+tg],
'time'+tg: data['coords']['time'+tg]})
else:
warnings.warn(f'Variable not included in dataset: {key}')
elif l == 3: # 3D variables
if not any(val in key for val in tag):
if 'vel' in key:
dim0 = 'dir'
else: # amp, corr
dim0 = 'beam'
ds[key] = ds[key].rename({'dim_0': dim0,
'dim_1': 'range',
'dim_2': 'time'})
ds[key] = ds[key].assign_coords({dim0: beam,
'range': data['coords']['range'],
'time': data['coords']['time']})
elif 'b5' in key:
# xarray can't handle coords of length 1
ds[key] = ds[key][0]
ds[key] = ds[key].rename({'dim_1': 'range_b5',
'dim_2': 'time_b5'})
ds[key] = ds[key].assign_coords({'range_b5': data['coords']['range_b5'],
'time_b5': data['coords']['time_b5']})
else:
warnings.warn(f'Variable not included in dataset: {key}')
# coordinate units
r_list = [r for r in ds.coords if 'range' in r]
for ky in r_list:
ds[ky].attrs['units'] = 'm'
t_list = [t for t in ds.coords if 'time' in t]
for ky in t_list:
ds[ky].attrs['description'] = 'seconds since 1970-01-01 00:00:00'
ds.attrs = data['attrs']
return ds
```
#### File: dolfyn/tests/test_rotate_adp.py
```python
from dolfyn.tests import test_read_adp as tr
from dolfyn.tests.base import load_ncdata as load, save_ncdata as save
from dolfyn.rotate.api import rotate2, calc_principal_heading
import numpy as np
from xarray.testing import assert_allclose
import numpy.testing as npt
def test_rotate_beam2inst(make_data=False):
td_rdi = rotate2(tr.dat_rdi, 'inst')
td_sig = rotate2(tr.dat_sig, 'inst')
td_sig_i = rotate2(tr.dat_sig_i, 'inst')
td_sig_ieb = rotate2(tr.dat_sig_ieb, 'inst')
if make_data:
save(td_rdi, 'RDI_test01_rotate_beam2inst.nc')
save(td_sig, 'BenchFile01_rotate_beam2inst.nc')
save(td_sig_i, 'Sig1000_IMU_rotate_beam2inst.nc')
save(td_sig_ieb, 'VelEchoBT01_rotate_beam2inst.nc')
return
cd_rdi = load('RDI_test01_rotate_beam2inst.nc')
cd_sig = load('BenchFile01_rotate_beam2inst.nc')
cd_sig_i = load('Sig1000_IMU_rotate_beam2inst.nc')
cd_sig_ieb = load('VelEchoBT01_rotate_beam2inst.nc')
assert_allclose(td_rdi, cd_rdi, atol=1e-5)
assert_allclose(td_sig, cd_sig, atol=1e-5)
assert_allclose(td_sig_i, cd_sig_i, atol=1e-5)
assert_allclose(td_sig_ieb, cd_sig_ieb, atol=1e-5)
def test_rotate_inst2beam(make_data=False):
td = load('RDI_test01_rotate_beam2inst.nc')
td = rotate2(td, 'beam', inplace=True)
td_awac = load('AWAC_test01_earth2inst.nc')
td_awac = rotate2(td_awac, 'beam', inplace=True)
td_sig = load('BenchFile01_rotate_beam2inst.nc')
td_sig = rotate2(td_sig, 'beam', inplace=True)
td_sig_i = load('Sig1000_IMU_rotate_beam2inst.nc')
td_sig_i = rotate2(td_sig_i, 'beam', inplace=True)
td_sig_ie = load('Sig500_Echo_earth2inst.nc')
td_sig_ie = rotate2(td_sig_ie, 'beam')
if make_data:
save(td_awac, 'AWAC_test01_inst2beam.nc')
save(td_sig_ie, 'Sig500_Echo_inst2beam.nc')
return
cd_td = tr.dat_rdi.copy(deep=True)
cd_awac = load('AWAC_test01_inst2beam.nc')
cd_sig = tr.dat_sig.copy(deep=True)
cd_sig_i = tr.dat_sig_i.copy(deep=True)
cd_sig_ie = load('Sig500_Echo_inst2beam.nc')
# # The reverse RDI rotation doesn't work b/c of NaN's in one beam
# # that propagate to others, so we impose that here.
cd_td['vel'].values[:, np.isnan(cd_td['vel'].values).any(0)] = np.NaN
assert_allclose(td, cd_td, atol=1e-5)
assert_allclose(td_awac, cd_awac, atol=1e-5)
assert_allclose(td_sig, cd_sig, atol=1e-5)
assert_allclose(td_sig_i, cd_sig_i, atol=1e-5)
assert_allclose(td_sig_ie, cd_sig_ie, atol=1e-5)
def test_rotate_inst2earth(make_data=False):
# AWAC & Sig500 are loaded in earth
td_awac = tr.dat_awac.copy(deep=True)
td_awac = rotate2(td_awac, 'inst')
td_sig_ie = tr.dat_sig_ie.copy(deep=True)
td_sig_ie = rotate2(rotate2(td_sig_ie,'earth'), 'inst')
td_sig_o = td_sig_ie.copy(deep=True)
td = rotate2(tr.dat_rdi, 'earth')
tdwr2 = rotate2(tr.dat_wr2, 'earth')
td_sig = load('BenchFile01_rotate_beam2inst.nc')
td_sig = rotate2(td_sig, 'earth', inplace=True)
td_sig_i = load('Sig1000_IMU_rotate_beam2inst.nc')
td_sig_i = rotate2(td_sig_i, 'earth', inplace=True)
if make_data:
save(td_awac, 'AWAC_test01_earth2inst.nc')
save(td, 'RDI_test01_rotate_inst2earth.nc')
save(tdwr2, 'winriver02_rotate_ship2earth.nc')
save(td_sig, 'BenchFile01_rotate_inst2earth.nc')
save(td_sig_i, 'Sig1000_IMU_rotate_inst2earth.nc')
save(td_sig_ie, 'Sig500_Echo_earth2inst.nc')
return
td_awac = rotate2(td_awac, 'earth', inplace=True)
td_sig_ie = rotate2(td_sig_ie, 'earth')
td_sig_o = rotate2(td_sig_o.drop_vars('orientmat'), 'earth')
cd = load('RDI_test01_rotate_inst2earth.nc')
cdwr2 = load('winriver02_rotate_ship2earth.nc')
cd_sig = load('BenchFile01_rotate_inst2earth.nc')
cd_sig_i = load('Sig1000_IMU_rotate_inst2earth.nc')
assert_allclose(td, cd, atol=1e-5)
assert_allclose(tdwr2, cdwr2, atol=1e-5)
assert_allclose(td_awac, tr.dat_awac, atol=1e-5)
#npt.assert_allclose(td_awac.vel.values, tr.dat_awac.vel.values, rtol=1e-7, atol=1e-3)
assert_allclose(td_sig, cd_sig, atol=1e-5)
assert_allclose(td_sig_i, cd_sig_i, atol=1e-5)
assert_allclose(td_sig_ie, tr.dat_sig_ie, atol=1e-5)
npt.assert_allclose(td_sig_o.vel, tr.dat_sig_ie.vel, atol=1e-5)
def test_rotate_earth2inst():
td_rdi = load('RDI_test01_rotate_inst2earth.nc')
td_rdi = rotate2(td_rdi, 'inst', inplace=True)
tdwr2 = load('winriver02_rotate_ship2earth.nc')
tdwr2 = rotate2(tdwr2, 'inst', inplace=True)
td_awac = tr.dat_awac.copy(deep=True)
td_awac = rotate2(td_awac, 'inst') # AWAC is in earth coords
td_sig = load('BenchFile01_rotate_inst2earth.nc')
td_sig = rotate2(td_sig, 'inst', inplace=True)
td_sigi = load('Sig1000_IMU_rotate_inst2earth.nc')
td_sig_i = rotate2(td_sigi, 'inst', inplace=True)
cd_rdi = load('RDI_test01_rotate_beam2inst.nc')
cd_awac = load('AWAC_test01_earth2inst.nc')
cd_sig = load('BenchFile01_rotate_beam2inst.nc')
cd_sig_i = load('Sig1000_IMU_rotate_beam2inst.nc')
assert_allclose(td_rdi, cd_rdi, atol=1e-5)
assert_allclose(tdwr2, tr.dat_wr2, atol=1e-5)
assert_allclose(td_awac, cd_awac, atol=1e-5)
assert_allclose(td_sig, cd_sig, atol=1e-5)
#known failure due to orientmat, see test_vs_nortek
#assert_allclose(td_sig_i, cd_sig_i, atol=1e-3)
npt.assert_allclose(td_sig_i.accel.values, cd_sig_i.accel.values, atol=1e-3)
def test_rotate_earth2principal(make_data=False):
td_rdi = load('RDI_test01_rotate_inst2earth.nc')
td_sig = load('BenchFile01_rotate_inst2earth.nc')
td_awac = tr.dat_awac.copy(deep=True)
td_rdi.attrs['principal_heading'] = calc_principal_heading(td_rdi.vel.mean('range'))
td_sig.attrs['principal_heading'] = calc_principal_heading(td_sig.vel.mean('range'))
td_awac.attrs['principal_heading'] = calc_principal_heading(td_awac.vel.mean('range'),
tidal_mode=False)
td_rdi = rotate2(td_rdi, 'principal')
td_sig = rotate2(td_sig, 'principal')
td_awac = rotate2(td_awac, 'principal')
if make_data:
save(td_rdi, 'RDI_test01_rotate_earth2principal.nc')
save(td_sig, 'BenchFile01_rotate_earth2principal.nc')
save(td_awac, 'AWAC_test01_earth2principal.nc')
return
cd_rdi = load('RDI_test01_rotate_earth2principal.nc')
cd_sig = load('BenchFile01_rotate_earth2principal.nc')
cd_awac = load('AWAC_test01_earth2principal.nc')
assert_allclose(td_rdi, cd_rdi, atol=1e-5)
assert_allclose(td_awac, cd_awac, atol=1e-5)
assert_allclose(td_sig, cd_sig, atol=1e-5)
if __name__=='__main__':
test_rotate_beam2inst()
test_rotate_inst2beam()
test_rotate_inst2earth()
test_rotate_earth2inst()
test_rotate_earth2principal()
``` |
{
"source": "JMcWhorter150/ArtemisAPI",
"score": 3
} |
#### File: ArtemisAPI/database/helpers.py
```python
def admin_helper(admin) -> dict:
return {
"id": str(admin['_id']),
"fullname": admin['fullname'],
"email": admin['email'],
}
def state_count_helper(state_count) -> dict:
return {
"id": str(state_count['_id']),
"date": state_count['date'],
"state": state_count["state"],
"ad_count": state_count["ad_count"],
"avg_age": state_count["avg_age"],
"email_count": state_count["email_count"],
"phone_count": state_count["phone_count"]
}
def city_count_helper(city_count) -> dict:
return {
"id": str(city_count['_id']),
"date": city_count['date'],
"city": city_count["city"],
"ad_count": city_count["ad_count"],
"avg_age": city_count["avg_age"],
"email_count": city_count["email_count"],
"phone_count": city_count["phone_count"]
}
```
#### File: ArtemisAPI/routes/city.py
```python
from fastapi import APIRouter, Body
from fastapi.encoders import jsonable_encoder
from datetime import date
from models.admin import ResponseModel
from models.city_request import CityCountBody
from models.city_response import CityAdResponse, CityEmailResponse, CityPhoneResponse, CityCountResponse
from controllers.city import query_city_counts, query_city_ads, query_city_emails, query_city_phones
router = APIRouter()
@router.post("/counts", response_description="Total data about city scrapes")
async def get_counts(city_count: CityCountBody = Body(...)) -> CityCountResponse:
city_count_dict = jsonable_encoder(city_count)
filter = city_count_dict.get('filter')
options = city_count_dict.get('options')
counts = await query_city_counts(filter, options)
return ResponseModel(counts, 'City count data retrieved successfully') \
if counts['totalResults'] > 0 \
else ResponseModel(
counts, "No results found"
)
@router.get("/ads", response_description="Get just total ads for city for date range")
async def get_ads(city: str, date_from: date, date_to: date) -> CityAdResponse:
ads = await query_city_ads(city, date_from, date_to)
return ResponseModel(ads, 'City ad data retrieved successfully') \
if ads["ads"] > 0 \
else ResponseModel(
ads, "No results found"
)
@router.get("/phones", response_description="Get just total phones for city for date range")
async def get_phones(city: str, date_from: date, date_to: date) -> CityPhoneResponse:
phones = await query_city_phones(city, date_from, date_to)
return ResponseModel(phones, 'City phone data retrieved successfully') \
if phones["phones"] > 0 \
else ResponseModel(
phones, "No results found"
)
@router.get("/emails", response_description="Get just total emails for city for date range")
async def get_emails(city: str, date_from: date, date_to: date) -> CityEmailResponse:
emails = await query_city_emails(city, date_from, date_to)
return ResponseModel(emails, 'City email data retrieved successfully') \
if emails["emails"] > 0 \
else ResponseModel(
emails, "No results found"
)
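# Illustrative request (hypothetical values): GET
# /city/ads?city=Nashville&date_from=2021-01-01&date_to=2021-01-31 returns a
# ResponseModel wrapping the ad count for that city and date range, assuming
# this router is mounted under a '/city' prefix.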
```
#### File: ArtemisAPI/routes/state.py
```python
from fastapi import APIRouter, Body
from fastapi.encoders import jsonable_encoder
from datetime import date
from models.admin import ResponseModel
from models.state_request import StateCountBody
from models.state_response import StateAdResponse, StateEmailResponse, StatePhoneResponse, StateCountResponse
from controllers.state import query_state_counts, query_state_ads, query_state_emails, query_state_phones
router = APIRouter()
@router.post("/counts", response_description="Total data about state scrapes")
async def get_counts(state_count: StateCountBody = Body(...)) -> StateCountResponse:
state_count_dict = jsonable_encoder(state_count)
filter = state_count_dict.get('filter')
options = state_count_dict.get('options')
counts = await query_state_counts(filter, options)
return ResponseModel(counts, 'State count data retrieved successfully') \
if counts['totalResults'] > 0 \
else ResponseModel(
counts, "No results found"
)
@router.get("/ads", response_description="Get just total ads for state for date range")
async def get_ads(state: str, date_from: date, date_to: date) -> StateAdResponse:
ads = await query_state_ads(state, date_from, date_to)
return ResponseModel(ads, 'State ad data retrieved successfully') \
if ads["ads"] > 0 \
else ResponseModel(
ads, "No results found"
)
@router.get("/phones", response_description="Get just total phones for state for date range")
async def get_phones(state: str, date_from: date, date_to: date) -> StatePhoneResponse:
phones = await query_state_phones(state, date_from, date_to)
    return ResponseModel(phones, 'State phone data retrieved successfully') \
if phones["phones"] > 0 \
else ResponseModel(
phones, "No results found"
)
@router.get("/emails", response_description="Get just total emails for state for date range")
async def get_emails(state: str, date_from: date, date_to: date) -> StateEmailResponse:
emails = await query_state_emails(state, date_from, date_to)
    return ResponseModel(emails, 'State email data retrieved successfully') \
if emails["emails"] > 0 \
else ResponseModel(
emails, "No results found"
)
``` |
{
"source": "JMD110/huaweicloud-sdk-python-obs",
"score": 3
} |
#### File: huaweicloud-sdk-python-obs/examples/concurrent_copy_part_sample.py
```python
AK = '*** Provide your Access Key ***'
SK = '*** Provide your Secret Key ***'
server = 'https://your-endpoint'
bucketName = 'my-obs-bucket-demo'
sourceBucketName = bucketName
sourceObjectKey = 'my-obs-object-key-demo'
objectKey = sourceObjectKey + '-back'
sampleFilePath = '*** Provide your local file path ***'
import platform, os, threading, multiprocessing
IS_WINDOWS = platform.system() == 'Windows' or os.name == 'nt'
def createSampleFile(sampleFilePath):
if not os.path.exists(sampleFilePath):
_dir = os.path.dirname(sampleFilePath)
if not os.path.exists(_dir):
os.makedirs(_dir, mode=0o755)
import uuid
index = 1000000
with open(sampleFilePath, 'w') as f:
while index >= 0:
f.write(str(uuid.uuid1()) + '\n')
f.write(str(uuid.uuid4()) + '\n')
index -= 1
return sampleFilePath
from obs import *
def doCopyPart(partETags, bucketName, objectKey, partNumber, uploadId, copySource, copySourceRange):
if IS_WINDOWS:
global obsClient
else:
obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server)
resp = obsClient.copyPart(bucketName=bucketName, objectKey=objectKey, partNumber=partNumber, uploadId=uploadId, copySource=copySource, copySourceRange=copySourceRange)
if resp.status < 300:
partETags[partNumber] = resp.body.etag
print('Part#' + str(partNumber) + 'done\n')
else:
print('\tPart#' + str(partNumber) + ' failed\n')
if __name__ == '__main__':
# Constructs a obs client instance with your account for accessing OBS
obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server)
# Create bucket
print('Create a new bucket for demo\n')
resp = obsClient.createBucket(bucketName)
if resp.status >= 300:
raise Exception('Create Bucket failed')
# # Upload an object to your source bucket
print('Uploading a new object to OBS from a file\n')
resp = obsClient.putFile(sourceBucketName, sourceObjectKey, sampleFilePath)
if resp.status >= 300:
raise Exception('putFile failed')
# Claim a upload id firstly
resp = obsClient.initiateMultipartUpload(bucketName, objectKey)
if resp.status >= 300:
raise Exception('initiateMultipartUpload failed')
uploadId = resp.body.uploadId
print('Claiming a new upload id ' + uploadId + '\n')
# 5MB
partSize = 5 * 1024 * 1024
resp = obsClient.getObjectMetadata(sourceBucketName, sourceObjectKey)
if resp.status >= 300:
raise Exception('getObjectMetadata failed')
header = dict(resp.header)
objectSize = int(header.get('content-length'))
partCount = int(objectSize / partSize) if (objectSize % partSize == 0) else int(objectSize / partSize) + 1
if partCount > 10000:
raise Exception('Total parts count should not exceed 10000')
print('Total parts count ' + str(partCount) + '\n')
# Upload multiparts by copy mode
print('Begin to upload multiparts to OBS by copy mode \n')
proc = threading.Thread if IS_WINDOWS else multiprocessing.Process
partETags = dict() if IS_WINDOWS else multiprocessing.Manager().dict()
processes = []
for i in range(partCount):
rangeStart = i * partSize
rangeEnd = objectSize - 1 if (i + 1 == partCount) else rangeStart + partSize - 1
p = proc(target=doCopyPart, args=(partETags, bucketName, objectKey, i+1, uploadId, sourceBucketName + '/' + sourceObjectKey, str(rangeStart) + '-' + str(rangeEnd)))
p.daemon = True
processes.append(p)
for p in processes:
p.start()
for p in processes:
p.join()
if len(partETags) != partCount:
raise Exception('copyParts fail due to some parts are not finished yet')
# View all parts uploaded recently
print('Listing all parts......')
resp = obsClient.listParts(bucketName, objectKey, uploadId)
if resp.status < 300:
for part in resp.body.parts:
print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag)
print('\n')
else:
raise Exception('listParts failed')
# Complete to upload multiparts
partETags = sorted(partETags.items(), key=lambda d: d[0])
parts = []
for key, value in partETags:
parts.append(CompletePart(partNum=key, etag=value))
print('Completing to upload multiparts\n')
resp = obsClient.completeMultipartUpload(bucketName, objectKey, uploadId, CompleteMultipartUploadRequest(parts))
if resp.status < 300:
print('Succeed to complete multiparts into an object named ' + objectKey + '\n')
else:
print('errorCode:', resp.errorCode)
print('errorMessage:', resp.errorMessage)
raise Exception('completeMultipartUpload failed')
```
#### File: huaweicloud-sdk-python-obs/examples/concurrent_upload_part_sample.py
```python
AK = '*** Provide your Access Key ***'
SK = '*** Provide your Secret Key ***'
server = 'https://your-endpoint'
bucketName = 'my-obs-bucket-demo'
objectKey = 'my-obs-object-key-demo'
sampleFilePath = '*** Provide your local file path ***'
import platform, os, threading, multiprocessing
IS_WINDOWS = platform.system() == 'Windows' or os.name == 'nt'
def createSampleFile(sampleFilePath):
if not os.path.exists(sampleFilePath):
_dir = os.path.dirname(sampleFilePath)
if not os.path.exists(_dir):
os.makedirs(_dir, mode=0o755)
import uuid
index = 1000000
with open(sampleFilePath, 'w') as f:
while index >= 0:
f.write(str(uuid.uuid1()) + '\n')
f.write(str(uuid.uuid4()) + '\n')
index -= 1
return sampleFilePath
from obs import *
def doUploadPart(partETags, bucketName, objectKey, partNumber, uploadId, filePath, partSize, offset):
if IS_WINDOWS:
global obsClient
else:
obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server)
resp = obsClient.uploadPart(bucketName, objectKey, partNumber, uploadId, content=filePath, isFile=True, partSize=partSize, offset=offset)
if resp.status < 300:
partETags[partNumber] = resp.body.etag
        print('Part#' + str(partNumber) + ' done\n')
else:
print('\tPart#' + str(partNumber) + ' failed\n')
if __name__ == '__main__':
# Constructs a obs client instance with your account for accessing OBS
obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server)
# Create bucket
print('Create a new bucket for demo\n')
resp = obsClient.createBucket(bucketName)
if resp.status >= 300:
raise Exception('Create Bucket failed')
# Claim a upload id firstly
resp = obsClient.initiateMultipartUpload(bucketName, objectKey)
if resp.status >= 300:
raise Exception('initiateMultipartUpload failed')
uploadId = resp.body.uploadId
print('Claiming a new upload id ' + uploadId + '\n')
# 5MB
partSize = 5 * 1024 * 1024
#createSampleFile(sampleFilePath)
fileLength = os.path.getsize(sampleFilePath)
partCount = int(fileLength / partSize) if (fileLength % partSize == 0) else int(fileLength / partSize) + 1
if partCount > 10000:
raise Exception('Total parts count should not exceed 10000')
print('Total parts count ' + str(partCount) + '\n')
# Upload multiparts to your bucket
print('Begin to upload multiparts to OBS from a file\n')
proc = threading.Thread if IS_WINDOWS else multiprocessing.Process
partETags = dict() if IS_WINDOWS else multiprocessing.Manager().dict()
processes = []
for i in range(partCount):
offset = i * partSize
currPartSize = (fileLength - offset) if i + 1 == partCount else partSize
p = proc(target=doUploadPart, args=(partETags, bucketName, objectKey, i + 1, uploadId, sampleFilePath, currPartSize, offset))
p.daemon = True
processes.append(p)
for p in processes:
p.start()
for p in processes:
p.join()
if len(partETags) != partCount:
raise Exception('Upload multiparts fail due to some parts are not finished yet')
# View all parts uploaded recently
print('Listing all parts......')
resp = obsClient.listParts(bucketName, objectKey, uploadId)
if resp.status < 300:
for part in resp.body.parts:
print('\tPart#' + str(part.partNumber) + ', ETag=' + part.etag)
print('\n')
else:
raise Exception('listParts failed')
# Complete to upload multiparts
partETags = sorted(partETags.items(), key=lambda d: d[0])
parts = []
for key, value in partETags:
parts.append(CompletePart(partNum=key, etag=value))
print('Completing to upload multiparts\n')
resp = obsClient.completeMultipartUpload(bucketName, objectKey, uploadId, CompleteMultipartUploadRequest(parts))
if resp.status < 300:
print('Succeed to complete multiparts into an object named ' + objectKey + '\n')
else:
print('errorCode:', resp.errorCode)
print('errorMessage:', resp.errorMessage)
raise Exception('completeMultipartUpload failed')
```
#### File: huaweicloud-sdk-python-obs/examples/obs_python_sample.py
```python
from obs import *
AK = '*** Provide your Access Key ***'
SK = '*** Provide your Secret Key ***'
server = 'https://your-endpoint'
bucketName = 'my-obs-bucket-demo'
# create ObsClient instance
obsClient = ObsClient(access_key_id=AK, secret_access_key=SK, server=server)
bucketClient = obsClient.bucketClient(bucketName)
# init log
def initLog():
obsClient.initLog(LogConf('../log.conf'), 'obsclient')
# create bucket
def CreateBucket():
headers = CreateBucketHeader(aclControl='public-read', storageClass=StorageClass.WARM)
resp = bucketClient.createBucket(header=headers)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',resHeader:', resp.header)
# delete bucket
def DeleteBucket():
resp = bucketClient.deleteBucket()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',resHeader:', resp.header)
# list buckets
def ListBuckets():
resp = obsClient.listBuckets()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
listBucket = resp.body
if listBucket:
print('owner_id:', listBucket.owner.owner_id)
i = 0
for item in listBucket.buckets:
print('buckets[', i, ']:')
print('bucket_name:', item.name, ',create_date:', item.create_date)
i += 1
# head bucket
def HeadBucket():
resp = bucketClient.headBucket()
if resp.status < 300:
print('bucket exists')
elif resp.status == 404:
print('bucket does not exist')
else:
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',resHeader:', resp.header)
# get bucket metadata
def GetBucketMetadata():
resp = bucketClient.getBucketMetadata(origin='www.example.com', requestHeaders='header1')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('storageClass:', resp.body.storageClass)
print('accessContorlAllowOrigin:', resp.body.accessContorlAllowOrigin)
print('accessContorlMaxAge:', resp.body.accessContorlMaxAge)
print('accessContorlExposeHeaders:', resp.body.accessContorlExposeHeaders)
print('accessContorlAllowMethods:', resp.body.accessContorlAllowMethods)
print('accessContorlAllowHeaders:', resp.body.accessContorlAllowHeaders)
# set bucket quota
def SetBucketQuota():
resp = bucketClient.setBucketQuota(quota=1048576 * 600)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket quota
def GetBucketQuota():
resp = bucketClient.getBucketQuota()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('quota:', resp.body.quota)
# set bucket storagePolicy
def SetBucketStoragePolicy():
resp = bucketClient.setBucketStoragePolicy(storageClass='STANDARD')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket storagePolicy
def GetBucketStoragePolicy():
resp = bucketClient.getBucketStoragePolicy()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('storageClass:', resp.body.storageClass)
# get bucket storageinfo
def GetBucketStorageInfo():
resp = bucketClient.getBucketStorageInfo()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('size:', resp.body.size, ',objectNumber:', resp.body.objectNumber)
# set bucket acl
def SetBucketAcl():
Lowner = Owner(owner_id='ownerid')
Lgrantee1 = Grantee(grantee_id='userid', group=None)
Lgrantee2 = Grantee(group=Group.LOG_DELIVERY)
Lgrant1 = Grant(grantee=Lgrantee1, permission=Permission.READ)
Lgrant2 = Grant(grantee=Lgrantee2, permission=Permission.READ_ACP)
Lgrant3 = Grant(grantee=Lgrantee2, permission=Permission.WRITE)
Lgrants = [Lgrant1, Lgrant2, Lgrant3]
Lacl = ACL(owner=Lowner, grants=Lgrants)
resp = bucketClient.setBucketAcl(acl=Lacl)
# resp = obsClient.setBucketAcl(bucketName=bucketName, aclControl='public-read-write')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket acl
def GetBucketAcl():
resp = bucketClient.getBucketAcl()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('owner_id:', resp.body.owner.owner_id)
i = 0
for grant in resp.body.grants:
print('grants[', i, ']:')
print('permission:', grant.permission)
print('grantee_id:', grant.grantee.grantee_id, ',group:', grant.grantee.group)
i += 1
# set bucket policy
def SetBucketPolicy():
LpolicyJSON = 'your policy'
resp = bucketClient.setBucketPolicy(policyJSON=LpolicyJSON)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket policy
def GetBucketPolicy():
resp = bucketClient.getBucketPolicy()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('policyJSON:', resp.body)
# delete bucket policy
def DeleteBucketPolicy():
resp = bucketClient.deleteBucketPolicy()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# set bucket versioning configuration
def SetBucketVersioning():
resp = bucketClient.setBucketVersioning(status='Enabled')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket versioning configuration
def GetBucketVersioning():
resp = bucketClient.getBucketVersioning()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
print('status:', resp.body)
# list versions
def ListVersions():
lversion = Versions(prefix=None, key_marker=None, max_keys=10, delimiter=None, version_id_marker=None)
resp = bucketClient.listVersions(version=lversion)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('name:', resp.body.head.name, ',prefix:', resp.body.head.prefix, ',keyMarker:', resp.body.head.keyMarker, ',maxKeys:', resp.body.head.maxKeys)
print('nextKeyMarker:', resp.body.head.nextKeyMarker, ',nextVersionIdMarker:', resp.body.head.nextVersionIdMarker, ',versionIdMarker:', resp.body.head.versionIdMarker, ',isTruncated:', resp.body.head.isTruncated)
i = 0
for version in resp.body.versions:
print('versions[', i, ']:')
print('owner_id:', version.owner.owner_id)
print('key:', version.key)
print('lastModified:', version.lastModified, ',versionId:', version.versionId, ',etag:', version.etag, ',storageClass:', version.storageClass, ',isLatest:', version.isLatest, ',size:', version.size)
i += 1
i = 0
for marker in resp.body.markers:
print('markers[', i, ']:')
print('owner_id:', marker.owner.owner_id)
print('key:', marker.key)
print('key:', marker.key, ',versionId:', marker.versionId, ',isLatest:', marker.isLatest, ',lastModified:', marker.lastModified)
i += 1
i = 0
for prefix in resp.body.commonPrefixs:
print('commonPrefixs[', i, ']')
print('prefix:', prefix.prefix)
i += 1
# list objects
def ListObjects():
resp = bucketClient.listObjects(prefix=None, marker=None, max_keys=10, delimiter=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('name:', resp.body.name, ',prefix:', resp.body.prefix, ',marker:', resp.body.marker, ',max_keys:', resp.body.max_keys)
print('delimiter:', resp.body.delimiter, ',is_truncated:', resp.body.is_truncated, ',next_marker:', resp.body.next_marker)
i = 0
for content in resp.body.contents:
print('contents[', i, ']:')
print('owner_id:', content.owner.owner_id)
print('key:', content.key, ',lastModified:', content.lastModified, ',etag:', content.etag, ',size:', content.size, ',storageClass:', content.storageClass)
i += 1
i = 0
for prefix in resp.body.commonPrefixs:
print('commonprefixs[', i, ']:')
print('prefix:', prefix.prefix)
i += 1
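# Note: this listing returns at most max_keys results; when resp.body.is_truncated is True,
# the next page can be fetched by passing resp.body.next_marker as the marker argument of the
# following listObjects call (assumption based on the S3-style listing fields printed above).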
# set bucket lifecycle configuration
def SetBucketLifecycle():
Lexpiration = Expiration(date=DateTime(2030, 6, 10), days=None)
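# Assumption: an Expiration rule takes either an absolute date or a number of days, not both
# (as in S3-style lifecycle configuration), which is why days is left as None here.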
noncurrentVersionExpiration = NoncurrentVersionExpiration(noncurrentDays=60)
Lrule = Rule(id='101', prefix='test', status='Enabled', expiration=Lexpiration, noncurrentVersionExpiration=noncurrentVersionExpiration)
Lrules = [Lrule]
Llifecycle = Lifecycle(rule=Lrules)
resp = bucketClient.setBucketLifecycle(lifecycle=Llifecycle)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket lifecycle configuration
def GetBucketLifecycle():
resp = bucketClient.getBucketLifecycle()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
i = 0
for rule in resp.body.lifecycleConfig.rule:
print('rule[', i, ']:')
print('id:', rule.id, ',prefix:', rule.prefix, ',status:', rule.status)
print('expiration:', rule.expiration)
print('noncurrentVersionExpiration:', rule.noncurrentVersionExpiration)
i += 1
# delete bucket lifecycle configuration
def DeleteBucketLifecycle():
resp = bucketClient.deleteBucketLifecycle()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# set bucket website configuration
def SetBucketWebsite():
Lweb = RedirectAllRequestTo(hostName='www.xxx.com', protocol='http')
Lindex = IndexDocument(suffix='index.html')
Lerr = ErrorDocument(key='error.html')
Lcondition = Condition(keyPrefixEquals=None, httpErrorCodeReturnedEquals=404)
Lredirect = Redirect(protocol='http', hostName=None, replaceKeyPrefixWith=None, replaceKeyWith='NotFound.html',
httpRedirectCode=None)
Lrout = RoutingRule(condition=Lcondition, redirect=Lredirect)
Lrouts = [Lrout, Lrout]
Lwebsite = WebsiteConfiguration(redirectAllRequestTo=None, indexDocument=Lindex, errorDocument=Lerr,
routingRules=Lrouts)
resp = bucketClient.setBucketWebsite(website=Lwebsite)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket website configuration
def GetBucketWebsite():
resp = bucketClient.getBucketWebsite()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
if resp.body.redirectAllRequestTo:
print('redirectAllRequestTo.hostName:', resp.body.redirectAllRequestTo.hostName, ',redirectAllRequestTo.Protocol:', resp.body.redirectAllRequestTo.protocol)
if resp.body.indexDocument:
print('indexDocument.suffix:', resp.body.indexDocument.suffix)
if resp.body.errorDocument:
print('errorDocument.key:', resp.body.errorDocument.key)
if resp.body.routingRules:
i = 0
for rout in resp.body.routingRules:
print('routingRule[', i, ']:')
i += 1
print('condition.keyPrefixEquals:', rout.condition.keyPrefixEquals, ',condition.httpErrorCodeReturnedEquals:', rout.condition.httpErrorCodeReturnedEquals)
print('redirect.protocol:', rout.redirect.protocol, ',redirect.hostName:', rout.redirect.hostName, ',redirect.replaceKeyPrefixWith:', rout.redirect.replaceKeyPrefixWith, ',redirect.replaceKeyWith:', rout.redirect.replaceKeyWith, ',redirect.httpRedirectCode:', rout.redirect.httpRedirectCode)
# delete bucket website configuration
def DeleteBucketWebsite():
resp = bucketClient.deleteBucketWebsite()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# set bucket logging configuration
def SetBucketLogging():
Lgrantee = Grantee(grantee_id='userid', group=None)
Lgrantee1 = Grantee(grantee_id=None, group=Group.ALL_USERS)
Lgrant1 = Grant(grantee=Lgrantee, permission=Permission.WRITE)
Lgrant2 = Grant(grantee=Lgrantee1, permission=Permission.READ)
LgrantList = [Lgrant1, Lgrant2]
Llog = Logging(targetBucket='bucket003', targetPrefix='log_1', targetGrants=LgrantList, agency='your agency')
resp = bucketClient.setBucketLogging(logstatus=Llog)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket logging configuration
def GetBucketLogging():
resp = bucketClient.getBucketLogging()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('targetBucket:', resp.body.targetBucket, 'targetPrefix:', resp.body.targetPrefix)
i = 0
for grant in resp.body.targetGrants:
print('targetGrant[', i, ']:')
i += 1
print('permission:', grant.permission, ',grantee.grantee_id:', grant.grantee.grantee_id, ',grantee.group:', grant.grantee.group)
# get bucket location
def GetBucketLocation():
resp = bucketClient.getBucketLocation()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('location:', resp.body.location)
# set bucket tagging
def SetBucketTagging():
tagInfo = TagInfo()
tagInfo.addTag('testKey1', 'testValue1').addTag('testKey2','testValue2')
resp = bucketClient.setBucketTagging(tagInfo=tagInfo)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# delete bucket tagging
def DeleteBucketTagging():
resp = bucketClient.deleteBucketTagging()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket tagging
def GetBucketTagging():
resp = bucketClient.getBucketTagging()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
for tag in resp.body.tagSet:
print('{0}:{1}'.format(tag.key, tag.value))
# set bucket cors
def SetBucketCors():
cors1 = CorsRule(id='101', allowedMethod=['PUT', 'POST', 'GET', 'DELETE'],
allowedOrigin=['www.xxx.com', 'www.x.com'], allowedHeader=['header-1', 'header-2'],
maxAgeSecond=100, exposeHeader=['head1'])
cors2 = CorsRule(id='102', allowedMethod=['PUT', 'POST', 'GET', 'DELETE'],
allowedOrigin=['www.xxx.com', 'www.x.com'], allowedHeader=['header-1', 'header-2'],
maxAgeSecond=100, exposeHeader=['head1'])
corsList = [cors1, cors2]
resp = bucketClient.setBucketCors(corsList)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket cors
def GetBucketCors():
resp = bucketClient.getBucketCors()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body is not None:
index = 1
for rule in resp.body:
print('corsRule [' + str(index) + ']')
print('id:', rule.id)
print('allowedMethod', rule.allowedMethod)
print('allowedOrigin', rule.allowedOrigin)
print('allowedHeader', rule.allowedHeader)
print('maxAgeSecond', rule.maxAgeSecond)
print('exposeHeader', rule.exposeHeader)
index += 1
# delete bucket cors
def DeleteBucketCors():
resp = bucketClient.deleteBucketCors()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# set bucket notification
def SetBucketNotification():
fr1 = FilterRule(name='prefix', value='smn')
fr2 = FilterRule(name='suffix', value='.jpg')
topicConfiguration = TopicConfiguration(id='001', topic='urn:smn:region3:35667523534:topic1', events=[EventType.OBJECT_CREATED_ALL], filterRules=[fr1, fr2])
resp = bucketClient.setBucketNotification(Notification(topicConfigurations=[topicConfiguration]))
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get bucket notification
def GetBucketNotification():
resp = bucketClient.getBucketNotification()
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body is not None:
for topicConfiguration in resp.body.topicConfigurations:
print('id:', topicConfiguration.id)
print('topic:', topicConfiguration.topic)
print('events:', topicConfiguration.events)
index = 1
for rule in topicConfiguration.filterRules:
print('rule [' + str(index) + ']')
print('name:', rule.name)
print('value:', rule.value)
index += 1
# list multipart uploads
def ListMultipartUploads():
Lmultipart = ListMultipartUploadsRequest(delimiter=None, prefix=None, max_uploads=10, key_marker=None,
upload_id_marker=None)
resp = bucketClient.listMultipartUploads(multipart=Lmultipart)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('bucket:', resp.body.bucket, ',keyMarker:', resp.body.keyMarker, 'uploadIdMarker:', resp.body.uploadIdMarker, ',nextKeyMarker:', resp.body.nextKeyMarker, 'delimiter:', resp.body.delimiter)
print('nextUploadIdMarker:', resp.body.nextUploadIdMarker, ',maxUploads:', resp.body.maxUploads, 'isTruncated:', resp.body.isTruncated, ',prefix:', resp.body.prefix)
if resp.body.upload:
i = 0
for upload in resp.body.upload:
print('upload[', i, ']:')
i += 1
print('key:', upload.key, ',uploadId:', upload.uploadId, ',storageClass:', upload.storageClass, ',initiated:', upload.initiated)
if upload.owner:
print('owner.owner_id:', upload.owner.owner_id)
if upload.initiator:
print('initiator.id:', upload.initiator.id, 'initiator.name:', upload.initiator.name)
if resp.body.commonPrefixs:
i = 0
for commonPrefix in resp.body.commonPrefixs:
print('commonPrefix[', i, ']:')
i += 1
print('prefix:', commonPrefix.prefix)
# set object acl
def SetObjectAcl():
Lowner = Owner(owner_id='ownerid')
Lgrantee = Grantee(grantee_id='userid', group=None)
Lgrant = Grant(grantee=Lgrantee, permission=Permission.READ)
Lgrants = [Lgrant]
Lacl = ACL(owner=Lowner, grants=Lgrants)
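# Note: Lacl is built above for illustration but is not passed to the call below, which applies
# the canned ACL 'public-read-write' via aclControl instead. To use the custom ACL, pass acl=Lacl
# and leave aclControl as None (assumption: only one of the two should be supplied).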
resp = bucketClient.setObjectAcl(objectKey='test.txt', acl=None, versionId=None,
aclControl='public-read-write')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get object acl
def GetObjectAcl():
resp = bucketClient.getObjectAcl(objectKey='test.txt', versionId=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('owner_id:', resp.body.owner.owner_id)
i = 0
for grant in resp.body.grants:
print('Grant[', i, ']:')
i += 1
print('permission:', grant.permission)
print('grantee_id:', grant.grantee.grantee_id, ',grantee.group:', grant.grantee.group)
# delete object
def DeleteObject():
resp = bucketClient.deleteObject(objectKey='test.txt', versionId=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# delete objects
def DeleteObjects():
Lobject1 = Object(key='test.xml', versionId=None)
Lobject2 = Object(key='test.txt', versionId=None)
Lobject3 = Object(key='test', versionId=None)
Lobjects = [Lobject1, Lobject2, Lobject3]
Lreq = DeleteObjectsRequest(quiet=False, objects=Lobjects)
resp = bucketClient.deleteObjects(deleteObjectsRequest=Lreq)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
if resp.body.deleted:
i = 0
for delete in resp.body.deleted:
print('deleted[', i, ']:')
i += 1
print('key:', delete.key, ',deleteMarker:', delete.deleteMarker, ',deleteMarkerVersionId:', delete.deleteMarkerVersionId)
if resp.body.error:
i = 0
for err in resp.body.error:
print('error[', i, ']:')
print('key:', err.key, ',code:', err.code, ',message:', err.message)
i += 1
# abort multipart upload
def AbortMultipartUpload():
resp = bucketClient.abortMultipartUpload(objectKey='test.zip', uploadId='uploadid')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# initiate multipart upload
def InitiateMultipartUpload():
resp = bucketClient.initiateMultipartUpload(objectKey='test.zip', websiteRedirectLocation=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('bucketName:', resp.body.bucketName, ',objectKey:', resp.body.objectKey, ',uploadId:', resp.body.uploadId)
# complete multipart upload
def CompleteMultipartUpload():
Lpart1 = CompletePart(partNum=1, etag='etagvalue1')
Lpart2 = CompletePart(partNum=2, etag='etagvalue2')
Lparts = []
Lparts.append(Lpart1)
Lparts.append(Lpart2)
LcompleteMultipartUploadRequest = CompleteMultipartUploadRequest(parts=Lparts)
resp = bucketClient.completeMultipartUpload(objectKey='test.zip', uploadId='uploadid',
completeMultipartUploadRequest=LcompleteMultipartUploadRequest)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('location:', resp.body.location, ',bucket:', resp.body.bucket, ',key:', resp.body.key, ',etag:', resp.body.etag)
# upload part
def UploadPart():
resp = bucketClient.uploadPart(objectKey='test.zip', partNumber=1, uploadId='uploadid',
content='/temp/bigfile.zip', isFile=True, partSize=100 * 1024 * 1024, offset=0)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',header:', resp.header)
etag1 = dict(resp.header).get('etag')
print(etag1)
resp = bucketClient.uploadPart(objectKey='test.zip', partNumber=2, uploadId='uploadid',
content='/temp/bigfile.zip', isFile=True, partSize=200 * 1024 * 1024,
offset=100 * 1024 * 1024)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',header:', resp.header)
etag2 = dict(resp.header).get('etag')
print(etag2)
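# The etag values collected above are the ones that would be supplied as CompletePart entries
# when finishing the upload (see CompleteMultipartUpload above).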
# copy part
def CopyPart():
resp = bucketClient.copyPart(objectKey='test.txt', partNumber=1, uploadId='uploadid',
copySource='bucket002/test.txt')
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('lastModified:', resp.body.lastModified, ',etag:', resp.body.etag)
# list parts
def ListParts():
resp = bucketClient.listParts(objectKey='test.zip', uploadId='uploadid', maxParts=None,
partNumberMarker=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('bucketName:', resp.body.bucketName, ',objectKey:', resp.body.objectKey, ',uploadId:', resp.body.uploadId, ',storageClass:', resp.body.storageClass,)
print('partNumbermarker:', resp.body.partNumbermarker, ',nextPartNumberMarker:', resp.body.nextPartNumberMarker, ',maxParts:', resp.body.maxParts, ',isTruncated:', resp.body.isTruncated,)
if resp.body.initiator:
print('initiator.name:', resp.body.initiator.name, ',initiator.id:', resp.body.initiator.id)
if resp.body.parts:
i = 0
for part in resp.body.parts:
print('part[', i, ']:')
i += 1
print('partNumber:', part.partNumber, ',lastModified:', part.lastModified, ',etag:', part.etag, ',size:', part.size)
# restore object
def RestoreObject():
resp = bucketClient.restoreObject(objectKey='test.txt', days=1, versionId=None, tier=RestoreTier.EXPEDITED)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# get object metadata
def GetObjectMetadata():
resp = bucketClient.getObjectMetadata(objectKey='test.txt', versionId=None)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
print('etag:', resp.body.etag)
print('lastModified:', resp.body.lastModified)
print('contentType:', resp.body.contentType)
print('contentLength:', resp.body.contentLength)
# put content
def PutContent():
Lheaders = PutObjectHeader(md5=None, acl='private', location=None, contentType='text/plain')
Lmetadata = {'key': 'value'}
resp = bucketClient.putContent(objectKey='test.txt', content='msg content to put',
metadata=Lmetadata, headers=Lheaders)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
print(resp.header)
def AppendObject():
Lheaders = AppendObjectHeader(md5=None, acl='private', location=None, contentType=None)
Lmetadata = {'key': 'value'}
content = AppendObjectContent()
content.content = 'msg content to put'
resp = bucketClient.appendObject(objectKey='test.txt', content=content,
metadata=Lmetadata, headers=Lheaders)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
print(resp.body)
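# Each subsequent append must continue from the position returned by the previous call,
# so nextPosition from the first response is used as the write offset for the second append.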
content.position = resp.body.nextPosition
resp = bucketClient.appendObject(objectKey='test.txt', content=content,
metadata=Lmetadata, headers=Lheaders)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# put file
def PutFile():
Lheaders = PutObjectHeader(md5=None, acl='private', location=None, contentType='text/plain')
Lmetadata = {'key': 'value'}
file_path = '/temp/test.txt'
resp = bucketClient.putFile(objectKey='test.txt', file_path=file_path,
metadata=Lmetadata, headers=Lheaders)
if isinstance(resp, list):
for k, v in resp:
print('objectKey', k, 'common msg:status:', v.status, ',errorCode:', v.errorCode, ',errorMessage:', v.errorMessage)
else:
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
# copy object
def CopyObject():
Lheader = CopyObjectHeader(acl=None, directive=None, if_match=None, if_none_match=None,
if_modified_since=DateTime(2017,6,6), if_unmodified_since=None,
location=None)
Lmetadata = {'key': 'value'}
resp = bucketClient.copyObject(sourceBucketName=bucketName, sourceObjectKey='test.txt',
destObjectKey='test-back.txt', metadata=Lmetadata, headers=Lheader)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage)
if resp.body:
print('lastModified:', resp.body.lastModified, ',etag:', resp.body.etag)
# get object
def GetObject():
LobjectRequest = GetObjectRequest(content_type='text/plain', content_language=None, expires=None,
cache_control=None, content_disposition=None, content_encoding=None,
versionId=None)
Lheaders = GetObjectHeader(range='0-10', if_modified_since=None, if_unmodified_since=None, if_match=None,
if_none_match=None)
loadStreamInMemory = False
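# Assumption about the SDK's return modes, mirroring the branches below: with
# loadStreamInMemory=True the whole object is buffered in resp.body.buffer; otherwise a readable
# stream may be exposed as resp.body.response, and if the object was written to downloadPath the
# saved location is reported in resp.body.url.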
resp = bucketClient.getObject(objectKey='test.txt', downloadPath='/temp/test',
getObjectRequest=LobjectRequest, headers=Lheaders, loadStreamInMemory=loadStreamInMemory)
print('common msg:status:', resp.status, ',errorCode:', resp.errorCode, ',errorMessage:', resp.errorMessage, ',header:', resp.header)
if loadStreamInMemory:
print(resp.body.buffer)
print(resp.body.size)
elif resp.body.response:
response = resp.body.response
chunk_size = 65536
if response is not None:
while True:
chunk = response.read(chunk_size)
if not chunk:
break
print(chunk)
response.close()
else:
print(resp.body.url)
if __name__ == '__main__':
# initLog()
#=========================================================
# bucket operations
# =========================================================
# CreateBucket()
# DeleteBucket()
ListBuckets()
# HeadBucket()
# GetBucketMetadata()
# SetBucketQuota()
# GetBucketQuota()
# SetBucketStoragePolicy()
# GetBucketStoragePolicy()
# GetBucketStorageInfo()
# SetBucketAcl()
# GetBucketAcl()
# SetBucketPolicy()
# GetBucketPolicy()
# DeleteBucketPolicy()
# SetBucketVersioning()
# GetBucketVersioning()
# ListVersions()
# ListObjects()
# ListMultipartUploads()
# SetBucketLifecycle()
# GetBucketLifecycle()
# DeleteBucketLifecycle()
# SetBucketWebsite()
# GetBucketWebsite()
# DeleteBucketWebsite()
# SetBucketLogging()
# GetBucketLogging()
# GetBucketLocation()
# SetBucketTagging()
# GetBucketTagging()
# DeleteBucketTagging()
# SetBucketCors()
# GetBucketCors()
# DeleteBucketCors()
# SetBucketNotification()
# GetBucketNotification()
#=========================================================
# object operations
# =========================================================
# PutContent()
# AppendObject()
# CopyObject()
# PutFile()
# GetObject()
# GetObjectMetadata()
# SetObjectAcl()
# GetObjectAcl()
# DeleteObject()
# DeleteObjects()
# RestoreObject()
# AbortMultipartUpload()
# InitiateMultipartUpload()
# UploadPart()
# CompleteMultipartUpload()
# CopyPart()
# ListParts()
pass
``` |
{
"source": "jmdacruz/celery-k8s-operator",
"score": 2
} |
#### File: celery-k8s-operator/examples/celeryconfig.py
```python
def __main__():
"""Main initialization function for Celery configuration.
Keep imports inside this function so the module namespace stays as clean as possible.
Celery (in tools such as `celery inspect conf`) will list all
globals in this module as configuration items.
"""
import os
from ruamel.yaml import YAML
FILE = os.getenv("CELERY_CONFIG")
yaml = YAML()
with open(FILE) as fp:
content = yaml.load(fp)
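# 'content' is expected to be a mapping of Celery settings that is merged into the module
# globals, e.g. (hypothetical YAML):
#   broker_url: redis://localhost:6379/0
#   result_backend: redis://localhost:6379/1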
globals().update(content)
__main__()
``` |
{
"source": "jmdaemon/cs",
"score": 3
} |
#### File: cs/src/cs.py
```python
import argparse
import math
import matplotlib.pyplot as plt
import numpy as np
# cs.py - Graph mathematical equations and functions
def main():
parser = argparse.ArgumentParser(description='Graph mathematical equations and functions')
parser.add_argument('input' , type=str, help='Mathematical expression in x to graph')
parser.add_argument('range' , type=float, nargs=2, help='Start and end of the x range')
parser.add_argument('n' , type=int, help='Number of sample points')
args = parser.parse_args()
inp = args.input
beg, end = args.range
n = args.n
x = np.linspace(beg, end, n)
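# 'inp' is evaluated as a NumPy expression in terms of x, e.g. 'np.sin(x)' or 'x**2'
# (illustrative expressions; eval is kept here because the original script relies on it).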
fn = eval(inp)
fig, ax = plt.subplots()
ax.plot(x, fn)
plt.show()
``` |
{
"source": "jmdaemon/iproc",
"score": 3
} |
#### File: src/iproc/cli.py
```python
import argparse
from iproc.braillify import preprocess, braillify
from iproc.merge import merge
def main():
parser = argparse.ArgumentParser(description='Image processing utilities')
parser.add_argument('cmd' , type=str, help='Subcommand to run: braillify or merge')
parser.add_argument('opts', help='Arguments passed to the chosen subcommand', nargs=argparse.REMAINDER, default=None)
args = parser.parse_args()
cmd = args.cmd
opts = args.opts
match cmd:
case 'braillify':
fp = opts[0]
print(braillify(preprocess(fp)))
case 'merge':
imfp1 = opts[0]
imfp2 = opts[1]
output = opts[2]
attach = opts[3] if len(opts) > 3 else 'h'
merge(imfp1, imfp2, output, attach)
``` |
{
"source": "jmdaemon/sap",
"score": 2
} |
#### File: jmdaemon/sap/setup.py
```python
from setuptools import find_packages, setup
import codecs
import os.path
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
def get_version(rel_path):
''' Single source package version in src/package_name/__init__.py '''
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
with open('README.md', 'r', encoding='utf-8') as fh:
long_description = fh.read()
setup(
name='sapply',
version=get_version("src/sapply/__init__.py"),
license='MIT',
author='<NAME>',
author_email='<EMAIL>',
description='Easily apply arbitrary string manipulations on text.',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/jmdaemon/sapply',
project_urls={ 'Bug Tracker': 'https://github.com/jmdaemon/sapply/issues', },
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
],
package_dir={'': 'src'},
packages=find_packages(where='src'),
package_data={'': ['*.json']},
python_requires='>=3.6',
py_modules=['sapply.charmap', 'sapply.cli', 'sapply.flip', 'sapply.zalgo',
'sapply.morse', 'sapply.tokens', 'sapply.cmapdefs'],
install_requires=['wora', 'spacy', 'regex'],
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'sapply = sapply.cli:main',
],
},
test_suite='tests',
)
```
#### File: src/sapply/cli.py
```python
from sapply.cmapdefs import cmapdefs
from sapply.charmap import to_charmap
from sapply.flip import flip
from sapply.zalgo import zalgo
from sapply.morse import to_morse
from sapply.tokens import to_string,parse_transforms
from sapply import __version__
# Third Party Libraries
from wora.cli import reset_sigpipe_handling
# Standard library
import os
import re
import sys
import logging
from pkg_resources import resource_string
reset_sigpipe_handling()
def convert(char_map, text):
''' Convert characters from ASCII to a specific unicode character map '''
out = ""
for char in text:
if char in char_map:
out += char_map[char]
elif char.lower() in char_map:
out += char_map[char.lower()]
else:
out += char
return out
def strikethrough(text, strikeover):
''' Converts ASCII characters into unicode 'striked' characters '''
return ''.join([char + strikeover for char in text])
def mapto(cmap: str):
''' Maps ASCII characters to a unicode character map '''
file = cmapdefs[cmap]
conts = resource_string('sapply.resources', file)
logging.debug(f'Resource File Contents:\n{conts}')
return (to_charmap(conts))
def match_effects(cmd: str, text: str, opt=None) -> str:
''' Applies unicode character mappings to ASCII text '''
out = ''
opt = u'\u0336' if (opt == '-') else u'\u0334' # - or ~ strikethrough
logging.debug('In match_effects:')
match cmd:
case '--sub' : out = convert(mapto('subscript'), text)
case '--super' : out = convert(mapto('superscript'), text)
case '-ds' | '--doublestruck' : out = convert(mapto('doubleStruck'), text)
case '-oe' | '--oldeng' : out = convert(mapto('oldEnglish'), text)
case '-med' | '--medieval' : out = convert(mapto('medieval'), text)
case '-mono' | '--monospace' : out = convert(mapto('monospace'), text)
case '-b' | '--bold' : out = convert(mapto('bold'), text)
case '-i' | '--italics' : out = convert(mapto('italic'), text)
case '-bs' | '--boldsans' : out = convert(mapto('boldSans'), text)
case '-ib' | '--italicbold' : out = convert(mapto('boldItalic'), text)
case '-is' | '--italicsans' : out = convert(mapto('italicSans'), text)
case '-st' | '--strike' : out = strikethrough(text, opt)
return out
def show(text: str):
''' Displays the mapped text without the newline ending '''
print(text, end='\0') # Strip newlines from text
def main():
''' Main application entry point
Usage:
sapply asdf -i
sapply asdf -is
sapply asdf -cmap ./cmap.json
'''
loglevel = os.environ.get("LOGLEVEL")
loglevel = loglevel if loglevel is not None else logging.ERROR
logging.basicConfig(level=loglevel)
cmds = ['flip', 'zalgo', 'morse']
subcmd = None
text = None
effects = None
i = 0
for cmd in cmds:
if cmd in sys.argv:
subcmd = cmd
if sys.argv[i] == "-v":
print(f'sapply v{__version__}')
exit(0)
i += 1
if subcmd is None:
text = sys.argv[1]
effects = sys.argv[2:]
else:
text = sys.argv[2]
effects = sys.argv[3:]
logging.info(f'Subcommand : {subcmd}')
logging.info(f'Text : {text}')
logging.info(f'Effects : {effects}')
if not text:
sys.exit()
# Subcommands
match subcmd:
case 'flip' : show(flip(text))
case 'zalgo' : show(zalgo(text))
case 'morse' : show(to_morse(text.upper())) # TODO: Pass `effects` off to function for processing
# If a subcommand is used
if subcmd is not None:
# Exit early
return
out = ''
if (len(effects) < 2):
logging.debug('Non-combinable effect')
cmd = effects[0]
out = match_effects(cmd, text)
logging.debug(f'Effect: {cmd}')
elif (len(effects) < 3):
logging.debug('Combinable effect')
cmd = effects[0]
opt = effects[1]
logging.debug(f'Effect: {cmd}')
logging.debug(f'Option: {opt}')
if (opt is None):
opt = re.match(re.compile(r'-st='), cmd)
# Handle combinable effects
match cmd, opt:
case '--cmap', _:
cmap = read_charmap(opt)
out = convert(cmap, text)
case '-f', _:
# opt == fp
token_dict = parse_transforms(opt)
for effect, text in token_dict.items():
if (text == '\n'):
out += '\n'
else:
out += match_effects(effect, text) + ' '
case _,_: out = match_effects(cmd, text, opt)
show(out)
``` |
{
"source": "jmdagamewiz/word_pattern",
"score": 3
} |
#### File: word_pattern/dictionaries/dictionary.py
```python
import os
import shutil
import json
def get_dictionary(language):
module_dir, module_file = os.path.split(__file__)
if language in __dictionary__.keys():
output = []
with open(os.path.join(module_dir, __dictionary__[language]), "r") as infile:
for line in infile:
output.append(line.rstrip("\n").lower())
return output
return None
def list_dictionaries():
return __dictionary__.keys()
def __remove_pattern_dir__(language):
module_dir, module_file = os.path.split(__file__)
patterns_dir = os.path.join(module_dir, "patterns")
language_dir = os.path.join(patterns_dir, language)
if os.path.exists(patterns_dir) and os.path.isdir(patterns_dir):
if os.path.exists(language_dir):
if os.path.isdir(language_dir):
shutil.rmtree(language_dir)
else:
os.remove(language_dir)
def build_word_pattern(word, language):
"""Given a word and a language, calculate the word pattern and the number of unique character is has."""
pattern = ""
unique = 0
char_map = {}
for char in word:
if char not in char_map.keys():
char_map[char] = unique
unique += 1
pattern += __alphabet__[language][char_map[char]]
return pattern, unique
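# Example (using the 'en' alphabet defined below): build_word_pattern('ancillary', 'en')
# returns ('abcdeeafg', 7) - seven distinct letters mapped to 'a'..'g' in order of first appearance.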
def __build_pattern_map__(language):
patterns = {}
wordlist = get_dictionary(language)
for word in wordlist:
word_pattern, word_unique = build_word_pattern(word, language)
if len(word) not in patterns.keys():
patterns[len(word)] = {}
if word_unique not in patterns[len(word)]:
patterns[len(word)][word_unique] = {}
if word_pattern not in patterns[len(word)][word_unique]:
patterns[len(word)][word_unique][word_pattern] = []
patterns[len(word)][word_unique][word_pattern].append(word)
return patterns
def __build_pattern_map_directories__(language, patterns):
module_dir, module_file = os.path.split(__file__)
patterns_dir = os.path.join(module_dir, "patterns")
if not os.path.exists(patterns_dir):
os.mkdir(patterns_dir)
language_dir = os.path.join(patterns_dir, language)
os.mkdir(language_dir)
for word_length in patterns.keys():
os.mkdir(os.path.join(language_dir, str(word_length)))
def __save_pattern_map__(language, patterns):
module_dir, module_file = os.path.split(__file__)
language_dir = os.path.join(module_dir, "patterns", language)
for word_length in patterns.keys():
word_length_dir = os.path.join(language_dir, str(word_length))
for word_unique in patterns[word_length].keys():
with open(os.path.join(word_length_dir, str(word_unique) + ".json"), "w", encoding="utf-8") as outfile:
json.dump(patterns[word_length][word_unique], outfile, ensure_ascii=False, indent=4)
def generate_pattern_map(language):
"""Sets up the project with a pattern map for the specified language.
:param language: the desired language
:return:
The dictionary is divided into buckets based on each word's length.
Each bucket is further divided into sub-buckets based on the number of unique letters.
Each sub-bucket is a json file that maps each pattern to the words in that pattern.
Another file is used to map each bucket (length and unique letters) to its file.
Patterns should be compiled by assigning "a" to the first letter, "b" to the second, and so on.
For example, "ancillary" would have the pattern "abcdeeafg"."""
if language not in __dictionary__.keys():
raise ValueError("Unknown language code: " + language)
__remove_pattern_dir__(language) # remove any previous pattern map
patterns = __build_pattern_map__(language)
__build_pattern_map_directories__(language, patterns)
__save_pattern_map__(language, patterns)
def lookup_pattern(pattern, language):
"""Looks up a specific pattern and returns all matching words.
:param pattern: the desired word or pattern (the pattern is recreated to ensure consistency)
:param language: the desired language
:return: a list of all words matching the pattern in the specified language
If the language does not exist, it throws an error.
If the pattern map does not exist, it creates it.
It looks up the pattern in the pattern_map and returns a list of all matching words.
"""
if language not in __dictionary__.keys():
raise ValueError("Unknown language code: " + language)
pattern_map = {}
module_dir, module_file = os.path.split(__file__)
patterns_dir = os.path.join(module_dir, "patterns")
language_dir = os.path.join(patterns_dir, language)
if not os.path.exists(patterns_dir) or not os.path.isdir(patterns_dir) or not os.path.exists(language_dir):
generate_pattern_map(language)
word_pattern, word_unique = build_word_pattern(pattern, language)
length_dir = os.path.join(language_dir, str(len(pattern)))
map_file = os.path.join(length_dir, str(word_unique) + ".json")
if os.path.exists(length_dir) and os.path.isdir(length_dir):
if os.path.exists(map_file) and os.path.isfile(map_file):
with open(map_file, "r") as infile:
pattern_map = json.load(infile)
if word_pattern in pattern_map.keys():
return pattern_map[word_pattern]
return []
__dictionary__ = {
"en": "english.txt"
}
__alphabet__ = {
"en": "abcdefghijklmnopqrstuvwxyz"
}
``` |
{
"source": "JM-data/Unsupervised_DeepFunctionalMaps",
"score": 2
} |
#### File: JM-data/Unsupervised_DeepFunctionalMaps/test_DFMnet.py
```python
import time
import tensorflow as tf
import scipy.io as sio
import numpy as np
from scipy.spatial import cKDTree
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('num_evecs', 120,
'number of eigenvectors used for representation')
flags.DEFINE_integer('num_model', 5000, '')
flags.DEFINE_string('test_shapes_dir', './Shapes/', '')
flags.DEFINE_string('files_name', 'tr_reg_', 'name common to all the shapes')
flags.DEFINE_string('log_dir', './Testing/',
'directory to save targets results')
flags.DEFINE_string('matches_dir', './Matches/',
'directory to matches')
def get_test_pair_source(source_fname):
input_data = {}
source_file = '%s%s.mat' % (FLAGS.test_shapes_dir, source_fname)
# This loads the source file, but under target-named keys, so the next lines rename them
input_data.update(sio.loadmat(source_file))
input_data['source_evecs'] = input_data['target_evecs']
del input_data['target_evecs']
input_data['source_evecs_trans'] = input_data['target_evecs_trans']
del input_data['target_evecs_trans']
input_data['source_shot'] = input_data['target_shot']
del input_data['target_shot']
input_data['source_evals'] = np.transpose(input_data['target_evals'])
del input_data['target_evals']
return input_data
def get_test_pair_target(target_fname):
input_data = {}
target_file = '%s%s.mat' % (FLAGS.test_shapes_dir, target_fname)
input_data.update(sio.loadmat(target_file))
input_data['target_evals'] = np.transpose(input_data['target_evals'])
return input_data
def run_test():
# Start session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
print('restoring graph...')
saver = tf.train.import_meta_graph('%smodel.ckpt-%s.meta'
% (FLAGS.log_dir, FLAGS.num_model))
saver.restore(sess, tf.train.latest_checkpoint('%s' % FLAGS.log_dir))
graph = tf.get_default_graph()
# Retrieve placeholder variables
source_evecs = graph.get_tensor_by_name('source_evecs:0')
source_evecs_trans = graph.get_tensor_by_name('source_evecs_trans:0')
target_evecs = graph.get_tensor_by_name('target_evecs:0')
target_evecs_trans = graph.get_tensor_by_name('target_evecs_trans:0')
source_shot = graph.get_tensor_by_name('source_shot:0')
target_shot = graph.get_tensor_by_name('target_shot:0')
phase = graph.get_tensor_by_name('phase:0')
source_evals = graph.get_tensor_by_name('source_evals:0')
target_evals = graph.get_tensor_by_name('target_evals:0')
Ct_est = graph.get_tensor_by_name(
'matrix_solve_ls/cholesky_solve/MatrixTriangularSolve_1:0'
)
for i in range(80, 99):
input_data_source = get_test_pair_source(FLAGS.files_name + '%.3d' % i)
source_evecs_ = input_data_source['source_evecs'][:, 0:FLAGS.num_evecs]
for j in range(i+1, 100):
t = time.time()
input_data_target = get_test_pair_target(FLAGS.files_name +
'%.3d' % j)
feed_dict = {
phase: True,
source_shot: [input_data_source['source_shot']],
target_shot: [input_data_target['target_shot']],
source_evecs: [input_data_source['source_evecs'][
:,
0:FLAGS.num_evecs
]
],
source_evecs_trans: [input_data_source[
'source_evecs_trans'
][
0:FLAGS.num_evecs,
:]
],
source_evals: [input_data_source[
'source_evals'
][0][0:FLAGS.num_evecs]],
target_evecs: [input_data_target[
'target_evecs'
][:, 0:FLAGS.num_evecs]],
target_evecs_trans: [input_data_target[
'target_evecs_trans'][
0:FLAGS.num_evecs,
:]
],
target_evals: [input_data_target[
'target_evals'][0][0:FLAGS.num_evecs]]
}
Ct_est_ = sess.run([Ct_est], feed_dict=feed_dict)
Ct = np.squeeze(Ct_est_) #Keep transposed
kdt = cKDTree(np.matmul(source_evecs_, Ct))
target_evecs_ = input_data_target['target_evecs'][:, 0:FLAGS.num_evecs]
dist, indices = kdt.query(target_evecs_, n_jobs=-1)
indices = indices + 1
print("Computed correspondences for pair: %s, %s." % (i, j) +
" Took %f seconds" % (time.time() - t))
params_to_save = {}
params_to_save['matches'] = indices
#params_to_save['C'] = Ct.T
# For Matlab where index start at 1
sio.savemat(FLAGS.matches_dir +
FLAGS.files_name + '%.3d-' % i +
FLAGS.files_name + '%.3d.mat' % j, params_to_save)
def main(_):
import time
start_time = time.time()
run_test()
print("--- %s seconds ---" % (time.time() - start_time))
if __name__ == '__main__':
tf.app.run()
``` |
{
"source": "jmd-dk/advent-of-code-2021",
"score": 3
} |
#### File: advent-of-code-2021/15/main.py
```python
import collections, heapq
# Part one
with open('input.txt') as f:
cavern = [list(map(int, line.strip())) for line in f.readlines()]
shape = (len(cavern), len(cavern[0]))
def dijkstra(start, goal, n_tiles=1):
"""Dijkstra's algorithm, terminating at goal"""
inf = float('inf')
distances = collections.defaultdict(lambda: inf)
distances[start] = 0
cur = start
unvisited_set = set()
unvisited_heap = []
while cur != goal:
for step in [(1, 0), (-1, 0), (0, 1), (0, -1)]:
new = (cur[0] + step[0], cur[1] + step[1])
tile = (new[0]//shape[0], new[1]//shape[1])
if not (0 <= tile[0] < n_tiles and 0 <= tile[1] < n_tiles):
continue
visited = new not in unvisited_set
if distances[new] < inf and visited:
continue
# Unvisited
risk = cavern[new[0] - tile[0]*shape[0]][new[1] - tile[1]*shape[1]]
risk += tile[0] + tile[1]
risk -= 9*(risk//10)
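# Wrap risk values above 9 back into the 1-9 range, e.g. 10 -> 1 and 17 -> 8
# (the maximum possible here is 9 + 4 + 4 = 17 for the 5x5 tiling).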
dist = min(
distances[new],
distances[cur] + risk,
)
distances[new] = dist
if visited:
unvisited_set.add(new)
heapq.heappush(unvisited_heap, (dist, new))
# Visit
cur = heapq.heappop(unvisited_heap)[1]
unvisited_set.remove(cur)
return distances[goal]
start = (0, 0)
goal = (shape[0] - 1, shape[1] - 1)
lowest_total_risk = dijkstra(start, goal)
print('part one:', lowest_total_risk)
# Part two
start = (0, 0)
n_tiles = 5
goal = (n_tiles*shape[0] - 1, n_tiles*shape[1] - 1)
lowest_total_risk = dijkstra(start, goal, n_tiles)
print('part two:', lowest_total_risk)
```
#### File: advent-of-code-2021/22/main.py
```python
import collections, re
import numpy as np
# Part one
range_inclusive = lambda bgn, end: range(bgn, end + 1)
Step = collections.namedtuple('Step', ('state', 'x', 'y', 'z'))
steps = []
with open('input.txt') as f:
for line in f:
match = re.search(r'(on|off) x=(-?\d+)\.\.(-?\d+),y=(-?\d+)\.\.(-?\d+),z=(-?\d+)\.\.(-?\d+)', line)
state = (match.group(1) == 'on')
x = range_inclusive(*map(int, match.group(2, 3)))
y = range_inclusive(*map(int, match.group(4, 5)))
z = range_inclusive(*map(int, match.group(6, 7)))
steps.append(Step(state, x, y, z))
def transform(s):
return slice(size + s.start, size + s.stop)
size = 50
grid = np.zeros([2*size + 1]*3, dtype=bool)
for step in steps:
grid[transform(step.x), transform(step.y), transform(step.z)] = step.state
print('part one:', grid.sum())
``` |
{
"source": "j-m-dean/Overscreening_and_Underscreening",
"score": 2
} |
#### File: Overscreening_and_Underscreening/kmc_paper_data/overscreening_figure.py
```python
import json
import numpy as np
import matplotlib.pyplot as plt
import os
# Stylistic preferences
from matplotlib import rc, rcParams
from collections import OrderedDict
# ---------------------------------------------------
# Color sets
# ---------------------------------------------------
#Standard tableau 20 set
tableau = OrderedDict([
("blue", "#0173B2"),
("orange", "#DE8F05"),
("green", "#029E73"),
("red", "#D55E00"),
("purple", "#CC78BC"),
("brown", "#CA9161"),
("pink", "#FBAFE4"),
("grey", "#949494"),
("yellow", "#ECE133"),
("turquoise", "#56B4E9"),
])
fontsize=15
nearly_black = '#333333'
light_grey = '#EEEEEE'
lighter_grey = '#F5F5F5'
white = '#ffffff'
grey = '#7F7F7F'
master_formatting = {'axes.formatter.limits': (-5,5),
'axes.titlepad':10,
'xtick.major.pad': 7,
'ytick.major.pad': 7,
'ytick.color': nearly_black,
'xtick.color': nearly_black,
'axes.labelcolor': nearly_black,
'axes.linewidth': .5,
'axes.edgecolor' : nearly_black,
'axes.spines.bottom': True,
'axes.spines.left': True,
'axes.spines.right': True,
'axes.spines.top': True,
'axes.axisbelow': True,
'legend.frameon': False,
'lines.linewidth': 1.25,
'pdf.fonttype': 42,
'ps.fonttype': 42,
'font.size': fontsize,
'text.usetex': False,
'savefig.bbox':'tight',
'axes.facecolor': white,
'axes.labelpad': 10.0,
'axes.labelsize': fontsize,
'axes.titlesize': fontsize,
'axes.grid': False,
'lines.markersize': 7.0,
'lines.scale_dashes': False,
'xtick.labelsize': fontsize,
'ytick.labelsize': fontsize,
'legend.fontsize': fontsize,
'figure.figsize':[5.5,5.5]}
for k, v in master_formatting.items():
rcParams[k] = v
color_cycle = tableau.values()
try:
from matplotlib import cycler
rcParams['axes.prop_cycle'] = cycler(color=color_cycle)
except Exception:
raise
# Import data
home = os.getcwd() + "/"
with open(home + "averaged_distributions/2109_charges_distributions_errors.json") as outfile:
sim_data = json.load(outfile)
with open(home + "charges_2109/permittivity_1/outputs.json") as outfile:
fitted_data_perm_1 = json.load(outfile)
with open(home + "charges_2109/permittivity_10/outputs.json") as outfile:
fitted_data_perm_10 = json.load(outfile)
with open(home + "charges_2109/permittivity_100/outputs.json") as outfile:
fitted_data_perm_100 = json.load(outfile)
# Define x
x = np.array(range(1,38)) * 2.5e-10
# Get simulated data
# Divide by 75**2 to convert to mole fraction
# Multiply errors by 1.96 to get 95% confidence intervals
y_sim_1 = np.array(sim_data["distribution_1"]) / 75**2
yerr_sim_1 = 1.96 * np.array(sim_data["standard_errors_1"]) / 75**2
y_sim_10 = np.array(sim_data["distribution_10"]) / 75**2
yerr_sim_10 = 1.96 * np.array(sim_data["standard_errors_10"]) / 75**2
y_sim_100 = np.array(sim_data["distribution_100"]) / 75**2
yerr_sim_100 = 1.96 * np.array(sim_data["standard_errors_100"]) / 75**2
# Smooth x for the fitted curves
x_smooth = np.linspace(6.25e-10, 37*2.5e-10, 100000)
# Define fitting functions
def purely_exponential(x, alpha, A, n_infty):
"""
Description: A purely exponential decay of charged
species away from a grain boundary. This
distribution is of the form:
n(x) = n_infty - A * exp(- alpha * x)
Args:
x (numpy.array): distance from the grain boundary
alpha (float): the recipricol of the decay length
A (float): the amplitude of the decay
n_infty (float): the asymptotic bulk mole fraction
Return:
n_x (numpy.array): the charged species distribution
moving away from a grain boundary.
"""
n_x = n_infty - A * np.exp( - alpha * x)
return n_x
def oscillatory_exponential(x, alpha, A, n_infty, xi,theta):
"""
Description: A oscillatory exponential decay of charged
species away from a grain boundary. This distribution
is of the form:
n(x) = n_infty - A * exp(- alpha * x) * cos( xi * x + theta)
Args:
x (numpy.array): distance from the grain boundary
alpha (float): the recipricol of the decay length
A (float): the amplitude of the decay
n_infty (float): the asymptotic bulk mole fraction
xi (float): (2 * pi) / xi is the period of oscillations
theta (float): the phase shift
Return:
n_x (numpy.array): the charged species distribution
moving away from a grain boundary.
"""
n_x = n_infty - A * np.exp( - alpha * x) * np.cos( xi * x + theta)
return n_x
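# Note: with xi = 0 and theta = 0 the cosine factor is 1, so this oscillatory form reduces to
# the purely exponential profile defined above.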
# Linear Plot
y_fitted_purely_exp_perm_1 = purely_exponential(x_smooth, fitted_data_perm_1["alpha_dilute"], fitted_data_perm_1["A_dilute"], fitted_data_perm_1["n_dilute"]) / 75**2
y_fitted_purely_exp_perm_10 = purely_exponential(x_smooth, fitted_data_perm_10["alpha_dilute"], fitted_data_perm_10["A_dilute"], fitted_data_perm_10["n_dilute"]) / 75**2
y_fitted_purely_exp_perm_100 = purely_exponential(x_smooth, fitted_data_perm_100["alpha_dilute"], fitted_data_perm_100["A_dilute"], fitted_data_perm_100["n_dilute"]) / 75**2
y_fitted_osc_exp_perm_1 = oscillatory_exponential(x_smooth, fitted_data_perm_1["alpha_osc"], fitted_data_perm_1["A_osc"], fitted_data_perm_1["n_osc"], fitted_data_perm_1["xi_osc"], fitted_data_perm_1["theta_osc"]) / 75**2
y_fitted_osc_exp_perm_10 = oscillatory_exponential(x_smooth, fitted_data_perm_10["alpha_osc"], fitted_data_perm_10["A_osc"], fitted_data_perm_10["n_osc"], fitted_data_perm_10["xi_osc"], fitted_data_perm_10["theta_osc"]) / 75**2
y_fitted_osc_exp_perm_100 = oscillatory_exponential(x_smooth, fitted_data_perm_100["alpha_osc"], fitted_data_perm_100["A_osc"], fitted_data_perm_100["n_osc"], fitted_data_perm_100["xi_osc"], fitted_data_perm_100["theta_osc"]) / 75**2
MARKERSIZE = 4.0
# linear plots
fig, axs = plt.subplots(1, 3, figsize=(15.5, 5), sharey='row')
plt.subplots_adjust(hspace = 0.35)
axs[0].errorbar(x * 1e9, y_sim_100,yerr_sim_100, fmt = "o", label = "simulated", markersize= MARKERSIZE, zorder = 0)
axs[0].plot(x_smooth * 1e9, y_fitted_purely_exp_perm_100, label = "exponential", alpha = 0.7, linewidth = 1.5, zorder = 1 )
axs[0].plot(x_smooth * 1e9, y_fitted_osc_exp_perm_100,label = "oscillatory", alpha = 0.7, linewidth = 1.5, zorder = 2)
axs[1].errorbar(x * 1e9, y_sim_10, yerr_sim_10, fmt = "o", label = "simulated", markersize= MARKERSIZE, zorder = 0 )
axs[1].plot(x_smooth * 1e9, y_fitted_purely_exp_perm_10, label = "exponential", alpha = 0.7, linewidth = 2, zorder = 1)
axs[1].plot(x_smooth * 1e9, y_fitted_osc_exp_perm_10,label = "oscillatory", alpha = 0.7, linewidth = 2, zorder = 2)
axs[2].errorbar(x * 1e9, y_sim_1, yerr_sim_1, fmt = "o", label = "simulated", markersize= MARKERSIZE, zorder = 0 )
axs[2].plot(x_smooth * 1e9, y_fitted_purely_exp_perm_1, label = "exponential", alpha = 0.7, linewidth = 2, zorder = 1)
axs[2].plot(x_smooth * 1e9, y_fitted_osc_exp_perm_1,label = "oscillatory", alpha = 0.7, linewidth = 2, zorder = 2)
axs[2].legend(loc = 4, fontsize = "x-small")
axs[0].set_ylabel(r"$ \langle n(x) \rangle$")
axs[0].set_xlabel(r"$x$ / nm")
axs[1].set_xlabel(r"$x$ / nm")
axs[2].set_xlabel(r"$x$ / nm")
axs[0].set_title(r"(a) $\varepsilon_{r}$ = 100")
axs[1].set_title(r"(b) $\varepsilon_{r}$ = 10")
axs[2].set_title(r"(c) $\varepsilon_{r}$ = 1")
axs[0].hlines( 2109 / 75**3,0, 10, color = nearly_black, linestyles = "dotted")
axs[1].hlines( 2109 / 75**3,0, 10, color = nearly_black, linestyles = "dotted")
axs[2].hlines( 2109 / 75**3,0, 10, color = nearly_black, linestyles = "dotted")
axs[0].set_xticks([0, 2.5, 5, 7.5, 10])
axs[1].set_xticks([0, 2.5, 5, 7.5, 10])
axs[2].set_xticks([0, 2.5, 5, 7.5, 10])
axs[0].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[0].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
axs[1].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[1].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
axs[2].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[2].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
plt.tight_layout()
plt.savefig(home + "Figures/Overscreening.pdf")
plt.show()
# Log Plot
# Smooth x for the fitted curves
x_smooth = np.linspace(6.25e-10, 37*2.5e-10, 750)
y_fitted_purely_exp_perm_1 = purely_exponential(x_smooth, fitted_data_perm_1["alpha_dilute"], fitted_data_perm_1["A_dilute"], fitted_data_perm_1["n_dilute"]) / 75**2
y_fitted_purely_exp_perm_10 = purely_exponential(x_smooth, fitted_data_perm_10["alpha_dilute"], fitted_data_perm_10["A_dilute"], fitted_data_perm_10["n_dilute"]) / 75**2
y_fitted_purely_exp_perm_100 = purely_exponential(x_smooth, fitted_data_perm_100["alpha_dilute"], fitted_data_perm_100["A_dilute"], fitted_data_perm_100["n_dilute"]) / 75**2
y_fitted_osc_exp_perm_1 = oscillatory_exponential(x_smooth, fitted_data_perm_1["alpha_osc"], fitted_data_perm_1["A_osc"], fitted_data_perm_1["n_osc"], fitted_data_perm_1["xi_osc"], fitted_data_perm_1["theta_osc"]) / 75**2
y_fitted_osc_exp_perm_10 = oscillatory_exponential(x_smooth, fitted_data_perm_10["alpha_osc"], fitted_data_perm_10["A_osc"], fitted_data_perm_10["n_osc"], fitted_data_perm_10["xi_osc"], fitted_data_perm_10["theta_osc"]) / 75**2
y_fitted_osc_exp_perm_100 = oscillatory_exponential(x_smooth, fitted_data_perm_100["alpha_osc"], fitted_data_perm_100["A_osc"], fitted_data_perm_100["n_osc"], fitted_data_perm_100["xi_osc"], fitted_data_perm_100["theta_osc"]) / 75**2
MARKERSIZE = 4.0
# linear plots
fig, axs = plt.subplots(1, 3, figsize=(15.5, 5), sharey='row')
plt.subplots_adjust(hspace = 0.35)
axs[0].plot(x * 1e9, np.log(np.abs(y_sim_100 * 75**2 - fitted_data_perm_100["n_osc"]) ), "o", label = "simulated", markersize= MARKERSIZE, zorder = 0)
axs[0].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_purely_exp_perm_100 * 75**2 - fitted_data_perm_100["n_dilute"]) ),label = "exponential", alpha = 0.7, linewidth = 1.5, zorder = 1)
axs[0].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_osc_exp_perm_100 * 75**2 - fitted_data_perm_100["n_osc"]) ),label = "oscillatory", alpha = 0.7, linewidth = 1.5, zorder = 2)
axs[1].plot(x * 1e9, np.log(np.abs(y_sim_10 * 75**2 - fitted_data_perm_10["n_osc"])), "o", label = "simulated", markersize= MARKERSIZE, zorder = 0)
axs[1].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_purely_exp_perm_10 * 75**2 - fitted_data_perm_10["n_dilute"]) ),label = "exponential", alpha = 0.7, linewidth = 1.5, zorder = 1)
axs[1].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_osc_exp_perm_10 * 75**2 - fitted_data_perm_10["n_osc"]) ),label = "oscillatory", alpha = 0.7, linewidth = 1.5, zorder = 2)
axs[2].plot(x * 1e9, np.log(np.abs(y_sim_1 * 75**2 - fitted_data_perm_1["n_osc"]) ), "o", label = "simulated", markersize= MARKERSIZE, zorder = 0)
axs[2].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_purely_exp_perm_1 * 75**2 - fitted_data_perm_1["n_dilute"]) ),label = "exponential", alpha = 0.7, linewidth = 1.5, zorder = 1)
axs[2].plot(x_smooth * 1e9, np.log(np.abs(y_fitted_osc_exp_perm_1 * 75**2 - fitted_data_perm_1["n_osc"]) ),label = "oscillatory", alpha = 0.7, linewidth = 1.5, zorder = 2)
axs[2].legend(loc = 1, fontsize = "x-small")
axs[0].set_ylabel(r"$\ln(|\langle n(x) \rangle - n_{\infty}| $")
axs[0].set_xlabel(r"$x$ / nm")
axs[1].set_xlabel(r"$x$ / nm")
axs[2].set_xlabel(r"$x$ / nm")
axs[0].set_title(r"(a) $\varepsilon_{r}$ = 100")
axs[1].set_title(r"(b) $\varepsilon_{r}$ = 10")
axs[2].set_title(r"(c) $\varepsilon_{r}$ = 1")
axs[0].set_xticks([0, 2.5, 5, 7.5, 10])
axs[1].set_xticks([0, 2.5, 5, 7.5, 10])
axs[2].set_xticks([0, 2.5, 5, 7.5, 10])
axs[0].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[0].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
axs[1].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[1].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
axs[2].tick_params(
axis='x', # changes apply to the x-axis
which='both', # both major and minor ticks are affected
top=False, bottom = True, color = grey) # ticks along the top edge are off
axs[2].tick_params(
axis='y', # changes apply to the x-axis
left=True, color = grey) # ticks along the top edge are off
axs[0].set_ylim(-8, 4)
plt.tight_layout()
plt.savefig(home + "Figures/Overscreening_logarithmic.pdf")
plt.show()
``` |
{
"source": "j-m-dean/pyscses",
"score": 3
} |
#### File: pyscses/pyscses/defect_species.py
```python
class DefectSpecies(object):
"""
The DefectSpecies class describes the properties for a single defect species present in the system.
Attributes:
label (string): Label for this defect species e.g. "Vo" for an oxygen vacancy.
valence (float): The formal charge for this defect species, in atomic units.
mole_fraction (float): The bulk mole fraction of this defect species.
mobility (float): Mobility of this defect species. Default is `0.0`.
fixed (bool): Specifies whether this defect species is allowed to redistribute to achieve an equilibrium distribution,
or is kept at its input distribution. Default is `False`.
"""
def __init__(self,
label: str,
valence: float,
mole_fraction: float,
mobility: float = 0.0,
fixed: bool = False) -> None:
if not isinstance(label, str):
raise TypeError("When initialising a DefectSpecies object, the label argument must be a string.")
if not isinstance(valence, float):
raise TypeError("When initialising a DefectSpecies object, the valence argument must be a float.")
if not isinstance(mole_fraction, float):
raise TypeError("When initialising a DefectSpecies object, the mole_fraction argument must be a float.")
self.label = label
self.valence = valence
self.mole_fraction = mole_fraction
self.mobility = mobility
self.fixed = fixed
```
#### File: pyscses/pyscses/set_of_sites.py
```python
from __future__ import annotations
from scipy.interpolate import griddata # type: ignore
import numpy as np
import math
from pyscses.set_up_calculation import site_from_input_file, load_site_data
from pyscses.grid import index_of_grid_at_x, phi_at_x, energy_at_x
from pyscses.constants import boltzmann_eV
from pyscses.defect_species import DefectSpecies
from bisect import bisect_left
from typing import List, Iterator, Tuple, Optional, Union
from pyscses.site import Site
from pyscses.grid import Grid
class SetOfSites:
"""The SetOfSites object groups together all of the Site objects into one object and
contains functions for the calculations that provide properties of all of the sites
together rather than individually.
Attributes:
sites (list(Site)): List of Site objects.
"""
def __init__(self,
sites: Union[List[Site], Tuple[Site, ...]]) -> None:
"""Initialise a SetOfSites object."""
self.sites = tuple(sites)
def __add__(self,
other: SetOfSites) -> SetOfSites:
"""Allows the concatenation of multiple SetOfSites objects"""
if type(other) is not SetOfSites:
raise TypeError
return SetOfSites(self.sites + other.sites)
def __getitem__(self,
index: int) -> Site:
"""Returns the site corresponding to a given index """
return self.sites[index]
def __iter__(self) -> Iterator[Site]:
"""Iterator over self.sites"""
return iter(self.sites)
def __len__(self) -> int:
"""Number of sites"""
return len(self.sites)
def subset(self,
label: str) -> SetOfSites:
"""Returns a subset of all the sites which contain a particular defect """
return SetOfSites(sites=[s for s in self.sites if s.label == label])
def get_coords(self,
label: str) -> List[float]:
"""
Returns a list of the x coordinates for all the sites which contain a particular defect.
Args:
label (str): Label identifying the required defect species.
Returns:
list(float): List of site coordinates for a specific defect species.
"""
return [s.x for s in self.sites if s.label == label]
# BEN: Is this used?
# def calculate_energies_on_grid(self,
# grid: Grid,
# phi: np.ndarray) -> np.ndarray:
# """
# Returns an array of energies at their points on a one-dimensional grid.
#
# Args:
# grid (Grid): Grid object - contains properties of the grid including the x coordinates and the volumes. Used to access the x coordinates.
# phi (array): electrostatic potential on a one-dimensional grid.
#
# Returns:
# array: energies at their grid points
#
# """
# energies_on_grid = np.zeros_like(grid.x)
# for site in self.sites:
# energies_on_grid[index_of_grid_at_x(grid.x, site.x)] += energy_at_x(site.defect_energies, grid.x, site.x)
# return energies_on_grid
def calculate_probabilities(self,
grid: Grid,
phi: np.ndarray,
temp: float) -> np.ndarray:
"""
Calculates the probability of a site being occupied by its corresponding defect.
Args:
grid (Grid): Grid object - contains properties of the grid including the x coordinates and the volumes. Used to access the x coordinates.
phi (array): electrostatic potential on a one-dimensional grid.
temp (float): Absolute temperature.
Returns:
array: probabilities of defects occupying each site using their grid points
"""
# TODO: This is Jacob's fixed code, but it is inefficient and slow.
probability = np.zeros_like(grid.x)
for i,j in enumerate(grid.x):
prob = []
for site in self.sites:
if j == site.x:
prob.append(site.probabilities_as_list(phi_at_x(phi, grid.x, site.x), temp))
if len(prob) == 0:
probability[i] = 0
else:
probability[i] = np.mean(prob)
return probability
def calculate_defect_density(self,
grid: Grid,
phi: np.ndarray,
temp: float) -> np.ndarray:
"""
Calculates the defect density at each site.
Args:
grid (Grid): Grid object - contains properties of the grid including the x coordinates and the volumes. Used to access the x coordinates.
phi (array): electrostatic potential on a one-dimensional grid.
temp (float): Absolute temperature.
Returns:
array: defect density for each site using their grid points
"""
defect_density = np.zeros_like( grid.x )
for site in self.sites:
i = index_of_grid_at_x( grid.x, site.x )
defect_density[ i ] += np.asarray( site.probabilities_as_list( phi_at_x( phi, grid.x, site.x ), temp ) ) / grid.volumes[ i ]
return defect_density
def subgrid_calculate_defect_density(self,
sub_grid: Grid,
full_grid: Grid,
phi: np.ndarray,
temp: float) -> np.ndarray:
"""
Calculates the defect density at each site for a given subset of sites.
Args:
sub_grid (Grid): Grid object - contains properties of the grid including the x coordinates and the volumes. Used to access the x coordinates. For a given subset of sites.
full_grid (Grid): Grid object - contains properties of the grid including the x coordinates and the volumes. Used to access the x coordinates. For all sites.
phi (array): electrostatic potential on a one-dimensional grid.
temp (float): Absolute temperature.
Returns:
array: defect density for each site using their grid points
"""
defect_density = np.zeros_like( sub_grid.x )
for site in self.sites:
i = index_of_grid_at_x( sub_grid.x, site.x )
defect_density[ i ] += np.asarray( site.probabilities_as_list( phi_at_x( phi, full_grid.x, site.x ), temp ) ) / sub_grid.volumes[ i ]
return defect_density
def form_continuum_sites(all_sites,
x_min,
x_max,
n_points,
b,
c,
defect_species,
limits_for_laplacian,
site_labels,
defect_labels):
"""
Creates a SetOfSites object for sites interpolated onto a regular grid; this is equivalent to assuming a continuum approximation.
Args:
all_sites (SetOfSites): Original SetOfSites object from full data.
x_min (float): Minimum x coordinate value defining the calculation region.
x_max (float): Maximum x coordinate value defining the calculation region.
n_points (int): Number of points that the data should be interpolated on to.
b (float): b dimension for every grid point.
c (float): c dimension for every grid point.
defect_species (object): Class object containing information about the defect species present in the system.
limits_for_laplacian (list): Distance between the endmost sites and the midpoint of the next site outside of the calculation region, for the first and last sites respectively.
site_labels( list ): List of strings for the different site species.
defect_labels (list): List of strings for the different defect species.
Returns:
:obj:`SetOfSites`: Sites interpolated onto a regular grid.
"""
grid = np.linspace(x_min, x_max, n_points)
limits = (grid[1] - grid[0], grid[1] - grid[0])
sites = []
for label, d_label in zip(site_labels, defect_labels):
scaling = len( all_sites.subset(label)) / len(grid)
continuum_grid = Grid(grid, b, c, limits, limits_for_laplacian, all_sites.subset(label))
average_energies = np.array([site.average_local_energy(method='mean')[0] for site in all_sites.subset(label)])
new_energies = griddata(([site.x for site in all_sites.subset(label)]),
average_energies,
grid,
method='nearest')
for x, e in zip(grid, new_energies):
sites.append(Site(label,
x,
[defect_species[d_label]],
[e],
scaling=np.array([scaling])))
return SetOfSites(sites), limits
@classmethod
def set_of_sites_from_input_data(cls: object,
filename: str,
limits: Tuple[float, float],
defect_species: List[DefectSpecies],
site_charge: bool,
core: str,
temperature: float,
offset: float = 0.0) -> SetOfSites:
"""
Takes the data from the input file and creates a SetOfSites object for those sites.
The input data file is a .txt file where each line in the file corresponds to a site. The values in each line are formatted and separated into the corresponding properties before creating a Site object for each site.
Args:
filename (str): Name of the input file to be parsed.
limits (list): Minimum and maximum x coordinates defining the calculation region.
defect_species (object): Class object containing information about the defect species present in the system.
site_charge (bool): The site charge refers to the contribution to the overall charge of a site given by the original, non-defective species present at that site. True if the site charge contribution is to be included in the calculation, False if it is not to be included.
core (str): Core definition. 'single' = Single segregation energy used to define the core. 'multi_site' = Layered segregation energies used to define the core while the energies fall in the region of positive and negative kT. 'all' = All sites between a minimum and maximum x coordinate used in calculation.
temperature (float): Temperature that the calculation is being run at.
Returns:
:obj:`SetOfSites`: `SetOfSites` object for the input data.
"""
site_data = load_site_data(filename, limits[0], limits[1], site_charge, offset)
energies = [line[4] for line in site_data]
min_energy = min(energies)
if core == 'single':
for line in site_data:
if line[4] > min_energy:
line[4] = 0.0
if core == 'multi_site':
for line in site_data:
if ( -boltzmann_eV * temperature) <= line[4] <= ( boltzmann_eV * temperature ):
line[4] = 0.0
return SetOfSites([site_from_input_file(line, defect_species, site_charge, core, temperature) for line in site_data])
# BEN: Is this used?
# @ classmethod
# def core_width_analysis(cls,
# input_data,
# limits,
# defect_species,
# site_charge,
# core,
# temperature):
# """
# Calculated the width of the 'core' region. This is given as the region where the segregation energies in the system are within a region of positive to negative kT.
#
# Args:
# input_data (file): A .txt file where each line includes information about a site.
# limits (list): Minimum and maximum x coordinates defining the calculation region.
# defect_species (object): Class object containing information about the defect species present in the system.
# site_charge (bool): The site charge refers to the contribution to the overall charge of a site given by the original, non-defective species present at that site. True if the site charge contribution is to be included in the calculation, False if it is not to be included.
# core (str): Core definition. Allowed keywords: 'single' = Single segregation energy used to define the core. 'multi-site' = Layered segregation energies used to define the core while the energies fall in the region of positive and negative kT. 'all' = All sites between a minimum and maximum x coordinate used in calculation.
# temperature (float): Temperature that the calculation is being run at.
#
# Returns:
# float: Distance between the minimum and maximum x coordinates where the segregation energy is in the range of positive to negative kT.
#
# """
# site_data = load_site_data( input_data, limits[0], limits[1], site_charge )
# energies = [ line[4] for line in site_data ]
# min_energy = min(energies)
# if core == 'single':
# for line in site_data:
# if line[4] > min_energy:
# line[4] = 0.0
# #print(boltzmann_eV * temperature, flush=True)
# if core == 'multi_site':
# for line in site_data:
# if ( -boltzmann_eV * temperature) <= line[4] <= ( boltzmann_eV * temperature ):
# line[4] = 0.0
# energies = [line[4] for line in site_data ]
# x = [line[2] for line in site_data ]
# x_seg = np.column_stack(( x, energies ))
# minval = np.min(x_seg[:,0][np.nonzero(x_seg[:,1])])
# maxval = np.max(x_seg[:,0][np.nonzero(x_seg[:,1])])
# core_width = maxval-minval
# return core_width
``` |
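The dunder methods above make SetOfSites behave like an immutable sequence. A hedged sketch of those operations, assuming `site_a` and `site_b` are pre-built Site objects labelled "Vo" (hypothetical placeholders, not defined in this file):

```python
from pyscses.set_of_sites import SetOfSites

# site_a and site_b are assumed Site instances labelled "Vo" (hypothetical).
combined = SetOfSites([site_a]) + SetOfSites([site_b])  # __add__ concatenates
print(len(combined))                     # __len__ -> 2
first_site = combined[0]                 # __getitem__
vacancy_sites = combined.subset("Vo")    # new SetOfSites with matching label
x_coords = combined.get_coords("Vo")     # x coordinates of those sites
for site in combined:                    # __iter__ over the stored tuple
    print(site.x)
```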
{
"source": "j-m-dean/uravu",
"score": 3
} |
#### File: uravu/uravu/distribution.py
```python
import numpy as np
from scipy.stats import normaltest
from uravu.kde import gaussian_kde
from scipy.optimize import minimize
class Distribution:
"""
In addition to storage of the probability distribution, this class allows for some basic analysis, such as determination of normality.
Attributes:
samples (:py:attr:`array_like`): Samples in the distribution.
name (:py:attr:`str`): Distribution name.
ci_points (:py:attr:`array_like`): The percentiles at which confidence intervals should be found.
normal (:py:attr:`bool`): Are the samples normally distributed?
kde (:py:class:`scipy.stats.kde.gaussian_kde`): Kernel density approximation for the distribution.
Args:
samples (:py:attr:`array_like`): Sample for the distribution.
name (:py:attr:`str`, optional): A name to identify the distribution. Default is :py:attr:`'Distribution'`.
ci_points (:py:attr:`array_like`, optional): The two percentiles at which confidence intervals should be found. Default is :py:attr:`[2.5, 97.5]` (a 95 % confidence interval).
.. _FAQ: ./faq.html
"""
def __init__(self, samples, name="Distribution", ci_points=None):
"""
Initialisation function for a :py:class:`~uravu.distribution.Distribution` object.
"""
self.name = name
self.samples = np.array([])
if ci_points is None:
self.ci_points = np.array([2.5, 97.5])
else:
if len(ci_points) != 2:
raise ValueError("The ci_points must be an array of length two.")
self.ci_points = np.array(ci_points)
self.normal = False
self.add_samples(np.array(samples))
@property
def size(self):
"""
Get the number of samples in the distribution.
Returns:
:py:attr:`int`: Number of samples.
"""
return self.samples.size
def check_normality(self):
"""
Uses a :func:`scipy.stats.normaltest()` to evaluate if samples are normally distributed and updates the :py:attr:`~uravu.distribution.Distribution.normal` attribute.
"""
alpha = 0.05
test_samples = self.samples
if self.size > 500:
test_samples = np.random.choice(self.samples, size=500)
p_value = normaltest(test_samples)[1]
if p_value > alpha:
self.normal = True
else:
self.normal = False
def pdf(self, x):
"""
Get the probability density function for the distribution.
Args:
x (:py:attr:`float`): Value to return probability of.
Return:
:py:attr:`float`: Probability.
"""
return self.kde.pdf(x)
def logpdf(self, x):
"""
Get the natural log probability density function for the distribution.
Args:
x (:py:attr:`float`): Value to return natural log probability of.
Return:
:py:attr:`float`: Natural log probability.
"""
return self.kde.logpdf(x)
def negative_pdf(self, x):
"""
Get the negative of the probability density function for the distribution.
Args:
x (:py:attr:`float`): Value to return negative probability of.
Return:
:py:attr:`float`: Negative probability.
"""
return -self.kde.pdf(x)
@property
def dist_max(self):
"""
Get the value that maximises the distribution. If no :py:attr:`kde` has been created (for example if the distribution has 8 or fewer values) the median is returned.
Returns:
:py:attr:`float`: Most likely value.
"""
try:
return minimize(self.negative_pdf, x0=[self.n]).x
except AttributeError:
return self.n
@property
def min(self):
"""
Get sample minimum.
Returns:
:py:attr:`float`: Sample minimum.
"""
return self.samples.min()
@property
def max(self):
"""
Get sample maximum.
Returns:
:py:attr:`float`: Sample maximum.
"""
return self.samples.max()
@property
def n(self):
"""
Get the median value of the distribution (for a normal distribution this is the same as the mean).
Returns:
:py:attr:`float`: Median value.
"""
return np.percentile(self.samples, [50])[0]
@property
def s(self):
"""
Get the standard deviation of the distribution. For a non-normal distribution, this will return :py:attr:`None`.
Returns:
:py:attr:`float` or :py:attr:`None`: Standard deviation of the distribution.
"""
if self.normal:
return np.std(self.samples, ddof=1)
return None
@property
def v(self):
"""
Get the variance of the distribution. For a non-normal distribution, this will return :py:attr:`None`.
Returns:
:py:attr:`float` or :py:attr:`None`: Variance of the distribution.
"""
if self.normal:
return np.var(self.samples, ddof=1)
return None
@property
def con_int(self):
"""
Get the extrema of the confidence intervals of the distribution.
Returns:
:py:attr:`array_like`: Distribution values at the confidence interval.
"""
return np.percentile(self.samples, self.ci_points)
def add_samples(self, samples):
"""
Add samples to the distribution.
Args:
samples (:py:attr:`array_like`): Samples to be added to the distribution.
"""
self.samples = np.append(self.samples, np.array(samples).flatten())
if self.size > 8:
self.check_normality()
self.kde = gaussian_kde(self.samples)
```
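A short usage sketch mirroring how the test modules below build a Distribution from normally distributed samples; the printed values are approximate, and the normality flag depends on a random subsample, so it is typically but not strictly always True.

```python
import numpy as np
from scipy.stats import norm
from uravu.distribution import Distribution

# With more than 8 samples, add_samples() builds the KDE and runs check_normality().
d = Distribution(norm.rvs(loc=0, scale=1, size=10000,
                          random_state=np.random.RandomState(1)))
print(d.size)     # 10000
print(d.normal)   # usually True for a genuinely normal sample
print(d.n, d.s)   # median close to 0, standard deviation close to 1
print(d.con_int)  # roughly [-1.96, 1.96] for the default 95 % interval
```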
#### File: uravu/uravu/plotting.py
```python
import numpy as np
try:
import matplotlib.pyplot as plt
from corner import corner
from uravu import _fig_params
colors = _fig_params.colors
except ModuleNotFoundError:
raise ModuleNotFoundError("The matplotlib, corner and seaborn packages are necessary for the use of the plotting module, please install these.")
def plot_relationship(relationship, axes=None, figsize=(10, 6)): # pragma: no cover
"""
Plot the relationship. Additional plots are included on these axes if posterior sampling has been used to find distributions.
Args:
relationship (:py:class:`uravu.relationship.Relationship`): The relationship to be plotted.
axes (:py:class:`matplotlib.axes.Axes`, optional): Axes to which the plot should be added. If :py:attr:`None` given new axes will be created. Default is :py:attr:`None`.
figsize (:py:attr:`tuple`, optional): Horizontal and vertical size for figure (in inches). Default is :py:attr:`(10, 6)`.
Returns:
(:py:class:`matplotlib.axes.Axes`): The axes with new plots.
"""
if axes is None:
axes = plt.subplots(figsize=figsize)[1]
variables = relationship.variables
axes.errorbar(relationship.x, relationship.y.mode, relationship.y.s, c=colors[0], ecolor=colors[0] + '40', marker='.', ls='')
smooth_x = np.linspace(relationship.x.min(), relationship.x.max(), 1000)
if relationship.mcmc_done or relationship.nested_sampling_done:
plot_samples = np.random.randint(0, variables[0].samples.size, size=100)
for i in plot_samples:
float_variables = relationship.get_sample(i)
axes.plot(smooth_x, relationship.function(smooth_x, *float_variables), color=colors[1], alpha=0.05)
else:
float_variables = relationship.variable_medians
axes.plot(smooth_x, relationship.function(smooth_x, *float_variables), color=colors[1])
return axes
def plot_distribution(distro, axes=None, figsize=(5, 3)): # pragma: no cover
"""
Plot the probability density function for a distribution.
Args:
distro (:py:class:`uravu.distribution.Distribution`): The distribution to be plotted.
axes (:py:class:`matplotlib.axes.Axes`, optional): Axes to which the plot should be added. If :py:attr:`None` given new axes will be created. Default is :py:attr:`None`.
figsize (:py:class:`tuple`, optional): Horizontal and vertical size for figure (in inches). Default is :py:attr:`(5, 3)`.
Returns:
(:py:class:`matplotlib.axes.Axes`): The axes with new plots.
"""
if axes is None:
axes = plt.subplots(figsize=figsize)[1]
kde = distro.kde
abscissa = np.linspace(distro.samples.min(), distro.samples.max(), 100)
ordinate = kde.evaluate(abscissa)
axes.plot(abscissa, ordinate, color=colors[0])
axes.hist(distro.samples, bins=25, density=True, color=colors[0], alpha=0.5)
axes.fill_betweenx(np.linspace(0, ordinate.max() + ordinate.max() * 0.1), distro.con_int[0], distro.con_int[1], alpha=0.2)
axes.set_ylim((0, ordinate.max() + ordinate.max() * 0.1))
return axes
def plot_corner(relationship, figsize=(8, 8)): # pragma: no cover
"""
Plot the :py:mod:`corner` (named for the Python package) plot between the relationship's variables.
Args:
relationship (:py:class:`uravu.relationship.Relationship`): The relationship containing the distributions to be plotted.
figsize (:py:attr:`tuple`, optional): Horizontal and vertical size for figure (in inches). Default is :py:attr:`(8, 8)`.
Returns:
:py:attr:`tuple`: Containing:
- :py:class:`matplotlib.figure.Figure`: The figure with new plots.
- :py:class:`matplotlib.axes.Axes`: The axes with new plots.
"""
n = len(relationship.variables)
fig, ax = plt.subplots(n, n, figsize=figsize)
samples = np.zeros((relationship.variables[0].size, len(relationship.variables)))
for i, v in enumerate(relationship.variables):
samples[:, i] = v.samples
corner(samples, color=colors[0], hist_kwargs={"lw": 4, "histtype": "step"}, label_kwargs={"fontsize": _fig_params.rcParams["axes.labelsize"]}, fig=fig)
return fig, ax
```
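A hedged sketch of calling plot_distribution, assuming uravu, matplotlib, and corner are installed; plot_relationship and plot_corner need a fitted Relationship object, so only the simpler distribution plot is shown here.

```python
import matplotlib.pyplot as plt
from scipy.stats import norm
from uravu.distribution import Distribution
from uravu.plotting import plot_distribution

# Histogram, KDE curve, and shaded confidence interval on one set of axes.
d = Distribution(norm.rvs(loc=0, scale=1, size=5000))
ax = plot_distribution(d)
plt.show()
```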
#### File: uravu/tests/test_axis.py
```python
import unittest
import numpy as np
from numpy.testing import assert_almost_equal, assert_equal
from uravu.distribution import Distribution
import scipy.stats
from uravu.axis import Axis
from uravu import kde
from scipy.stats import norm, uniform
DISTRO1 = Distribution(norm.rvs(loc=0, scale=1, size=10000, random_state=np.random.RandomState(1)))
DISTRO2 = Distribution(norm.rvs(loc=1, scale=1, size=10000, random_state=np.random.RandomState(2)))
AX = Axis([DISTRO1, DISTRO2])
AX_ARRAY = Axis([0, 1])
class TestDistribution(unittest.TestCase):
"""
Testing the Axis class.
"""
def test_init_values(self):
assert_equal(AX.values[0].samples, DISTRO1.samples)
assert_equal(AX.values[1].samples, DISTRO2.samples)
def test_init_kde(self):
assert_equal(isinstance(AX.kde, kde.gaussian_kde), True)
def test_init_kde_size_change(self):
distro2 = Distribution(norm.rvs(loc=1, scale=1, size=1000, random_state=np.random.RandomState(2)))
AX = Axis([DISTRO1, distro2])
assert_equal(AX.values[1].samples, distro2.samples)
def test_n(self):
assert_almost_equal(AX.n, [0, 1], decimal=1)
def test_n_array(self):
assert_equal(AX_ARRAY.n, [0, 1])
def test_s(self):
assert_almost_equal(AX.s, [[1.96, 1.96], [1.96, 1.96]], decimal=1)
def test_s_array(self):
assert_equal(AX_ARRAY.s, [0, 0])
def test_mode(self):
assert_almost_equal(AX.mode, [0, 1], decimal=1)
def test_mode_array(self):
assert_equal(AX_ARRAY.mode, [0, 1])
def test_size(self):
assert_equal(AX.size, 2)
def test_size_array(self):
assert_equal(AX_ARRAY.size, 2)
def test_shape(self):
assert_equal(AX.shape, 2)
def test_shape_array(self):
ax = Axis(np.ones((3, 3)))
assert_equal(ax.shape, (3, 3))
def test_pdf(self):
assert_almost_equal(AX.pdf([0, 1]), [0.1495], decimal=0)
def test_logpdf(self):
assert_almost_equal(AX.logpdf([0, 1]), np.log([0.1495]), decimal=1)
```
#### File: uravu/tests/test_sampling.py
```python
import unittest
import numpy as np
from numpy.testing import assert_equal
from scipy.stats import norm
from uravu import sampling, utils
from uravu import relationship
from uravu.distribution import Distribution
import scipy.stats
TEST_Y = []
for i in np.arange(1, 9, 1):
TEST_Y.append(Distribution(scipy.stats.norm.rvs(loc=i, scale=0.5, size=200)))
TEST_X = np.arange(1, 9, 1)
class TestSampling(unittest.TestCase):
"""
Unit tests for optimize module.
"""
def test_mcmc(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y
)
test_rel.max_likelihood('mini')
actual_results = sampling.mcmc(test_rel, n_burn=10, n_samples=10)
assert_equal(isinstance(actual_results["distributions"][0], Distribution), True)
assert_equal(isinstance(actual_results["distributions"][1], Distribution), True)
assert_equal(actual_results["distributions"][0].size, 500)
assert_equal(actual_results["distributions"][1].size, 500)
def test_mcmc_with_other_prior(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y
)
test_rel.max_likelihood('mini')
def other_prior():
"""
Another potential prior.
"""
priors = []
for i, variable in enumerate(test_rel.variables):
loc = variable.n
scale = 1
priors.append(norm(loc=loc, scale=scale))
return priors
actual_results = sampling.mcmc(
test_rel, prior_function=other_prior, n_burn=10, n_samples=10
)
assert_equal(isinstance(actual_results["distributions"][0], Distribution), True)
assert_equal(isinstance(actual_results["distributions"][1], Distribution), True)
assert_equal(actual_results["distributions"][0].size, 500)
assert_equal(actual_results["distributions"][1].size, 500)
def test_mcmc_with_variable_median_zero(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y
)
test_rel.variables[0] = Distribution(np.zeros((7)))
actual_results = sampling.mcmc(test_rel, n_burn=10, n_samples=10)
assert_equal(isinstance(actual_results["distributions"][0], Distribution), True)
assert_equal(isinstance(actual_results["distributions"][1], Distribution), True)
assert_equal(actual_results["distributions"][0].size, 500)
assert_equal(actual_results["distributions"][1].size, 500)
def test_nested_sampling(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y, bounds=((0, 10), (-1, 1)))
actual_results = sampling.nested_sampling(test_rel, maxiter=100)
assert_equal(isinstance(actual_results, dict), True)
assert_equal(isinstance(actual_results["logz"][-1], float), True)
assert_equal(isinstance(actual_results["logzerr"][-1], float), True)
def test_nested_sampling_b_with_other_prior(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y, bounds=((0, 10), (-1, 1)))
test_rel.max_likelihood('mini')
def other_prior():
"""
Another potential prior.
"""
priors = []
for i, variable in enumerate(test_rel.variables):
loc = variable.n
scale = 1
priors.append(norm(loc=loc, scale=scale))
return priors
actual_results = sampling.nested_sampling(
test_rel, prior_function=other_prior, maxiter=100
)
assert_equal(isinstance(actual_results, dict), True)
assert_equal(isinstance(actual_results["logz"][-1], float), True)
assert_equal(isinstance(actual_results["logzerr"][-1], float), True)
def test_dynamic_nested_sampling(self):
test_rel = relationship.Relationship(
utils.straight_line, TEST_X, TEST_Y, bounds=((0, 10), (-1, 1)))
actual_results = sampling.nested_sampling(test_rel, dynamic=True, maxiter=100)
assert_equal(isinstance(actual_results, dict), True)
assert_equal(isinstance(actual_results["logz"][-1], float), True)
assert_equal(isinstance(actual_results["logzerr"][-1], float), True)
``` |
{
"source": "jmdecastel/GEOTADMIN",
"score": 2
} |
#### File: cirkwi/migrations/0001_move_cirkwi_tables.py
```python
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
models = {
u'cirkwi.cirkwilocomotion': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiLocomotion', 'db_table': "'o_b_cirkwi_locomotion'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
},
u'cirkwi.cirkwipoicategory': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiPOICategory', 'db_table': "'o_b_cirkwi_poi_category'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
},
u'cirkwi.cirkwitag': {
'Meta': {'ordering': "['name']", 'object_name': 'CirkwiTag', 'db_table': "'o_b_cirkwi_tag'"},
'eid': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"})
}
}
complete_apps = ['cirkwi']
```
#### File: common/tests/test_tasks.py
```python
import os
from django.test import TestCase
from geotrek.common.tasks import import_datas
from geotrek.common.models import FileType
class TasksTest(TestCase):
def setUp(self):
self.filetype = FileType.objects.create(type=u"Photographie")
def test_import_exceptions(self):
self.assertRaises(
ImportError, import_datas, filename='bombadil', class_name='haricot', module_name='toto')
def test_import_message_exception(self):
self.assertRaisesMessage(
ImportError,
"Failed to import parser class 'haricot' from module 'toto'",
import_datas,
filename='bombadil',
class_name='haricot',
module_name='toto'
)
def test_import_return(self):
filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
task = import_datas.delay('AttachmentParser', filename, 'geotrek.common.tests.test_parsers')
self.assertEqual(task.status, 'SUCCESS')
self.assertEqual(task.result['parser'], 'AttachmentParser')
self.assertEqual(task.result['filename'], 'organism.xls')
self.assertEqual(task.result['current'], 100)
self.assertEqual(task.result['total'], 100)
self.assertEqual(task.result['name'], 'geotrek.common.import-file')
```
#### File: common/utils/import_celery.py
```python
import os
import importlib
from geotrek.common.parsers import Parser
def subclasses(cls):
all_subclasses = []
for subclass in cls.__subclasses__():
all_subclasses.append(subclass)
all_subclasses.extend(subclasses(subclass))
return all_subclasses
def create_tmp_destination(name):
save_dir = '/tmp/geotrek/{}'.format(name)
if not os.path.exists('/tmp/geotrek'):
os.mkdir('/tmp/geotrek')
if not os.path.exists(save_dir):
os.mkdir(save_dir)
return save_dir, '/'.join((save_dir, name))
def discover_available_parsers():
choices = []
choices_url = []
try:
importlib.import_module('bulkimport.parsers')
except ImportError:
pass
classes = subclasses(Parser)
for index, cls in enumerate(classes):
if cls.label is None:
continue
if not getattr(cls, 'url', None) and not getattr(cls, 'base_url', None):
choices.append((index, cls.label))
else:
choices_url.append((index, cls.label))
choices = sorted(choices, key=lambda x: x[1])
choices_url = sorted(choices_url, key=lambda x: x[1])
return choices, choices_url, classes
```
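The recursive subclasses() helper is independent of any Geotrek model; a self-contained sketch using the same recursion with throwaway classes (illustrative only):

```python
def subclasses(cls):
    # Same logic as the helper above: direct subclasses first, then recurse.
    all_subclasses = []
    for subclass in cls.__subclasses__():
        all_subclasses.append(subclass)
        all_subclasses.extend(subclasses(subclass))
    return all_subclasses

class Base(object):
    pass

class Child(Base):
    pass

class GrandChild(Child):
    pass

print([c.__name__ for c in subclasses(Base)])  # ['Child', 'GrandChild']
```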
#### File: geotrek/feedback/serializers.py
```python
from django.contrib.gis.geos import GEOSGeometry
from rest_framework import serializers as rest_serializers
from geotrek.feedback import models as feedback_models
class ReportSerializer(rest_serializers.ModelSerializer):
class Meta:
model = feedback_models.Report
geo_field = 'geom'
id_field = 'id'
def validate_geom(self, attrs, source):
if source not in attrs:
return attrs
geom = attrs[source]
point = GEOSGeometry(geom, srid=4326)
attrs[source] = point
return attrs
```
#### File: flatpages/migrations/0004_auto__chg_field_flatpage_external_url.py
```python
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'FlatPage.external_url'
db.alter_column('p_t_page', 'url_externe', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, db_column='url_externe'))
def backwards(self, orm):
# Changing field 'FlatPage.external_url'
db.alter_column('p_t_page', 'url_externe', self.gf('django.db.models.fields.TextField')(null=True, db_column='url_externe'))
models = {
u'flatpages.flatpage': {
'Meta': {'object_name': 'FlatPage', 'db_table': "'p_t_page'"},
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'contenu'", 'blank': 'True'}),
'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}),
'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}),
'external_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'db_column': "'url_externe'", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'publication_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_publication'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'public'"}),
'target': ('django.db.models.fields.CharField', [], {'default': "'all'", 'max_length': '12', 'db_column': "'cible'"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'titre'"})
}
}
complete_apps = ['flatpages']
```
#### File: flatpages/migrations/0008_auto__add_field_flatpage_id.py
```python
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding M2M table for field portal on 'FlatPage'
m2m_table_name = 't_r_page_portal'
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('flatpage', models.ForeignKey(orm[u'flatpages.flatpage'], null=False)),
('targetportal', models.ForeignKey(orm[u'common.targetportal'], null=False))
))
db.create_unique(m2m_table_name, ['flatpage_id', 'targetportal_id'])
def backwards(self, orm):
# Removing M2M table for field portal on 'FlatPage'
db.delete_table('t_r_page_portal')
models = {
u'authent.structure': {
'Meta': {'ordering': "['name']", 'object_name': 'Structure'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'common.recordsource': {
'Meta': {'ordering': "['name']", 'object_name': 'RecordSource', 'db_table': "'o_b_source_fiche'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db_column': "'picto'", 'blank': 'True'}),
'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '256', 'null': 'True', 'db_column': "'website'", 'blank': 'True'})
},
u'common.targetportal': {
'Meta': {'ordering': "('name',)", 'object_name': 'TargetPortal', 'db_table': "'o_b_target_portal'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': "'True'", 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'unique': "'True'", 'max_length': '256', 'db_column': "'website'"})
},
u'flatpages.flatpage': {
'Meta': {'ordering': "['order', 'id']", 'object_name': 'FlatPage', 'db_table': "'p_t_page'"},
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'contenu'", 'blank': 'True'}),
'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}),
'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}),
'external_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'db_column': "'url_externe'", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'portal': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'flatpages'", 'blank': 'True', 'db_table': "'t_r_page_portal'", 'to': u"orm['common.TargetPortal']"}),
'publication_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_publication'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'public'"}),
'source': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'flatpages'", 'blank': 'True', 'db_table': "'t_r_page_source'", 'to': u"orm['common.RecordSource']"}),
'target': ('django.db.models.fields.CharField', [], {'default': "'all'", 'max_length': '12', 'db_column': "'cible'"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'titre'"})
}
}
complete_apps = ['flatpages']
```
#### File: geotrek/infrastructure/models.py
```python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db import models as gismodels
from extended_choices import Choices
from mapentity.models import MapEntityMixin
from geotrek.common.utils import classproperty
from geotrek.core.models import Topology, Path
from geotrek.authent.models import StructureRelatedManager, StructureRelated
INFRASTRUCTURE_TYPES = Choices(
('BUILDING', 'A', _("Building")),
('FACILITY', 'E', _("Facility")),
('SIGNAGE', 'S', _("Signage")),
)
class InfrastructureTypeQuerySet(models.query.QuerySet):
def for_infrastructures(self):
return self.exclude(type__exact=INFRASTRUCTURE_TYPES.SIGNAGE)
def for_signages(self):
return self.filter(type__exact=INFRASTRUCTURE_TYPES.SIGNAGE)
class InfrastructureTypeManager(models.Manager):
def get_queryset(self):
return InfrastructureTypeQuerySet(self.model, using=self._db)
def for_signages(self):
return self.get_queryset().for_signages()
def for_infrastructures(self):
return self.get_queryset().for_infrastructures()
class InfrastructureType(StructureRelated):
""" Types of infrastructures (bridge, WC, stairs, ...) """
label = models.CharField(db_column="nom", max_length=128)
type = models.CharField(db_column="type", max_length=1, choices=INFRASTRUCTURE_TYPES)
objects = InfrastructureTypeManager()
class Meta:
db_table = 'a_b_amenagement'
verbose_name = _(u"Infrastructure Type")
verbose_name_plural = _(u"Infrastructure Types")
ordering = ['label', 'type']
def __unicode__(self):
return self.label
class InfrastructureCondition(StructureRelated):
label = models.CharField(verbose_name=_(u"Name"), db_column="etat", max_length=250)
class Meta:
verbose_name = _(u"Infrastructure Condition")
verbose_name_plural = _(u"Infrastructure Conditions")
db_table = "a_b_etat"
def __unicode__(self):
return self.label
class BaseInfrastructure(MapEntityMixin, Topology, StructureRelated):
""" A generic infrastructure in the park """
topo_object = models.OneToOneField(Topology, parent_link=True,
db_column='evenement')
name = models.CharField(db_column="nom", max_length=128,
help_text=_(u"Reference, code, ..."), verbose_name=_("Name"))
description = models.TextField(blank=True, db_column='description',
verbose_name=_("Description"), help_text=_(u"Specificites"))
type = models.ForeignKey(InfrastructureType, db_column='type', verbose_name=_("Type"))
condition = models.ForeignKey(InfrastructureCondition, db_column='etat',
verbose_name=_("Condition"), blank=True, null=True,
on_delete=models.PROTECT)
class Meta:
db_table = 'a_t_amenagement'
def __unicode__(self):
return self.name
@property
def name_display(self):
return '<a href="%s" title="%s" >%s</a>' % (self.get_detail_url(),
self,
self)
@property
def name_csv_display(self):
return unicode(self)
@property
def type_display(self):
return unicode(self.type)
@property
def cities_display(self):
if hasattr(self, 'cities'):
return [unicode(c) for c in self.cities]
return []
@classproperty
def cities_verbose_name(cls):
return _("Cities")
class InfrastructureGISManager(gismodels.GeoManager):
""" Overide default typology mixin manager, and filter by type. """
def get_queryset(self):
return super(InfrastructureGISManager, self).get_queryset().exclude(type__type=INFRASTRUCTURE_TYPES.SIGNAGE)
class InfrastructureStructureManager(StructureRelatedManager):
""" Overide default structure related manager, and filter by type. """
def get_queryset(self):
return super(InfrastructureStructureManager, self).get_queryset().exclude(type__type=INFRASTRUCTURE_TYPES.SIGNAGE)
class Infrastructure(BaseInfrastructure):
""" An infrastructure in the park, which is not of type SIGNAGE """
objects = BaseInfrastructure.get_manager_cls(InfrastructureGISManager)()
in_structure = InfrastructureStructureManager()
class Meta:
proxy = True
verbose_name = _(u"Infrastructure")
verbose_name_plural = _(u"Infrastructures")
@classmethod
def path_infrastructures(cls, path):
return cls.objects.existing().filter(aggregations__path=path).distinct('pk')
@classmethod
def topology_infrastructures(cls, topology):
return cls.overlapping(topology)
Path.add_property('infrastructures', lambda self: Infrastructure.path_infrastructures(self), _(u"Infrastructures"))
Topology.add_property('infrastructures', lambda self: Infrastructure.topology_infrastructures(self), _(u"Infrastructures"))
class SignageGISManager(gismodels.GeoManager):
""" Overide default typology mixin manager, and filter by type. """
def get_queryset(self):
return super(SignageGISManager, self).get_queryset().filter(type__type=INFRASTRUCTURE_TYPES.SIGNAGE)
class SignageStructureManager(StructureRelatedManager):
""" Overide default structure related manager, and filter by type. """
def get_queryset(self):
return super(SignageStructureManager, self).get_queryset().filter(type__type=INFRASTRUCTURE_TYPES.SIGNAGE)
class Signage(BaseInfrastructure):
""" An infrastructure in the park, which is of type SIGNAGE """
objects = BaseInfrastructure.get_manager_cls(SignageGISManager)()
in_structure = SignageStructureManager()
class Meta:
proxy = True
verbose_name = _(u"Signage")
verbose_name_plural = _(u"Signages")
@classmethod
def path_signages(cls, path):
return cls.objects.existing().filter(aggregations__path=path).distinct('pk')
@classmethod
def topology_signages(cls, topology):
return cls.overlapping(topology)
Path.add_property('signages', lambda self: Signage.path_signages(self), _(u"Signages"))
Topology.add_property('signages', lambda self: Signage.topology_signages(self), _(u"Signages"))
```
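A hedged sketch of how the type manager and the two proxy models above split the shared a_t_amenagement table; this assumes a configured Geotrek/Django environment and only illustrates the filtering intent of the managers.

```python
from geotrek.infrastructure.models import (InfrastructureType, Infrastructure,
                                           Signage)

# InfrastructureTypeManager exposes the two filtered querysets defined above.
building_types = InfrastructureType.objects.for_infrastructures()  # excludes SIGNAGE
signage_types = InfrastructureType.objects.for_signages()          # SIGNAGE only

# The proxy models share one table; their GIS managers exclude or keep
# rows whose related type is SIGNAGE.
infrastructures = Infrastructure.objects.all()
signages = Signage.objects.all()
```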
#### File: land/tests/test_views.py
```python
from django.test import TestCase
from geotrek.common.tests import CommonTest
from geotrek.authent.factories import PathManagerFactory
from geotrek.core.factories import PathFactory, PathAggregationFactory
from geotrek.common.factories import OrganismFactory
from geotrek.land.models import (PhysicalEdge, LandEdge, CompetenceEdge,
WorkManagementEdge, SignageManagementEdge)
from geotrek.land.factories import (PhysicalEdgeFactory, LandEdgeFactory,
CompetenceEdgeFactory, WorkManagementEdgeFactory,
SignageManagementEdgeFactory, PhysicalTypeFactory,
LandTypeFactory)
class EdgeHelperTest(TestCase):
factory = None
helper_name = None
def test_path_helpers(self):
if not self.factory:
return # ignore abstract test
p = PathFactory.create()
self.assertEquals(len(getattr(p, self.helper_name)), 0)
l = self.factory.create(no_path=True)
PathAggregationFactory.create(topo_object=l, path=p)
self.assertEqual([o.pk for o in getattr(p, self.helper_name).all()],
[l.pk])
class LandEdgeTest(EdgeHelperTest):
factory = LandEdgeFactory
helper_name = 'land_edges'
class PhysicalEdgeTest(EdgeHelperTest):
factory = PhysicalEdgeFactory
helper_name = 'physical_edges'
class CompetenceEdgeTest(EdgeHelperTest):
factory = CompetenceEdgeFactory
helper_name = 'competence_edges'
class WorkManagementEdgeTest(EdgeHelperTest):
factory = WorkManagementEdgeFactory
helper_name = 'work_edges'
class SignageManagementEdgeTest(EdgeHelperTest):
factory = SignageManagementEdgeFactory
helper_name = 'signage_edges'
class PhysicalEdgeViewsTest(CommonTest):
model = PhysicalEdge
modelfactory = PhysicalEdgeFactory
userfactory = PathManagerFactory
def get_good_data(self):
path = PathFactory.create()
return {
'physical_type': PhysicalTypeFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
}
class LandEdgeViewsTest(CommonTest):
model = LandEdge
modelfactory = LandEdgeFactory
userfactory = PathManagerFactory
def get_good_data(self):
path = PathFactory.create()
return {
'land_type': LandTypeFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
}
class CompetenceEdgeViewsTest(CommonTest):
model = CompetenceEdge
modelfactory = CompetenceEdgeFactory
userfactory = PathManagerFactory
def get_good_data(self):
path = PathFactory.create()
return {
'organization': OrganismFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
}
class WorkManagementEdgeViewsTest(CommonTest):
model = WorkManagementEdge
modelfactory = WorkManagementEdgeFactory
userfactory = PathManagerFactory
def get_good_data(self):
path = PathFactory.create()
return {
'organization': OrganismFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
}
class SignageManagementEdgeViewsTest(CommonTest):
model = SignageManagementEdge
modelfactory = SignageManagementEdgeFactory
userfactory = PathManagerFactory
def get_good_data(self):
path = PathFactory.create()
return {
'organization': OrganismFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
}
```
#### File: tourism/migrations/0008_auto__add_field_touristiccontent_published__add_field_touristiccontent.py
```python
from south.db import db
from south.v2 import SchemaMigration
from django.conf import settings
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TouristicContent.published'
db.add_column('t_t_contenu_touristique', 'published',
self.gf('django.db.models.fields.BooleanField')(default=False, db_column='public'),
keep_default=False)
# Adding field 'TouristicContent.publication_date'
db.add_column('t_t_contenu_touristique', 'publication_date',
self.gf('django.db.models.fields.DateField')(null=True, db_column='date_publication', blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'TouristicContent.published'
db.delete_column('t_t_contenu_touristique', 'public')
# Deleting field 'TouristicContent.publication_date'
db.delete_column('t_t_contenu_touristique', 'date_publication')
models = {
u'authent.structure': {
'Meta': {'ordering': "['name']", 'object_name': 'Structure'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
u'tourism.datasource': {
'Meta': {'ordering': "['title', 'url']", 'object_name': 'DataSource', 'db_table': "'t_t_source_donnees'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'db_column': "'picto'"}),
'targets': ('multiselectfield.db.fields.MultiSelectField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'titre'"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_column': "'type'"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '400', 'db_column': "'url'"})
},
u'tourism.informationdesk': {
'Meta': {'ordering': "['name']", 'object_name': 'InformationDesk', 'db_table': "'o_b_renseignement'"},
'description': ('django.db.models.fields.TextField', [], {'db_column': "'description'", 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '256', 'null': 'True', 'db_column': "'email'", 'blank': 'True'}),
'geom': ('django.contrib.gis.db.models.fields.PointField', [], {'srid': str(settings.SRID), 'null': 'True', 'spatial_index': 'False', 'db_column': "'geom'", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'municipality': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'db_column': "'commune'", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'db_column': "'nom'"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'telephone'", 'blank': 'True'}),
'photo': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db_column': "'photo'", 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'db_column': "'code'", 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'db_column': "'rue'", 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'desks'", 'db_column': "'type'", 'to': u"orm['tourism.InformationDeskType']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '256', 'null': 'True', 'db_column': "'website'", 'blank': 'True'})
},
u'tourism.informationdesktype': {
'Meta': {'ordering': "['label']", 'object_name': 'InformationDeskType', 'db_table': "'o_b_type_renseignement'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'label'"}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db_column': "'picto'"})
},
u'tourism.touristiccontent': {
'Meta': {'object_name': 'TouristicContent', 'db_table': "'t_t_contenu_touristique'"},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'contents'", 'db_column': "'categorie'", 'to': u"orm['tourism.TouristicContentCategory']"}),
'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}),
'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'supprime'"}),
'geom': ('django.contrib.gis.db.models.fields.GeometryField', [], {'srid': str(settings.SRID)}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"}),
'publication_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_publication'", 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'public'"}),
'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"})
},
u'tourism.touristiccontentcategory': {
'Meta': {'ordering': "['label']", 'object_name': 'TouristicContentCategory', 'db_table': "'t_b_contenu_touristique'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"}),
'pictogram': ('django.db.models.fields.files.FileField', [], {'max_length': '512', 'null': 'True', 'db_column': "'picto'"})
}
}
complete_apps = ['tourism']
```
#### File: tourism/tests/test_models.py
```python
from django.test import TestCase
from django.conf import settings
from django.test.utils import override_settings
from geotrek.core import factories as core_factories
from geotrek.tourism import factories as tourism_factories
from geotrek.trekking import factories as trekking_factories
class TourismRelations(TestCase):
def setUp(self):
self.content = tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(1 1)' % settings.SRID)
self.content2 = tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(2 2)' % settings.SRID)
self.event = tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(50 50)' % settings.SRID)
self.event2 = tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(60 60)' % settings.SRID)
path = core_factories.PathFactory(geom='SRID=%s;LINESTRING(0 100, 100 100)' % settings.SRID)
self.trek = trekking_factories.TrekFactory(no_path=True)
self.trek.add_path(path)
self.poi = trekking_factories.POIFactory(no_path=True)
self.poi.add_path(path, start=0.5, end=0.5)
def test_spatial_link_with_tourism(self):
self.assertIn(self.content2, self.content.touristic_contents.all())
self.assertIn(self.event, self.content.touristic_events.all())
self.assertIn(self.content, self.event.touristic_contents.all())
self.assertIn(self.event2, self.event.touristic_events.all())
def test_spatial_links_do_not_self_intersect(self):
self.assertNotIn(self.content, self.content.touristic_contents.all())
self.assertNotIn(self.event, self.event.touristic_contents.all())
@override_settings(TOURISM_INTERSECTION_MARGIN=10)
def test_spatial_link_with_tourism_respects_limit(self):
self.assertNotIn(self.event, self.content.touristic_events.all())
self.assertNotIn(self.content, self.event.touristic_contents.all())
def test_spatial_link_with_topologies(self):
self.assertIn(self.trek, self.content.treks.all())
self.assertIn(self.poi, self.content.pois.all())
self.assertIn(self.trek, self.event.treks.all())
self.assertIn(self.poi, self.event.pois.all())
@override_settings(TOURISM_INTERSECTION_MARGIN=10)
def test_spatial_link_with_topologies_respects_limit(self):
self.assertNotIn(self.trek, self.content.treks.all())
self.assertNotIn(self.poi, self.content.pois.all())
self.assertNotIn(self.trek, self.event.treks.all())
self.assertNotIn(self.poi, self.event.pois.all())
def test_spatial_link_from_topologies(self):
self.assertIn(self.content, self.trek.touristic_contents.all())
self.assertIn(self.content, self.poi.touristic_contents.all())
self.assertIn(self.event, self.trek.touristic_events.all())
self.assertIn(self.event, self.poi.touristic_events.all())
@override_settings(TOURISM_INTERSECTION_MARGIN=10)
def test_spatial_link_from_topologies_respects_limit(self):
self.assertNotIn(self.content, self.trek.touristic_contents.all())
self.assertNotIn(self.content, self.poi.touristic_contents.all())
self.assertNotIn(self.event, self.trek.touristic_events.all())
self.assertNotIn(self.event, self.poi.touristic_events.all())
def test_spatial_link_from_trek_with_practice_distance(self):
self.trek.practice.distance = 2000
self.trek.practice.save()
self.assertIn(self.content, self.trek.touristic_contents.all())
self.assertIn(self.event, self.trek.touristic_events.all())
def test_spatial_link_from_trek_with_practice_distance_respects_limit(self):
self.trek.practice.distance = 10
self.trek.practice.save()
self.assertNotIn(self.content, self.trek.touristic_contents.all())
self.assertNotIn(self.event, self.trek.touristic_events.all())
```
#### File: geotrek/trekking/factories.py
```python
import factory
from django.contrib.gis.geos import Point
from . import models
from geotrek.core.factories import TopologyFactory, PointTopologyFactory
from geotrek.common.utils.testdata import dummy_filefield_as_sequence
class TrekNetworkFactory(factory.Factory):
FACTORY_FOR = models.TrekNetwork
network = factory.Sequence(lambda n: u"network %s" % n)
class PracticeFactory(factory.Factory):
FACTORY_FOR = models.Practice
name = factory.Sequence(lambda n: u"usage %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
class AccessibilityFactory(factory.Factory):
FACTORY_FOR = models.Accessibility
name = factory.Sequence(lambda n: u"accessibility %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
class RouteFactory(factory.Factory):
FACTORY_FOR = models.Route
route = factory.Sequence(lambda n: u"route %s" % n)
class DifficultyLevelFactory(factory.Factory):
FACTORY_FOR = models.DifficultyLevel
difficulty = factory.Sequence(lambda n: u"difficulty %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
class WebLinkCategoryFactory(factory.Factory):
FACTORY_FOR = models.WebLinkCategory
label = factory.Sequence(lambda n: u"Category %s" % n)
pictogram = dummy_filefield_as_sequence('thumbnail %s')
class WebLinkFactory(factory.Factory):
FACTORY_FOR = models.WebLink
name = factory.Sequence(lambda n: u"web link name %s" % n)
url = factory.Sequence(lambda n: u"http://dummy.url/%s" % n)
category = factory.SubFactory(WebLinkCategoryFactory)
class TrekFactory(TopologyFactory):
FACTORY_FOR = models.Trek
name = factory.Sequence(lambda n: u"name %s" % n)
departure = factory.Sequence(lambda n: u"departure %s" % n)
arrival = factory.Sequence(lambda n: u"arrival %s" % n)
published = True
length = 10
ascent = 0
descent = 0
min_elevation = 0
max_elevation = 0
description_teaser = factory.Sequence(lambda n: u"<p>description_teaser %s</p>" % n)
description = factory.Sequence(lambda n: u"<p>description %s</p>" % n)
ambiance = factory.Sequence(lambda n: u"<p>ambiance %s</p>" % n)
access = factory.Sequence(lambda n: u"<p>access %s</p>" % n)
disabled_infrastructure = factory.Sequence(lambda n: u"<p>disabled_infrastructure %s</p>" % n)
duration = 1.5 # hour
is_park_centered = False
advised_parking = factory.Sequence(lambda n: u"<p>Advised parking %s</p>" % n)
parking_location = Point(1, 1)
public_transport = factory.Sequence(lambda n: u"<p>Public transport %s</p>" % n)
advice = factory.Sequence(lambda n: u"<p>Advice %s</p>" % n)
route = factory.SubFactory(RouteFactory)
difficulty = factory.SubFactory(DifficultyLevelFactory)
practice = factory.SubFactory(PracticeFactory)
@classmethod
def _prepare(cls, create, **kwargs):
sources = kwargs.pop('sources', None)
portals = kwargs.pop('portals', None)
trek = super(TrekFactory, cls)._prepare(create, **kwargs)
if create:
if sources:
for source in sources:
trek.source.add(source)
if portals:
for portal in portals:
trek.portal.add(portal)
return trek
class TrekWithPOIsFactory(TrekFactory):
@classmethod
def _prepare(cls, create, **kwargs):
trek = super(TrekWithPOIsFactory, cls)._prepare(create, **kwargs)
path = trek.paths.all()[0]
poi1 = POIFactory.create(no_path=True)
poi1.add_path(path, start=0.5, end=0.5)
poi2 = POIFactory.create(no_path=True)
poi2.add_path(path, start=0.4, end=0.4)
if create:
trek.save()
return trek
class TrekWithServicesFactory(TrekFactory):
@classmethod
def _prepare(cls, create, **kwargs):
trek = super(TrekWithServicesFactory, cls)._prepare(create, **kwargs)
path = trek.paths.all()[0]
service1 = ServiceFactory.create(no_path=True)
service1.add_path(path, start=0.5, end=0.5)
service1.type.practices.add(trek.practice)
service2 = ServiceFactory.create(no_path=True)
service2.add_path(path, start=0.4, end=0.4)
service2.type.practices.add(trek.practice)
if create:
trek.save()
return trek
class TrekRelationshipFactory(factory.Factory):
FACTORY_FOR = models.TrekRelationship
has_common_departure = False
has_common_edge = False
is_circuit_step = False
trek_a = factory.SubFactory(TrekFactory)
trek_b = factory.SubFactory(TrekFactory)
class POITypeFactory(factory.Factory):
FACTORY_FOR = models.POIType
label = factory.Sequence(lambda n: u"POIType %s" % n)
pictogram = dummy_filefield_as_sequence('pictogram %s')
class POIFactory(PointTopologyFactory):
FACTORY_FOR = models.POI
name = factory.Sequence(lambda n: u"POI %s" % n)
description = factory.Sequence(lambda n: u"<p>description %s</p>" % n)
type = factory.SubFactory(POITypeFactory)
published = True
class ServiceTypeFactory(factory.Factory):
FACTORY_FOR = models.ServiceType
name = factory.Sequence(lambda n: u"ServiceType %s" % n)
pictogram = dummy_filefield_as_sequence('pictogram %s')
published = True
class ServiceFactory(PointTopologyFactory):
FACTORY_FOR = models.Service
type = factory.SubFactory(ServiceTypeFactory)
```
#### File: trekking/tests/test_views.py
```python
import os
import json
import datetime
from collections import OrderedDict
import mock
from bs4 import BeautifulSoup
from django.conf import settings
from django.test import TestCase
from django.contrib.auth.models import User, Group, Permission
from django.contrib.gis.geos import LineString, MultiPoint, Point
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.template.loader import find_template
from django.test import RequestFactory
from django.test.utils import override_settings
from django.utils import translation
from django.utils.timezone import utc, make_aware
from django.utils.unittest import util as testutil
from mapentity.tests import MapEntityLiveTest
from mapentity.factories import SuperUserFactory
from geotrek.authent.models import default_structure
from geotrek.common.factories import (AttachmentFactory, ThemeFactory,
RecordSourceFactory, TargetPortalFactory)
from geotrek.common.tests import CommonTest, TranslationResetMixin
from geotrek.common.utils.testdata import get_dummy_uploaded_image
from geotrek.authent.factories import TrekkingManagerFactory, StructureFactory, UserProfileFactory
from geotrek.authent.tests.base import AuthentFixturesTest
from geotrek.core.factories import PathFactory
from geotrek.zoning.factories import DistrictFactory, CityFactory
from geotrek.trekking.models import POI, Trek, Service, OrderedTrekChild
from geotrek.trekking.factories import (POIFactory, POITypeFactory, TrekFactory, TrekWithPOIsFactory,
TrekNetworkFactory, WebLinkFactory, AccessibilityFactory,
TrekRelationshipFactory, ServiceFactory, ServiceTypeFactory,
TrekWithServicesFactory)
from geotrek.trekking.templatetags import trekking_tags
from geotrek.trekking.serializers import timestamp
from geotrek.trekking import views as trekking_views
from geotrek.tourism import factories as tourism_factories
# Make sure to register Trek model
from geotrek.trekking import urls # NOQA
from .base import TrekkingManagerTest
class POIViewsTest(CommonTest):
model = POI
modelfactory = POIFactory
userfactory = TrekkingManagerFactory
def get_good_data(self):
PathFactory.create()
return {
'name_fr': 'test',
'name_en': 'test',
'description_fr': 'ici',
'description_en': 'here',
'type': POITypeFactory.create().pk,
'topology': '{"lat": 5.1, "lng": 6.6}',
'structure': default_structure().pk
}
def test_empty_topology(self):
self.login()
data = self.get_good_data()
data['topology'] = ''
response = self.client.post(self.model.get_add_url(), data)
self.assertEqual(response.status_code, 200)
form = self.get_form(response)
self.assertEqual(form.errors, {'topology': [u'Topology is empty.']})
def test_listing_number_queries(self):
self.login()
# Create many instances
for i in range(100):
self.modelfactory.create()
for i in range(10):
DistrictFactory.create()
# Enable query counting
settings.DEBUG = True
for url in [self.model.get_jsonlist_url(),
self.model.get_format_list_url()]:
num_queries_old = len(connection.queries)
self.client.get(url)
num_queries_new = len(connection.queries)
nb_queries = num_queries_new - num_queries_old
self.assertTrue(0 < nb_queries < 100, '%s queries !' % nb_queries)
settings.DEBUG = False
class POIJSONDetailTest(TrekkingManagerTest):
def setUp(self):
self.login()
polygon = 'SRID=%s;MULTIPOLYGON(((700000 6600000, 700000 6600003, 700003 6600003, 700003 6600000, 700000 6600000)))' % settings.SRID
self.city = CityFactory(geom=polygon)
self.district = DistrictFactory(geom=polygon)
self.poi = POIFactory.create(published=True)
self.attachment = AttachmentFactory.create(obj=self.poi,
attachment_file=get_dummy_uploaded_image())
self.touristic_content = tourism_factories.TouristicContentFactory(
geom='SRID=%s;POINT(700001 6600001)' % settings.SRID, published=True)
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(700001 6600001)' % settings.SRID,
published=False) # not published
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(700001 6600001)' % settings.SRID,
published=True).delete() # deleted
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(701000 6601000)' % settings.SRID,
published=True) # too far
self.touristic_event = tourism_factories.TouristicEventFactory(
geom='SRID=%s;POINT(700002 6600002)' % settings.SRID, published=True)
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(700002 6600002)' % settings.SRID,
published=False) # not published
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(700002 6600002)' % settings.SRID,
published=True).delete() # deleted
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(702000 6602000)' % settings.SRID,
published=True) # too far
self.pk = self.poi.pk
url = '/api/en/pois/%s.json' % self.pk
self.response = self.client.get(url)
self.result = json.loads(self.response.content)
def test_name(self):
self.assertEqual(self.result['name'],
self.poi.name)
def test_slug(self):
self.assertEqual(self.result['slug'],
self.poi.slug)
def test_published(self):
self.assertEqual(self.result['published'], True)
def test_published_status(self):
self.assertDictEqual(self.result['published_status'][0],
{u'lang': u'en', u'status': True, u'language': u'English'})
def test_type(self):
self.assertDictEqual(self.result['type'],
{'id': self.poi.type.pk,
'label': self.poi.type.label,
'pictogram': os.path.join(settings.MEDIA_URL, self.poi.type.pictogram.name),
})
def test_altimetry(self):
self.assertEqual(self.result['min_elevation'], 0.0)
def test_cities(self):
self.assertDictEqual(self.result['cities'][0],
{u"code": self.city.code,
u"name": self.city.name})
def test_districts(self):
self.assertDictEqual(self.result['districts'][0],
{u"id": self.district.id,
u"name": self.district.name})
def test_related_urls(self):
self.assertEqual(self.result['map_image_url'],
'/image/poi-%s.png' % self.pk)
self.assertEqual(self.result['filelist_url'],
'/paperclip/get/trekking/poi/%s/' % self.pk)
def test_touristic_contents(self):
self.assertEqual(len(self.result['touristic_contents']), 1)
self.assertDictEqual(self.result['touristic_contents'][0], {
u'id': self.touristic_content.pk,
u'category_id': self.touristic_content.prefixed_category_id})
def test_touristic_events(self):
self.assertEqual(len(self.result['touristic_events']), 1)
self.assertDictEqual(self.result['touristic_events'][0], {
u'id': self.touristic_event.pk,
u'category_id': 'E'})
class TrekViewsTest(CommonTest):
model = Trek
modelfactory = TrekFactory
userfactory = TrekkingManagerFactory
def get_bad_data(self):
return OrderedDict([
('name_en', ''),
('trek_relationship_a-TOTAL_FORMS', '0'),
('trek_relationship_a-INITIAL_FORMS', '1'),
('trek_relationship_a-MAX_NUM_FORMS', '0'),
]), u'This field is required.'
def get_good_data(self):
path = PathFactory.create()
return {
'name_fr': 'Huhu',
'name_en': 'Hehe',
'departure_fr': '',
'departure_en': '',
'arrival_fr': '',
'arrival_en': '',
'published': '',
'difficulty': '',
'route': '',
'description_teaser_fr': '',
'description_teaser_en': '',
'description_fr': '',
'description_en': '',
'ambiance_fr': '',
'ambiance_en': '',
'access_fr': '',
'access_en': '',
'disabled_infrastructure_fr': '',
'disabled_infrastructure_en': '',
'duration': '0',
'is_park_centered': '',
'advised_parking': 'Very close',
'parking_location': 'POINT (1.0 1.0)',
'public_transport': 'huhu',
'advice_fr': '',
'advice_en': '',
'themes': ThemeFactory.create().pk,
'networks': TrekNetworkFactory.create().pk,
'practice': '',
'accessibilities': AccessibilityFactory.create().pk,
'web_links': WebLinkFactory.create().pk,
'information_desks': tourism_factories.InformationDeskFactory.create().pk,
'topology': '{"paths": [%s]}' % path.pk,
'trek_relationship_a-TOTAL_FORMS': '2',
'trek_relationship_a-INITIAL_FORMS': '0',
'trek_relationship_a-MAX_NUM_FORMS': '',
'trek_relationship_a-0-id': '',
'trek_relationship_a-0-trek_b': TrekFactory.create().pk,
'trek_relationship_a-0-has_common_edge': 'on',
'trek_relationship_a-0-has_common_departure': 'on',
'trek_relationship_a-0-is_circuit_step': '',
'trek_relationship_a-1-id': '',
'trek_relationship_a-1-trek_b': TrekFactory.create().pk,
'trek_relationship_a-1-has_common_edge': '',
'trek_relationship_a-1-has_common_departure': '',
'trek_relationship_a-1-is_circuit_step': 'on',
'structure': default_structure().pk
}
def test_badfield_goodgeom(self):
self.login()
bad_data, form_error = self.get_bad_data()
bad_data['parking_location'] = 'POINT (1.0 1.0)' # good data
url = self.model.get_add_url()
response = self.client.post(url, bad_data)
self.assertEqual(response.status_code, 200)
form = self.get_form(response)
self.assertEqual(form.data['parking_location'], bad_data['parking_location'])
def test_basic_format(self):
super(TrekViewsTest, self).test_basic_format()
self.modelfactory.create(name="ukélélé") # trek with utf8
for fmt in ('csv', 'shp', 'gpx'):
response = self.client.get(self.model.get_format_list_url() + '?format=' + fmt)
self.assertEqual(response.status_code, 200)
class TrekViewsLiveTest(MapEntityLiveTest):
model = Trek
modelfactory = TrekFactory
userfactory = SuperUserFactory
class TrekCustomViewTests(TrekkingManagerTest):
def setUp(self):
self.login()
def test_pois_geojson(self):
trek = TrekWithPOIsFactory.create(published=True)
self.assertEqual(len(trek.pois), 2)
poi = trek.pois[0]
poi.published = True
poi.save()
AttachmentFactory.create(obj=poi, attachment_file=get_dummy_uploaded_image())
self.assertNotEqual(poi.thumbnail, None)
self.assertEqual(len(trek.pois), 2)
url = '/api/en/treks/{pk}/pois.geojson'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
poislayer = json.loads(response.content)
poifeature = poislayer['features'][0]
self.assertTrue('thumbnail' in poifeature['properties'])
def test_services_geojson(self):
trek = TrekWithServicesFactory.create(published=True)
self.assertEqual(len(trek.services), 2)
service = trek.services[0]
service.published = True
service.save()
self.assertEqual(len(trek.services), 2)
url = '/api/en/treks/{pk}/services.geojson'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
serviceslayer = json.loads(response.content)
servicefeature = serviceslayer['features'][0]
self.assertTrue('type' in servicefeature['properties'])
def test_kml(self):
trek = TrekWithPOIsFactory.create()
url = '/api/en/treks/{pk}/slug.kml'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/vnd.google-earth.kml+xml')
def test_not_published_profile_json(self):
trek = TrekFactory.create(published=False)
url = '/api/en/treks/{pk}/profile.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
def test_not_published_elevation_area_json(self):
trek = TrekFactory.create(published=False)
url = '/api/en/treks/{pk}/dem.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
def test_profile_svg(self):
trek = TrekFactory.create()
url = '/api/en/treks/{pk}/profile.svg'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'image/svg+xml')
def test_weblink_popup(self):
url = reverse('trekking:weblink_add')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
@override_settings(TREK_EXPORT_POI_LIST_LIMIT=1)
@mock.patch('mapentity.models.MapEntityMixin.prepare_map_image')
@mock.patch('mapentity.models.MapEntityMixin.get_attributes_html')
def test_trek_export_poi_list_limit(self, mocked_prepare, mocked_attributes):
trek = TrekWithPOIsFactory.create()
self.assertEqual(len(trek.pois), 2)
poi = trek.pois[0]
poi.published = True
poi.save()
view = trekking_views.TrekDocumentPublic()
view.object = trek
view.request = RequestFactory().get('/')
view.kwargs = {}
view.kwargs[view.pk_url_kwarg] = trek.pk
context = view.get_context_data()
self.assertEqual(len(context['pois']), 1)
class TrekCustomPublicViewTests(TrekkingManagerTest):
@mock.patch('django.template.loaders.filesystem.open', create=True)
def test_overriden_public_template(self, open_patched):
overriden_template = os.path.join(settings.MEDIA_ROOT, 'templates', 'trekking', 'trek_public.odt')
def fake_exists(f, *args):
if f == overriden_template:
return mock.MagicMock(spec=file)
raise IOError
open_patched.side_effect = fake_exists
find_template('trekking/trek_public.odt')
open_patched.assert_called_with(overriden_template, 'rb')
def test_profile_json(self):
trek = TrekFactory.create(published=True)
url = '/api/en/treks/{pk}/profile.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
def test_not_published_profile_json(self):
trek = TrekFactory.create(published=False)
url = '/api/en/treks/{pk}/profile.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
def test_elevation_area_json(self):
trek = TrekFactory.create(published=True)
url = '/api/en/treks/{pk}/dem.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'application/json')
def test_not_published_elevation_area_json(self):
trek = TrekFactory.create(published=False)
url = '/api/en/treks/{pk}/dem.json'.format(pk=trek.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
class TrekJSONDetailTest(TrekkingManagerTest):
""" Since we migrated some code to Django REST Framework, we should test
the migration extensively. Geotrek-rando mainly relies on this view.
"""
def setUp(self):
self.login()
polygon = 'SRID=%s;MULTIPOLYGON(((0 0, 0 3, 3 3, 3 0, 0 0)))' % settings.SRID
self.city = CityFactory(geom=polygon)
self.district = DistrictFactory(geom=polygon)
self.trek = TrekFactory.create(
name='Step 2',
no_path=True,
points_reference=MultiPoint([Point(0, 0), Point(1, 1)], srid=settings.SRID),
parking_location=Point(0, 0, srid=settings.SRID)
)
path1 = PathFactory.create(geom='SRID=%s;LINESTRING(0 0, 1 0)' % settings.SRID)
self.trek.add_path(path1)
self.attachment = AttachmentFactory.create(obj=self.trek,
attachment_file=get_dummy_uploaded_image())
self.information_desk = tourism_factories.InformationDeskFactory.create()
self.trek.information_desks.add(self.information_desk)
self.theme = ThemeFactory.create()
self.trek.themes.add(self.theme)
self.accessibility = AccessibilityFactory.create()
self.trek.accessibilities.add(self.accessibility)
self.network = TrekNetworkFactory.create()
self.trek.networks.add(self.network)
self.weblink = WebLinkFactory.create()
self.trek.web_links.add(self.weblink)
self.source = RecordSourceFactory.create()
self.trek.source.add(self.source)
self.portal = TargetPortalFactory.create()
self.trek.portal.add(self.portal)
self.trek_b = TrekFactory.create(no_path=True,
geom='SRID=%s;POINT(2 2)' % settings.SRID,
published=True)
path2 = PathFactory.create(geom='SRID=%s;LINESTRING(0 1, 1 1)' % settings.SRID)
self.trek_b.add_path(path2)
TrekRelationshipFactory.create(has_common_departure=True,
has_common_edge=False,
is_circuit_step=True,
trek_a=self.trek,
trek_b=self.trek_b)
self.touristic_content = tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(1 1)' % settings.SRID,
published=True)
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(1 1)' % settings.SRID,
published=False) # not published
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(1 1)' % settings.SRID,
published=True).delete() # deleted
tourism_factories.TouristicContentFactory(geom='SRID=%s;POINT(1000 1000)' % settings.SRID,
published=True) # too far
self.touristic_event = tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(2 2)' % settings.SRID,
published=True)
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(2 2)' % settings.SRID,
published=False) # not published
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(2 2)' % settings.SRID,
published=True).delete() # deleted
tourism_factories.TouristicEventFactory(geom='SRID=%s;POINT(2000 2000)' % settings.SRID,
published=True) # too far
trek2 = TrekFactory(no_path=True, published=False) # not published
trek2.add_path(path2)
self.trek3 = TrekFactory(no_path=True, published=True) # deleted
self.trek3.add_path(path2)
self.trek3.delete()
trek4 = TrekFactory(no_path=True, published=True) # too far
trek4.add_path(PathFactory.create(geom='SRID=%s;LINESTRING(0 2000, 1 2000)' % settings.SRID))
self.parent = TrekFactory.create(published=True, name='Parent')
self.child1 = TrekFactory.create(published=False, name='Child 1')
self.child2 = TrekFactory.create(published=True, name='Child 2')
self.sibling = TrekFactory.create(published=True, name='Sibling')
OrderedTrekChild(parent=self.parent, child=self.trek, order=0).save()
OrderedTrekChild(parent=self.trek, child=self.child1, order=3).save()
OrderedTrekChild(parent=self.trek, child=self.child2, order=2).save()
OrderedTrekChild(parent=self.parent, child=self.sibling, order=1).save()
self.pk = self.trek.pk
url = '/api/en/treks/{pk}.json'.format(pk=self.pk)
self.response = self.client.get(url)
self.result = json.loads(self.response.content)
def test_related_urls(self):
self.assertEqual(self.result['elevation_area_url'],
'/api/en/treks/{pk}/dem.json'.format(pk=self.pk))
self.assertEqual(self.result['map_image_url'],
'/image/trek-%s-en.png' % self.pk)
self.assertEqual(self.result['altimetric_profile'],
'/api/en/treks/{pk}/profile.json'.format(pk=self.pk))
self.assertEqual(self.result['filelist_url'],
'/paperclip/get/trekking/trek/%s/' % self.pk)
self.assertEqual(self.result['gpx'],
'/api/en/treks/{pk}/{slug}.gpx'.format(pk=self.pk, slug=self.trek.slug))
self.assertEqual(self.result['kml'],
'/api/en/treks/{pk}/{slug}.kml'.format(pk=self.pk, slug=self.trek.slug))
self.assertEqual(self.result['printable'],
'/api/en/treks/{pk}/{slug}.pdf'.format(pk=self.pk, slug=self.trek.slug))
def test_thumbnail(self):
self.assertEqual(self.result['thumbnail'],
os.path.join(settings.MEDIA_URL,
self.attachment.attachment_file.name) + '.120x120_q85_crop.png')
def test_published_status(self):
self.assertDictEqual(self.result['published_status'][0],
{u'lang': u'en', u'status': True, u'language': u'English'})
def test_pictures(self):
self.assertDictEqual(self.result['pictures'][0],
{u'url': os.path.join(settings.MEDIA_URL,
self.attachment.attachment_file.name) + '.800x800_q85.png',
u'title': self.attachment.title,
u'legend': self.attachment.legend,
u'author': self.attachment.author})
def test_cities(self):
self.assertDictEqual(self.result['cities'][0],
{u"code": self.city.code,
u"name": self.city.name})
def test_districts(self):
self.assertDictEqual(self.result['districts'][0],
{u"id": self.district.id,
u"name": self.district.name})
def test_networks(self):
self.assertDictEqual(self.result['networks'][0],
{u"id": self.network.id,
u"pictogram": None,
u"name": self.network.network})
def test_practice_not_none(self):
self.assertDictEqual(self.result['practice'],
{u"id": self.trek.practice.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.trek.practice.pictogram.name),
u"label": self.trek.practice.name})
def test_usages(self): # Rando v1 compat
self.assertDictEqual(self.result['usages'][0],
{u"id": self.trek.practice.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.trek.practice.pictogram.name),
u"label": self.trek.practice.name})
def test_accessibilities(self):
self.assertDictEqual(self.result['accessibilities'][0],
{u"id": self.accessibility.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.accessibility.pictogram.name),
u"label": self.accessibility.name})
def test_themes(self):
self.assertDictEqual(self.result['themes'][0],
{u"id": self.theme.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.theme.pictogram.name),
u"label": self.theme.label})
def test_weblinks(self):
self.assertDictEqual(self.result['web_links'][0],
{u"id": self.weblink.id,
u"url": self.weblink.url,
u"name": self.weblink.name,
u"category": {
u"id": self.weblink.category.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.weblink.category.pictogram.name),
u"label": self.weblink.category.label}
})
def test_route_not_none(self):
self.assertDictEqual(self.result['route'],
{u"id": self.trek.route.id,
u"pictogram": None,
u"label": self.trek.route.route})
def test_difficulty_not_none(self):
self.assertDictEqual(self.result['difficulty'],
{u"id": self.trek.difficulty.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.trek.difficulty.pictogram.name),
u"label": self.trek.difficulty.difficulty})
def test_information_desks(self):
desk_type = self.information_desk.type
self.assertDictEqual(self.result['information_desks'][0],
{u'description': self.information_desk.description,
u'email': self.information_desk.email,
u'latitude': self.information_desk.latitude,
u'longitude': self.information_desk.longitude,
u'name': self.information_desk.name,
u'phone': self.information_desk.phone,
u'photo_url': self.information_desk.photo_url,
u'postal_code': self.information_desk.postal_code,
u'street': self.information_desk.street,
u'municipality': self.information_desk.municipality,
u'website': self.information_desk.website,
u'type': {
u'id': desk_type.id,
u'pictogram': desk_type.pictogram.url,
u'label': desk_type.label}})
def test_relationships(self):
self.assertDictEqual(self.result['relationships'][0],
{u'published': self.trek_b.published,
u'has_common_departure': True,
u'has_common_edge': False,
u'is_circuit_step': True,
u'trek': {u'pk': self.trek_b.pk,
u'id': self.trek_b.id,
u'slug': self.trek_b.slug,
u'category_slug': u'trek',
u'name': self.trek_b.name}})
def test_parking_location_in_wgs84(self):
parking_location = self.result['parking_location']
self.assertEqual(parking_location[0], -1.3630812101179004)
def test_points_reference_are_exported_in_wgs84(self):
geojson = self.result['points_reference']
self.assertEqual(geojson['type'], 'MultiPoint')
self.assertEqual(geojson['coordinates'][0][0], -1.3630812101179)
def test_touristic_contents(self):
self.assertEqual(len(self.result['touristic_contents']), 1)
self.assertDictEqual(self.result['touristic_contents'][0], {
u'id': self.touristic_content.pk,
u'category_id': self.touristic_content.prefixed_category_id})
def test_touristic_events(self):
self.assertEqual(len(self.result['touristic_events']), 1)
self.assertDictEqual(self.result['touristic_events'][0], {
u'id': self.touristic_event.pk,
u'category_id': self.touristic_event.prefixed_category_id})
def test_close_treks(self):
self.assertEqual(len(self.result['treks']), 1)
self.assertDictEqual(self.result['treks'][0], {
u'id': self.trek_b.pk,
u'category_id': self.trek_b.prefixed_category_id})
def test_type1(self):
self.assertDictEqual(self.result['type1'][0],
{u"id": self.trek.practice.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.trek.practice.pictogram.name),
u"name": self.trek.practice.name})
def test_type2(self):
self.assertDictEqual(self.result['type2'][0],
{u"id": self.accessibility.id,
u"pictogram": os.path.join(settings.MEDIA_URL, self.accessibility.pictogram.name),
u"name": self.accessibility.name})
def test_category(self):
self.assertDictEqual(self.result['category'],
{u"id": 'T',
u"order": None,
u"label": u"Trek",
u"slug": u"trek",
u"type1_label": u"Practice",
u"type2_label": u"Accessibility",
u"pictogram": u"/static/trekking/trek.svg"})
def test_sources(self):
self.assertDictEqual(self.result['source'][0], {
u'name': self.source.name,
u'website': self.source.website,
u"pictogram": os.path.join(settings.MEDIA_URL, self.source.pictogram.name)})
def portals(self):
self.assertDictEqual(self.result['portal'][0], {
u'name': self.portal.name,
u'website': self.portal.website, })
def test_children(self):
self.assertEqual(self.result['children'], [self.child2.pk, self.child1.pk])
def test_parents(self):
self.assertEqual(self.result['parents'], [self.parent.pk])
def test_previous(self):
self.assertDictEqual(self.result['previous'],
{u"%s" % self.parent.pk: None})
def test_next(self):
self.assertDictEqual(self.result['next'],
{u"%s" % self.parent.pk: self.sibling.pk})
class TrekPointsReferenceTest(TrekkingManagerTest):
def setUp(self):
self.login()
self.trek = TrekFactory.create()
self.trek.points_reference = MultiPoint([Point(0, 0), Point(1, 1)], srid=settings.SRID)
self.trek.save()
def test_points_reference_editable_as_hidden_input(self):
url = self.trek.get_update_url()
response = self.client.get(url)
self.assertContains(response, 'name="points_reference"')
@override_settings(TREK_POINTS_OF_REFERENCE_ENABLED=False)
def test_points_reference_is_marked_as_disabled_when_disabled(self):
url = self.trek.get_update_url()
response = self.client.get(url)
self.assertNotContains(response, 'name="points_reference"')
class TrekGPXTest(TrekkingManagerTest):
def setUp(self):
# Create a simple fake DEM
conn = connections[DEFAULT_DB_ALIAS]
cur = conn.cursor()
cur.execute('CREATE TABLE mnt (rid serial primary key, rast raster)')
cur.execute('INSERT INTO mnt (rast) VALUES (ST_MakeEmptyRaster(10, 10, 700040, 6600040, 10, 10, 0, 0, %s))',
[settings.SRID])
cur.execute('UPDATE mnt SET rast = ST_AddBand(rast, \'16BSI\')')
for y in range(0, 1):
for x in range(0, 1):
cur.execute('UPDATE mnt SET rast = ST_SetValue(rast, %s, %s, %s::float)', [x + 1, y + 1, 42])
self.login()
self.trek = TrekWithPOIsFactory.create()
self.trek.description_en = 'Nice trek'
self.trek.description_it = 'Bonnito iti'
self.trek.description_fr = '<NAME>'
self.trek.save()
for poi in self.trek.pois.all():
poi.description_it = poi.description
poi.save()
url = '/api/it/treks/{pk}/slug.gpx'.format(pk=self.trek.pk)
self.response = self.client.get(url)
self.parsed = BeautifulSoup(self.response.content)
def tearDown(self):
translation.deactivate()
def test_gpx_is_served_with_content_type(self):
self.assertEqual(self.response.status_code, 200)
self.assertEqual(self.response['Content-Type'], 'application/gpx+xml')
def test_gpx_trek_as_route_points(self):
self.assertEqual(len(self.parsed.findAll('rte')), 1)
self.assertEqual(len(self.parsed.findAll('rtept')), 2)
def test_gpx_translated_using_another_language(self):
route = self.parsed.findAll('rte')[0]
description = route.find('desc').string
self.assertTrue(description.startswith(self.trek.description_it))
def test_gpx_contains_pois(self):
waypoints = self.parsed.findAll('wpt')
pois = self.trek.pois.all()
self.assertEqual(len(waypoints), len(pois))
waypoint = waypoints[0]
name = waypoint.find('name').string
description = waypoint.find('desc').string
elevation = waypoint.find('ele').string
self.assertEqual(name, u"%s: %s" % (pois[0].type, pois[0].name))
self.assertEqual(description, pois[0].description)
self.assertEqual(waypoint['lat'], '46.5003601787')
self.assertEqual(waypoint['lon'], '3.00052158552')
self.assertEqual(elevation, '42.0')
class TrekViewTranslationTest(TrekkingManagerTest):
def setUp(self):
self.trek = TrekFactory.build()
self.trek.name_fr = 'Voie lactee'
self.trek.name_en = 'Milky way'
self.trek.name_it = 'Via Lattea'
self.trek.published_fr = True
self.trek.published_it = False
self.trek.save()
def tearDown(self):
translation.deactivate()
self.client.logout()
def test_json_translation(self):
for lang, expected in [('fr', self.trek.name_fr),
('it', 404)]:
url = '/api/{lang}/treks/{pk}.json'.format(lang=lang, pk=self.trek.pk)
response = self.client.get(url)
if expected == 404:
self.assertEqual(response.status_code, 404)
else:
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
self.assertEqual(obj['name'], expected)
def test_geojson_translation(self):
url = '/api/trek/trek.geojson'
for lang, expected in [('fr', self.trek.name_fr),
('it', self.trek.name_it)]:
self.login()
response = self.client.get(url, HTTP_ACCEPT_LANGUAGE=lang)
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
self.assertEqual(obj['features'][0]['properties']['name'], expected)
self.client.logout() # Django 1.6 keeps language in session
def test_published_translation(self):
url = '/api/trek/trek.geojson'
for lang, expected in [('fr', self.trek.published_fr),
('it', self.trek.published_it)]:
self.login()
response = self.client.get(url, HTTP_ACCEPT_LANGUAGE=lang)
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
self.assertEqual(obj['features'][0]['properties']['published'], expected)
self.client.logout() # Django 1.6 keeps language in session
def test_poi_geojson_translation(self):
# Create a Trek with a POI
trek = TrekFactory.create(no_path=True, published_fr=True, published_it=True)
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
poi = POIFactory.create(no_path=True)
poi.name_fr = "Chapelle"
poi.name_en = "Chapel"
poi.name_it = "Capela"
poi.published_fr = True
poi.published_en = True
poi.published_it = True
poi.save()
trek.add_path(p1, start=0.5)
poi.add_path(p1, start=0.6, end=0.6)
# Check that the translation applies to the GeoJSON output as well:
self.assertEqual(len(trek.pois), 1)
poi = trek.pois[0]
for lang, expected in [('fr', poi.name_fr),
('it', poi.name_it)]:
url = '/api/{lang}/treks/{pk}/pois.geojson'.format(lang=lang, pk=trek.pk)
self.login()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
obj = json.loads(response.content)
jsonpoi = obj.get('features', [])[0]
self.assertEqual(jsonpoi.get('properties', {}).get('name'), expected)
self.client.logout() # Django 1.6 keeps language in session
class TemplateTagsTest(TestCase):
def test_duration(self):
self.assertEqual(u"15 min", trekking_tags.duration(0.25))
self.assertEqual(u"30 min", trekking_tags.duration(0.5))
self.assertEqual(u"1 h", trekking_tags.duration(1))
self.assertEqual(u"1 h 45", trekking_tags.duration(1.75))
self.assertEqual(u"3 h 30", trekking_tags.duration(3.5))
self.assertEqual(u"4 h", trekking_tags.duration(4))
self.assertEqual(u"6 h", trekking_tags.duration(6))
self.assertEqual(u"10 h", trekking_tags.duration(10))
self.assertEqual(u"1 days", trekking_tags.duration(24))
self.assertEqual(u"2 days", trekking_tags.duration(32))
self.assertEqual(u"2 days", trekking_tags.duration(48))
self.assertEqual(u"3 days", trekking_tags.duration(49))
self.assertEqual(u"8 days", trekking_tags.duration(24 * 8))
self.assertEqual(u"9 days", trekking_tags.duration(24 * 9))
class TrekViewsSameStructureTests(AuthentFixturesTest):
def setUp(self):
profile = UserProfileFactory.create(user__username='homer',
user__password='<PASSWORD>',
language='en')
self.user = profile.user
self.user.groups.add(Group.objects.get(name=u"Référents communication"))
self.client.login(username='homer', password='<PASSWORD>')
self.content1 = TrekFactory.create()
structure = StructureFactory.create()
self.content2 = TrekFactory.create(structure=structure)
def add_bypass_perm(self):
perm = Permission.objects.get(codename='can_bypass_structure')
self.user.user_permissions.add(perm)
def test_edit_button_same_structure(self):
url = "/trek/{pk}/".format(pk=self.content1.pk)
response = self.client.get(url)
self.assertContains(response,
'<a class="btn btn-primary pull-right" '
'href="/trek/edit/{pk}/">'
'<i class="icon-pencil icon-white"></i> '
'Update</a>'.format(pk=self.content1.pk))
def test_edit_button_other_structure(self):
url = "/trek/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertContains(response,
'<span class="btn disabled pull-right" href="#">'
'<i class="icon-pencil"></i> Update</span>')
def test_edit_button_bypass_structure(self):
self.add_bypass_perm()
url = "/trek/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertContains(response,
'<a class="btn btn-primary pull-right" '
'href="/trek/edit/{pk}/">'
'<i class="icon-pencil icon-white"></i> '
'Update</a>'.format(pk=self.content2.pk))
def test_can_edit_same_structure(self):
url = "/trek/edit/{pk}/".format(pk=self.content1.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_cannot_edit_other_structure(self):
url = "/trek/edit/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertRedirects(response, "/trek/{pk}/".format(pk=self.content2.pk))
def test_can_edit_bypass_structure(self):
self.add_bypass_perm()
url = "/trek/edit/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_can_delete_same_structure(self):
url = "/trek/delete/{pk}/".format(pk=self.content1.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_cannot_delete_other_structure(self):
url = "/trek/delete/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertRedirects(response, "/trek/{pk}/".format(pk=self.content2.pk))
def test_can_delete_bypass_structure(self):
self.add_bypass_perm()
url = "/trek/delete/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
class POIViewsSameStructureTests(TranslationResetMixin, AuthentFixturesTest):
def setUp(self):
profile = UserProfileFactory.create(user__username='homer',
user__password='<PASSWORD>')
user = profile.user
user.groups.add(Group.objects.get(name=u"Référents communication"))
self.client.login(username=user.username, password='<PASSWORD>')
self.content1 = POIFactory.create()
structure = StructureFactory.create()
self.content2 = POIFactory.create(structure=structure)
def tearDown(self):
self.client.logout()
def test_can_edit_same_structure(self):
url = "/poi/edit/{pk}/".format(pk=self.content1.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_cannot_edit_other_structure(self):
url = "/poi/edit/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertRedirects(response, "/poi/{pk}/".format(pk=self.content2.pk))
def test_can_delete_same_structure(self):
url = "/poi/delete/{pk}/".format(pk=self.content1.pk)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_cannot_delete_other_structure(self):
url = "/poi/delete/{pk}/".format(pk=self.content2.pk)
response = self.client.get(url)
self.assertRedirects(response, "/poi/{pk}/".format(pk=self.content2.pk))
class CirkwiTests(TranslationResetMixin, TestCase):
def setUp(self):
testutil._MAX_LENGTH = 10000
creation = make_aware(datetime.datetime(2014, 1, 1), utc)
self.trek = TrekFactory.create(published=True)
self.trek.date_insert = creation
self.trek.save()
self.poi = POIFactory.create(published=True)
self.poi.date_insert = creation
self.poi.save()
TrekFactory.create(published=False)
POIFactory.create(published=False)
def tearDown(self):
testutil._MAX_LENGTH = 80
def test_export_circuits(self):
response = self.client.get('/api/cirkwi/circuits.xml')
self.assertEqual(response.status_code, 200)
attrs = {
'pk': self.trek.pk,
'title': self.trek.name,
'date_update': timestamp(self.trek.date_update),
'n': self.trek.description.replace('<p>description ', '').replace('</p>', ''),
'poi_pk': self.poi.pk,
'poi_title': self.poi.name,
'poi_date_update': timestamp(self.poi.date_update),
'poi_description': self.poi.description.replace('<p>', '').replace('</p>', ''),
}
self.assertXMLEqual(
response.content,
'<?xml version="1.0" encoding="utf8"?>\n'
'<circuits version="2">'
'<circuit id_circuit="{pk}" date_modification="{date_update}" date_creation="1388534400">'
'<informations>'
'<information langue="en">'
'<titre>{title}</titre>'
'<description>description_teaser {n}\n\ndescription {n}</description>'
'<informations_complementaires>'
'<information_complementaire><titre>Departure</titre><description>departure {n}</description></information_complementaire>'
'<information_complementaire><titre>Arrival</titre><description>arrival {n}</description></information_complementaire>'
'<information_complementaire><titre>Ambiance</titre><description>ambiance {n}</description></information_complementaire>'
'<information_complementaire><titre>Access</titre><description>access {n}</description></information_complementaire>'
'<information_complementaire><titre>Disabled infrastructure</titre><description>disabled_infrastructure {n}</description></information_complementaire>'
'<information_complementaire><titre>Advised parking</titre><description>Advised parking {n}</description></information_complementaire>'
'<information_complementaire><titre>Public transport</titre><description>Public transport {n}</description></information_complementaire>'
'<information_complementaire><titre>Advice</titre><description>Advice {n}</description></information_complementaire></informations_complementaires>'
'<tags_publics></tags_publics>'
'</information>'
'</informations>'
'<distance>141</distance>'
'<locomotions><locomotion duree="5400"></locomotion></locomotions>'
'<trace><point><lat>46.5</lat><lng>3.0</lng></point><point><lat>46.5009004423</lat><lng>3.00130397672</lng></point></trace>'
'<pois>'
'<poi id_poi="{poi_pk}" date_modification="{poi_date_update}" date_creation="1388534400">'
'<informations>'
'<information langue="en"><titre>{poi_title}</titre><description>{poi_description}</description></information>'
'</informations>'
'<adresse><position><lat>46.5</lat><lng>3.0</lng></position></adresse>'
'</poi>'
'</pois>'
'</circuit>'
'</circuits>'.format(**attrs))
def test_export_pois(self):
response = self.client.get('/api/cirkwi/pois.xml')
self.assertEqual(response.status_code, 200)
attrs = {
'pk': self.poi.pk,
'title': self.poi.name,
'description': self.poi.description.replace('<p>', '').replace('</p>', ''),
'date_update': timestamp(self.poi.date_update),
}
self.assertXMLEqual(
response.content,
'<?xml version="1.0" encoding="utf8"?>\n'
'<pois version="2">'
'<poi id_poi="{pk}" date_modification="{date_update}" date_creation="1388534400">'
'<informations>'
'<information langue="en"><titre>{title}</titre><description>{description}</description></information>'
'</informations>'
'<adresse><position><lat>46.5</lat><lng>3.0</lng></position></adresse>'
'</poi>'
'</pois>'.format(**attrs))
class TrekWorkflowTest(TranslationResetMixin, TestCase):
def setUp(self):
call_command('update_permissions')
self.trek = TrekFactory.create(published=False)
self.user = User.objects.create_user('omer', password='<PASSWORD>')
self.user.user_permissions.add(Permission.objects.get(codename='add_trek'))
self.user.user_permissions.add(Permission.objects.get(codename='change_trek'))
self.client.login(username='omer', password='<PASSWORD>')
def tearDown(self):
self.client.logout()
def test_cannot_publish(self):
response = self.client.get('/trek/add/')
self.assertNotContains(response, 'Published')
response = self.client.get('/trek/edit/%u/' % self.trek.pk)
self.assertNotContains(response, 'Published')
def test_can_publish(self):
self.user.user_permissions.add(Permission.objects.get(codename='publish_trek'))
response = self.client.get('/trek/add/')
self.assertContains(response, 'Published')
response = self.client.get('/trek/edit/%u/' % self.trek.pk)
self.assertContains(response, 'Published')
class SyncRandoViewTest(TestCase):
def setUp(self):
self.user = User.objects.create_user('bart', password='<PASSWORD>')
def test_return_redirect(self):
response = self.client.get(reverse('trekking:sync_randos_view'))
self.assertEqual(response.status_code, 302)
def test_return_redirect_superuser(self):
self.user.is_superuser = True
response = self.client.get(reverse('trekking:sync_randos_view'))
self.assertEqual(response.status_code, 302)
def test_post_sync_redirect(self):
"""
Test that a superuser can launch the sync via POST.
"""
self.user.is_superuser = True
response = self.client.post(reverse('trekking:sync_randos'))
self.assertEqual(response.status_code, 302)
class ServiceViewsTest(CommonTest):
model = Service
modelfactory = ServiceFactory
userfactory = TrekkingManagerFactory
def get_good_data(self):
PathFactory.create()
return {
'type': ServiceTypeFactory.create().pk,
'topology': '{"lat": 5.1, "lng": 6.6}',
'structure': default_structure().pk
}
def test_empty_topology(self):
self.login()
data = self.get_good_data()
data['topology'] = ''
response = self.client.post(self.model.get_add_url(), data)
self.assertEqual(response.status_code, 200)
form = self.get_form(response)
self.assertEqual(form.errors, {'topology': [u'Topology is empty.']})
def test_listing_number_queries(self):
self.login()
# Create many instances
for i in range(100):
self.modelfactory.create()
for i in range(10):
DistrictFactory.create()
# Enable query counting
settings.DEBUG = True
for url in [self.model.get_jsonlist_url(),
self.model.get_format_list_url()]:
with self.assertNumQueries(5):
self.client.get(url)
settings.DEBUG = False
class ServiceJSONTest(TrekkingManagerTest):
def setUp(self):
self.login()
self.service = ServiceFactory.create(type__published=True)
self.pk = self.service.pk
def test_list(self):
url = '/api/en/services.json'
self.response = self.client.get(url)
self.result = json.loads(self.response.content)
self.assertEqual(len(self.result), 1)
self.assertTrue('type' in self.result[0])
def test_detail(self):
url = '/api/en/services/%s.json' % self.pk
self.response = self.client.get(url)
self.result = json.loads(self.response.content)
self.assertDictEqual(self.result['type'],
{'id': self.service.type.pk,
'name': self.service.type.name,
'pictogram': os.path.join(settings.MEDIA_URL, self.service.type.pictogram.name),
})
``` |
{
"source": "jmdejong/ratul",
"score": 3
} |
#### File: ratul/examples/static.py
```python
from ratuil.cursedscreen import Screen
from ratuil.layout import Layout
from ratuil.inputs import get_key
import time
layoutstring = """\
<?xml version="1.0"?>
<hbox>
<vbox width="20" align="right">
<bar id="health" height="1" full-char="+" empty-char="-" full-style="fg:7; bg:2" empty-style="fg:7; bg: 1;" total="10" filled="8"></bar>
<switchbox id="menus" selected="equipment" height="50%">
<border key="inventory">
<listing id="inventory">
milk
eggs
bread
</listing>
</border>
<border char="#" key="equipment">
<listing id="equipment">
cotton underwear
cotton shirt
jeans
friendship bracelet
</listing>
</border>
</switchbox>
<border char=" ">
<textbox id="info" wrap="words">
This is a great place to show some information.
Textbox lines can be wrapped!
</textbox>
</border>
</vbox>
<fill width="1" align="right" style="fg:12;bg:4">
@
</fill>
<vbox>
<hbox align="bottom" height="1">
<textbox width="2">></textbox>
<textinput id="input"></textinput>
</hbox>
<log id="messages" align="bottom" height="20%">
Welcome to [game]
</log>
<border>
<overlay>
<field id="field" char-size="2"></field>
<border offset-x="2" align="right" width="13" offset-y="1" height="3" style="reverse">
<textbox>hello world</textbox>
</border>
</overlay>
</border>
</vbox>
</hbox>
"""
def main():
target = Screen()
try:
target.initialize_terminal()
target.clear()
layout = Layout.from_xml_str(target, layoutstring)
layout.update()
target.update()
get_key()
finally:
target.finalize_terminal()
print()
if __name__ == "__main__":
main()
```
#### File: ratul/ratuil/ansiscreen.py
```python
import sys
import shutil
import tty
import termios
from .constants import INT_INFINITY
from .drawtarget import DrawTarget
from .textstyle import TextStyle
from .strwidth import charwidth
from .basescreen import BaseScreen
from .pad import Pad
from .inputs import get_key
class Attr:
RESET = "0"
BOLD = "1"
UNDERSCORE = "4"
BLINK = "5"
REVERSE = "7"
CONCEALED = "8"
FG_DEFAULT = "39"
BG_DEFAULT = "49"
FG_COLORS = [str(i) for i in list(range(30, 38)) + list(range(90, 98))]
BG_COLORS = [str(i) for i in list(range(40, 48)) + list(range(100, 108))]
ATTRS = {
TextStyle.BOLD: BOLD,
TextStyle.REVERSE: REVERSE,
TextStyle.UNDERSCORE: UNDERSCORE,
TextStyle.BLINK: BLINK
}
class AnsiScreen(BaseScreen):
def __init__(self, out=sys.stdout, always_reset=False, blink_bright_background=False, keyin=sys.stdin, **_kwargs):
self.out = out
self.keyin = keyin
self.width = 0
self.height = 0
self.blink_bright_background = blink_bright_background # use the blink attribute for bright backgrounds
self.always_reset = always_reset or blink_bright_background # always reset if the style is different than the previous one
self.update_size()
self.fd = None
self.oldterm = None
def initialize_terminal(self):
self.fd = sys.stdin.fileno()
self.oldterm = termios.tcgetattr(self.fd)
tty.setraw(sys.stdin)
self.hide_cursor()
def finalize_terminal(self):
if self.oldterm is not None and self.fd is not None:
termios.tcsetattr(self.fd, termios.TCSADRAIN, self.oldterm)
self.finalize()
def get_key(self):
return get_key()
def create_pad(self, width, height):
return Pad(width, height)
def update_size(self):
size = shutil.get_terminal_size()
self.width = size.columns
self.height = size.lines
def move(self, x, y):
self.out.write("\033[{};{}f".format(y+1, x+1))
def addstr(self, text):
self.out.write(text)
def style(self, style, previous=None):
if style is None:
style = TextStyle.default
if style == previous:
return
parts = []
reset = False
if style.fg is None or style.bg is None or previous is None or previous != style and self.always_reset:
parts.append(Attr.RESET)
reset = True
else :
for attr, enabled in style.attr.items():
if not enabled and previous.attr[attr]:
parts.append(Attr.RESET)
reset = True
if style.fg is not None and (reset or style.fg != previous.fg):
parts.append(Attr.FG_COLORS[style.fg])
if style.bg is not None and (reset or style.bg != previous.bg):
parts.append(Attr.BG_COLORS[style.bg])
if style.bg > 7 and self.blink_bright_background:
parts.append(Attr.BLINK)
for attr, enabled in style.attr.items():
if enabled and (reset or not previous.attr[attr]):
parts.append(Attr.ATTRS[attr])
ansistyle = "\033[" + ";".join(parts) + "m"
self.out.write(ansistyle)
def write(self, x, y, text, style=None):
self.move(x, y)
self.style(style)
self.addstr(text)
def clear(self):
self.out.write("\033[0m\033[2J")
def reset(self):
self.update_size()
self.clear()
def clear_line(self):
self.out.write("\033[K")
def skip(self, amount=1):
if amount == 0:
return
if amount == 1:
stramount = ""
else:
stramount = str(abs(amount))
self.out.write("\033[{}{}".format(stramount, ("C" if amount >= 0 else "D")))
def draw_pad(self, pad, scr_x=0, scr_y=0, width=INT_INFINITY, height=INT_INFINITY, pad_x=0, pad_y=0):
screen = self
width = min(width, screen.width - scr_x, pad.width - pad_x)
height = min(height, screen.height - scr_y, pad.height - pad_y)
last_style = None
for y in range(height):
screen.move(scr_x, scr_y+y)
skip = 0
line_y = pad_y + y
for cell in pad.get_line(pad_x, line_y, width):
if cell is None:
skip += 1
continue
if skip != 0:
screen.skip(skip)
skip = 0
style, char = cell
screen.style(style, last_style)
last_style = style
screen.addstr(char)
skip += 1 - charwidth(char)
def hide_cursor(self):
self.out.write("\033[?25l")
def show_cursor(self):
self.out.write("\033[?25h")
def finalize(self):
self.style(None)
self.move(0, self.height - 1)
self.show_cursor()
self.out.flush()
def update(self):
self.out.flush()
Screen = AnsiScreen
```
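The escape-sequence handling in `style()` is easiest to see with a concrete call. Below is a minimal sketch, not part of the repository, that writes into a string buffer instead of a real terminal; the import paths simply follow the package layout shown in this document.
```python
# Illustrative only: capture the ANSI sequence AnsiScreen.style() emits.
import io

from ratuil.ansiscreen import AnsiScreen
from ratuil.textstyle import TextStyle

buf = io.StringIO()
screen = AnsiScreen(out=buf)
screen.style(TextStyle(fg=7, bg=2))   # white foreground on green background
print(repr(buf.getvalue()))           # '\x1b[0;37;42m': reset, then fg 37, bg 42
```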
#### File: ratul/ratuil/strwidth.py
```python
import unicodedata
# taken from textwrap
_whitespace = '\t\n\x0b\x0c\r '
def charwidth(char):
""" The width of a single character. Ambiguous width is considered 1"""
cat = unicodedata.category(char)
if cat == "Mn":
return 0
eaw = unicodedata.east_asian_width(char)
if eaw == "Na" or eaw == "H":
return 1
if eaw == "F" or eaw == "W":
return 2
if eaw == "A":
return 1
if eaw == "N":
return 1
raise Exception("unknown east asian width for character {}: {}".format(ord(char), char))
def strwidth(text):
""" The total width of a string """
return sum(charwidth(ch) for ch in text)
def width(text):
return strwidth(text)
def width_index(text, width):
""" The largest index i for which the strwidth(text[:i]) <= width """
l = 0
for i, char in enumerate(text):
w = charwidth(char)
if l + w > width:
return i
l += w
return len(text)
def crop(text, width):
return text[:width_index(text, width)]
def wrap(text, width, separators=None):
lines = []
for line in text.splitlines():
while True:
cutoff = width_index(line, width)
if cutoff >= len(line):
lines.append(line)
break
if separators is not None:
last_sep = max(line.rfind(c, 0, cutoff+1) for c in separators)
if last_sep > 0:
cutoff = last_sep
lines.append(line[:cutoff])
if separators is not None:
while line[cutoff] in separators:
cutoff += 1
line = line[cutoff:]
return lines
def wrap_words(text, width):
return wrap(text, width, separators=_whitespace)
```
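A short usage sketch (not from the repository) of these helpers; the point is that fullwidth characters count as two terminal columns, so wrapping is based on display width rather than character count.
```python
# Illustrative only: display-width aware measuring and word wrapping.
from ratuil.strwidth import charwidth, strwidth, wrap_words

print(charwidth("a"))    # 1
print(charwidth("あ"))   # 2 (east-asian fullwidth)
print(strwidth("aあb"))  # 4
print(wrap_words("hello wide あいう world", 10))
# ['hello wide', 'あいう', 'world']
```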
#### File: ratul/ratuil/textstyle.py
```python
class Attr:
RESET = "0"
BOLD = "1"
UNDERSCORE = "4"
BLINK = "5"
REVERSE = "7"
CONCEALED = "8"
#FG_BLACK = "30"
#FG_RED = "31"
#FG_GREEN = "32"
#FG_YELLOW = "33"
#FG_BLUE = "34"
#FG_MAGENTA = "35"
#FG_CYAN = "36"
#FG_WHITE = "37"
#BG_BLACK = "40"
#BG_RED = "41"
#BG_GREEN = "42"
#BG_YELLOW = "43"
#BG_BLUE = "44"
#BG_MAGENTA = "45"
#BG_CYAN = "46"
#BG_WHITE = "47"
#FG_BRIGHT_BLACK = "90"
#FG_BRIGHT_RED = "91"
#FG_BRIGHT_GREEN = "92"
#FG_BRIGHT_YELLOW = "93"
#FG_BRIGHT_BLUE = "94"
#FG_BRIGHT_MAGENTA = "95"
#FG_BRIGHT_CYAN = "96"
#FG_BRIGHT_WHITE = "97"
#BG_BRIGHT_BLACK = "100"
#BG_BRIGHT_RED = "101"
#BG_BRIGHT_GREEN = "102"
#BG_BRIGHT_YELLOW = "103"
#BG_BRIGHT_BLUE = "104"
#BG_BRIGHT_MAGENTA = "105"
#BG_BRIGHT_CYAN = "106"
#BG_BRIGHT_WHITE = "107"
FG_DEFAULT = "39"
BG_DEFAULT = "49"
FG_COLORS = [str(i) for i in list(range(30, 38)) + list(range(90, 98))]
BG_COLORS = [str(i) for i in list(range(40, 48)) + list(range(100, 108))]
class TextStyle:
BLACK = 0
RED = 1
GREEN = 2
YELLOW = 3
BLUE = 4
MAGENTA = 5
CYAN = 6
WHITE = 7
BRIGHT_BLACK = 8
BRIGHT_RED = 9
BRIGHT_GREEN = 10
BRIGHT_YELLOW = 11
BRIGHT_BLUE = 12
BRIGHT_MAGENTA = 13
BRIGHT_CYAN = 14
BRIGHT_WHITE = 15
COLORS = list(range(16))
BOLD = "bold"
REVERSE = "reverse"
UNDERSCORE = "underscore"
BLINK = "blink"
ATTRIBUTES = [BOLD, REVERSE, UNDERSCORE]
def __init__(self, fg=None, bg=None, bold=False, reverse=False, underscore=False):
self.fg = fg
self.bg = bg
self.attr = {
self.BOLD: bold,
self.REVERSE: reverse,
self.UNDERSCORE: underscore
}
self.attr_set = frozenset(key for key, value in self.attr.items() if value)
def __eq__(self, other):
return isinstance(other, TextStyle) and other.fg == self.fg and other.bg == self.bg and self.attr_set == other.attr_set
def __repr__(self):
if self == self.default:
return "TextStyle()"
return "TextStyle({}, {}, {})".format(self.fg, self.bg, ", ".join(name for name, enabled in self.attr.items() if enabled))
def add(self, other):
if other is None:
other = TextStyle()
fg = self.fg
if other.fg is not None:
fg = other.fg
bg = self.bg
if other.bg is not None:
bg = other.bg
attrs = dict(self.attr)
for key, val in other.attr.items():
if val:
attrs[key] = val
return TextStyle(fg, bg, **attrs)
@property
def bold(self):
return self.attr[self.BOLD]
@property
def underscore(self):
return self.attr[self.UNDERSCORE]
@property
def reverse(self):
return self.attr[self.REVERSE]
@classmethod
def from_str(cls, text):
if text is None:
return TextStyle.default
fg = None
bg = None
attrs = {}
parts = text.split(";")
for part in parts:
attr, _sep, value = part.partition(":")
attr = attr.strip().casefold()
value = value.strip()
if attr == "fg" and int(value) in TextStyle.COLORS:
fg = int(value)
if attr == "bg" and int(value) in TextStyle.COLORS:
bg = int(value)
if attr in TextStyle.ATTRIBUTES:
attrs[attr] = True
return cls(fg, bg, **attrs)
TextStyle.default = TextStyle()
```
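`TextStyle.from_str` parses the same `style` attribute syntax used in the XML layout earlier in this document (`fg` and `bg` take color indices, attribute names act as flags). A small sketch, assuming only the module above:
```python
# Illustrative only: parsing the style strings used in layout attributes.
from ratuil.textstyle import TextStyle

style = TextStyle.from_str("fg:7; bg:2; bold")
print(style.fg, style.bg, style.bold)              # 7 2 True
print(style == TextStyle(fg=7, bg=2, bold=True))   # True
```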
#### File: ratuil/widgets/__init__.py
```python
class Widget:
_changed = True
backend = None
def change(self):
self._changed = True
def is_changed(self):
return self._changed
def unchange(self):
self._changed = False
def set_backend(self, backend):
self.backend = backend
def resize(self, screen):
self.change()
def update(self, target, force=False):
""" draw the widget onto target.
if force is false and the widget did not change since the previous update, don't do anything
return whether anything was drawn
"""
if self.is_changed() or force:
self.draw(target)
self.unchange()
return True
return False
@classmethod
def from_xml(cls, children, attr, text):
raise NotImplementedError
```
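The base class only maintains a dirty flag: concrete widgets implement `draw()` and call `change()` whenever their state mutates, so `update()` can skip widgets that have not changed. A minimal subclass sketch (illustrative, not one of the repository's widgets; `target` is assumed to be a pad-like draw target with `clear()` and `write()`):
```python
# Illustrative only: a trivial widget that redraws only after its text changes.
from ratuil.widgets import Widget

class Label(Widget):
    def __init__(self, text=""):
        self.text = text

    def set_text(self, text):
        self.text = text
        self.change()   # mark dirty; the next update() will call draw()

    def draw(self, target):
        target.clear()
        target.write(0, 0, self.text)
```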
#### File: ratuil/widgets/textinput.py
```python
from . import Widget
from ..textstyle import TextStyle
from ..strwidth import strwidth, width_index
class TextInput(Widget):
def __init__(self):
self.text = ""
self.cursor = None
def set_text(self, text, cursor=None):
self.text = text
if cursor is not None:
assert cursor >= 0
self.cursor = cursor
self.change()
def draw(self, target):
target.clear()
if self.cursor is None:
target.write(0, 0, self.text)
else:
text = self.text
cursor_pos = strwidth(self.text[:self.cursor])
textwidth = strwidth(self.text)
offset = max(0, cursor_pos - target.width * 0.9)
chars_offset = width_index(text, offset)
offset_text = self.text[chars_offset:]
target.write(0, 0, offset_text)
if self.cursor < len(self.text):
c = self.text[self.cursor]
else:
c = ' '
target.write(cursor_pos - strwidth(self.text[:chars_offset]), 0, c, TextStyle(reverse=True))
@classmethod
def from_xml(cls, children, attr, text):
return cls()
``` |
{
"source": "JmDeleon2000/SAT-Logica",
"score": 3
} |
#### File: JmDeleon2000/SAT-Logica/DPLL.py
```python
def DPLL(B, I):
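# DPLL satisfiability search for a formula B in CNF.
# B is a list of clauses, each clause a list of literals ('p' for a variable,
# '!p' for its negation); I accumulates the assignment found so far.
# Returns (satisfiable, assignment). The first recursive branch assigns the
# chosen variable True (drop clauses containing the positive literal, strip the
# negated literal from the rest); the second branch assigns it False; if both
# fail the search backtracks.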
if len(B) == 0:
return True, I
for i in B:
if len(i) == 0:
return False, []
x = B[0][0]
if x[0] != '!':
x = '!' + x
Bp = [[j for j in i if j != x] for i in B if not(x[1:] in i)]
Ip = [i for i in I]
Ip.append('Value of ' + x[1:] + ': ' + str(True))
V, I1 = DPLL(Bp, Ip)
if V:
return True, I1
Bp = [[j for j in i if j != x[1:]] for i in B if not(x in i)]
Ip = [i for i in I]
Ip.append('Value of ' + x[1:] + ': ' + str(False))
V, I2 = DPLL(Bp, Ip)
if V:
return True, I2
return False, []
expresion =[['!p', '!r', '!s'], ['!q', '!p', '!s'], ['p'], ['s']]
t,r = DPLL(expresion, [])
print(t)
print(r)
``` |
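The nested-list argument encodes a CNF formula: each inner list is a clause, `!` marks negation, and the clauses are joined by conjunction. The example above is therefore (¬p ∨ ¬r ∨ ¬s) ∧ (¬q ∨ ¬p ∨ ¬s) ∧ p ∧ s, and the script prints `True` together with the assignment it found (p and s true, q and r false), which satisfies every clause.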
{
"source": "j-m-d-h/DS9_Unit3_BW",
"score": 3
} |
#### File: j-m-d-h/DS9_Unit3_BW/test.py
```python
from flask import Flask, request
import pandas as pd
from flask_sqlalchemy import SQLAlchemy
import sqlite3
app = Flask(__name__)
DB = SQLAlchemy()
df = pd.read_csv('SpotifyAudioFeaturesApril2019.csv')
conn = sqlite3.connect('songs_df.sqlite3')
df.to_sql('songs', conn, if_exists='replace')
# class Song(DB.Model):
# """ Model for song entry in database """
# id = DB.Column(DB.String(30))
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///songs_df.sqlite3'
#
# @app.route('/song', methods = ['POST'])
# def song():
# """Route for recommendations based on song selected."""
#
# #input
# song_id = request.get_json(force=True)
#
# #get parameters:
# # use song_id
# # songs_df = SELECT * from songs WHERE df_id == song_id
# danceability = songs_df['danceability']
# energy = songs_df['energy']
#
#
# #model
# model = "some pickled model"
#
# #output
# #should be 30 recommendations
# recommendations = model.predict("parameters")
#
# return recommendations
#
#
# @app.route('/mood')
# def mood():
# """Route for recommendations based on mood selected."""
#
# mood = request.get_json(force=True)
#
# recommendations =
#
#
# if __name__ == "__main__":
# app.run()
``` |
{
"source": "j-m-d-h/lambdata",
"score": 4
} |
#### File: lambdata/lambdata_jmdh/splitter.py
```python
from sklearn.model_selection import train_test_split
class Split:
"""
Performs a train-validate-test split on a dataframe.
Parameters
---------------------------------------
data : dataframe or array
the data to be split
test_float : float
the size (percentage) of the data to be used as test data
val_float : float
the size (percentage) of the data to be used as validation data
"""
def __init__(self, data, test_float=0.1, val_float=0.333):
self.data = data
self.test_per = test_float
self.val_per = val_float
def split_one(self):
train_val, test = train_test_split(self.data, test_size=self.test_per)
self.test = test
self.train_val = train_val
def split_two(self):
train, val = train_test_split(self.train_val, test_size=self.val_per)
self.train = train
self.val = val
def show(self):
return self.train, self.val, self.test
```
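A usage sketch for the class above (illustrative; the import path follows the file layout in this document and the DataFrame is a stand-in for real data):
```python
# Illustrative only: three-way split of a toy DataFrame.
import pandas as pd
from lambdata_jmdh.splitter import Split

df = pd.DataFrame({"x": range(100), "y": range(100)})
splitter = Split(df, test_float=0.1, val_float=0.333)
splitter.split_one()    # carve off the test set first
splitter.split_two()    # then split the remainder into train / validation
train, val, test = splitter.show()
print(len(train), len(val), len(test))   # roughly 60 / 30 / 10 rows
```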
#### File: lambdata/lambdata_jmdh/statecode.py
```python
class State:
"""
Returns the full name of a state given the two-letter character code.
Parameters
-----------------------------------
code : string
two-letter state code to be translated
"""
def __init__(self, stringy):
self.stringy = stringy
def full_state(self):
dictionary = {'AL': 'Alabama', 'AK': 'Alaska', 'AZ': 'Arizona',
'AR': 'Arkansas', 'CA': 'California', 'CO': 'Colorado',
'CT': 'Connecticut', 'DE': 'Delaware', 'FL': 'Florida',
'GA': 'Georgia', 'HI': 'Hawaii', 'ID': 'Idaho',
'IL': 'Illinois', 'IN': 'Indiana', 'IA': 'Iowa',
'KS': 'Kansas', 'KY': 'Kentucky', 'LA': 'Louisiana',
'ME': 'Maine', 'MD': 'Maryland', 'MA': 'Massachusetts',
'MI': 'Michigan', 'MN': 'Minnesota', 'MS': 'Mississippi',
'MO': 'Missouri', 'MT': 'Montana', 'NE': 'Nebraska',
'NV': 'Nevada', 'NH': 'New Hampshire', 'NJ': 'New Jersey',
'NM': 'New Mexico', 'NY': 'New York', 'NC': 'North Carolina',
'ND': 'North Dakota', 'OH': 'Ohio', 'OK': 'Oklahoma',
'OR': 'Oregon', 'PA': 'Pennsylvania', 'RI': 'Rhode Island',
'SC': 'South Carolina', 'SD': 'South Dakota', 'TN': 'Tennessee',
'TX': 'Texas', 'UT': 'Utah', 'VT': 'Vermont', 'VA': 'Virginia',
'WA': 'Washington', 'WV': 'West Virginia', 'WI': 'Wisconsin',
'WY': 'Wyoming', 'DC': 'District of Columbia', 'PR': 'Puerto Rico'}
self.name = dictionary[self.stringy]
return self.name
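if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module): translate a
    # two-letter code into the full state name.
    print(State("CA").full_state())   # -> California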
``` |
{
"source": "jmdict-kindle/jmdict-kindle",
"score": 3
} |
#### File: jmdict-kindle/jmdict-kindle/exampleSentences.py
```python
import tarfile
import sys
import csv
import linecache
import re
from dictionary import *
class ExampleSentences:
def __init__(self, indices_tar, sentences_tar, entries):
tarfile.open(indices_tar, "r:bz2").extractall()
tarfile.open(sentences_tar, "r:bz2").extractall()
csv_file = open("jpn_indices.csv", encoding="utf-8")
self.__jpn_indices = csv.reader(csv_file, delimiter="\t")
sentences_file = open("sentences.csv", encoding="utf-8")
for self.sentences_count, l in enumerate(sentences_file):
pass
self.sentences_count += 1
sentences_file.close()
self.__entry_dictionary = {}
for entry in entries:
if entry.entry_type == VOCAB_ENTRY:
for ortho in entry.orthos:
if ortho.value in self.__entry_dictionary:
self.__entry_dictionary[ortho.value].append(entry)
else:
self.__entry_dictionary[ortho.value] = [entry]
# function to find the correct sentence corresponding to the sentence id, since lines and ids do not match
def __findSentence(self, id):
if id > self.sentences_count:
current_line = self.sentences_count
else:
current_line = id
line = linecache.getline("sentences.csv", current_line)
columns = line.split("\t")
columns[0] = int(columns[0])
old_distance = abs(columns[0] - id)
while columns[0] != id:
assert columns[0] >= current_line, "The sentence list is not ordered"
current_line = current_line - (columns[0] - id)
line = linecache.getline("sentences.csv", current_line)
columns = line.split("\t")
columns[0] = int(columns[0])
new_distance = abs(columns[0] - id)
if new_distance < old_distance:
old_distance = new_distance
else: # if it is stuck try linear search from that point on
if columns[0] > id:
while columns[0] > id:
current_line -= 1
line = linecache.getline("sentences.csv", current_line)
columns = line.split("\t")
columns[0] = int(columns[0])
elif columns[0] < id:
while columns[0] < id:
current_line += 1
line = linecache.getline("sentences.csv", current_line)
columns = line.split("\t")
columns[0] = int(columns[0])
break
if columns[0] == id:
# remove linebreak
columns[2] = columns[2].replace("\n", "")
return columns[2]
else:
return None
def addExamples(self, good_only, max_sentences):
added_sentences = 0
for jpn_index in self.__jpn_indices:
keywords = []
if good_only:
sections = jpn_index[2].split("~") # good keywords are marked with an ~
del sections[-1] # remove the last section since it did not contain a ~
for section in sections:
split = section.split(" ")
keyword = split[-1]
match_group = re.match(".+?(?=\W|$|~)", keyword)
if match_group:
keywords.append([match_group.group(0), True])
else:
for keyword in jpn_index[2].split(" "):
if len(keyword) > 0:
if keyword[-1] == "~": # good keywords are marked with an ~
good_keyword = True
else:
good_keyword = False
# only take the word itself; after the word there can be ()[]{}~|
match_group = re.match(".+?(?=\W|$|~)", keyword)
if match_group:
keywords.append([match_group.group(0), good_keyword])
if len(keywords) > 0:
ja_id = int(jpn_index[0])
eng_id = int(jpn_index[1])
if ja_id > 0 and eng_id > 0:
japanese_sentence = self.__findSentence(ja_id)
english_sentence = self.__findSentence(eng_id)
if japanese_sentence != None and english_sentence != None:
for keyword in keywords:
if keyword[0] in self.__entry_dictionary:
for entry in self.__entry_dictionary[keyword[0]]:
if len(entry.sentences) < max_sentences:
added_sentences += 1
entry.sentences.append(
Sentence(
english_sentence,
japanese_sentence,
keyword[1],
)
)
elif keyword[1] == True:
for i in range(len(entry.sentences)):
if not entry.sentences[i].good_sentence:
entry.sentences[i] = Sentence(
english_sentence,
japanese_sentence,
keyword[1],
)
break
return added_sentences
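# Illustrative usage sketch (added note, not part of the original module). The
# tarball names and the `entries` list below are placeholders; real entries
# come from dictionary.py:
#   examples = ExampleSentences("jpn_indices.tar.bz2", "sentences.tar.bz2", entries)
#   print(examples.addExamples(good_only=True, max_sentences=3))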
``` |
{
"source": "jmduarte/DeepJet",
"score": 2
} |
#### File: modules/datastructures/TrainData_deepDoubleB.py
```python
from TrainDataDeepJet import TrainDataDeepJet,fileTimeOut
import numpy
class TrainData_deepDoubleB(TrainDataDeepJet):
def __init__(self):
'''
This class is meant as a base class for the FatJet studies
You will not need to edit it for trying out things
'''
TrainDataDeepJet.__init__(self)
#define truth:
self.undefTruth=['isUndefined']
self.truthclasses=['fj_isNonBB','fj_isBB']
self.referenceclass='fj_isNonBB' ## used for pt reshaping
self.registerBranches(self.truthclasses)
self.registerBranches(['fj_pt','fj_sdmass'])
self.weightbranchX='fj_pt'
self.weightbranchY='fj_sdmass'
#self.weight_binX = numpy.array([
# 300,400,500,
# 600,700,800,1000,2500],dtype=float)
self.weight_binX = numpy.array([
250,2500],dtype=float)
self.weight_binY = numpy.array(
[40,200],
dtype=float
)
self.removeUnderOverflow=True
#this is only needed because the truth definitions are different from deepFlavour
self.allbranchestoberead=[]
self.registerBranches(self.undefTruth)
self.registerBranches(self.truthclasses)
print self.allbranchestoberead
## categories to use for training
def reduceTruth(self, tuple_in):
import numpy
self.reducedtruthclasses=['fj_isNonHBB','fj_isHBB']
if tuple_in is not None:
q = tuple_in['sample_isQCD'] * tuple_in['fj_isQCD']
q = q.view(numpy.ndarray)
#t = tuple_in['fj_isTop'].view(numpy.ndarray)
#z = tuple_in['fj_isZ'].view(numpy.ndarray)
#w = tuple_in['fj_isW'].view(numpy.ndarray)
h = tuple_in['fj_isH']
h = h.view(numpy.ndarray)
return numpy.vstack((q,h)).transpose()
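# Added note (not part of the original code): reduceTruth returns an
# (n_jets, 2) array whose columns follow reducedtruthclasses, i.e.
# [fj_isNonHBB, fj_isHBB], built from the QCD and Higgs truth flags above.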
#######################################
class TrainData_deepDoubleB_db_pf_cpf_sv(TrainData_deepDoubleB):
def __init__(self):
'''
This is an example data format description for FatJet studies
'''
TrainData_deepDoubleB.__init__(self)
#example of how to register global branches
self.addBranches(['fj_pt',
'fj_eta',
'fj_sdmass',
'fj_n_sdsubjets',
'fj_doubleb',
'fj_tau21',
'fj_tau32',
'npv',
'npfcands',
'ntracks',
'nsv'
])
self.addBranches(['fj_jetNTracks',
'fj_nSV',
'fj_tau0_trackEtaRel_0',
'fj_tau0_trackEtaRel_1',
'fj_tau0_trackEtaRel_2',
'fj_tau1_trackEtaRel_0',
'fj_tau1_trackEtaRel_1',
'fj_tau1_trackEtaRel_2',
'fj_tau_flightDistance2dSig_0',
'fj_tau_flightDistance2dSig_1',
'fj_tau_vertexDeltaR_0',
'fj_tau_vertexEnergyRatio_0',
'fj_tau_vertexEnergyRatio_1',
'fj_tau_vertexMass_0',
'fj_tau_vertexMass_1',
'fj_trackSip2dSigAboveBottom_0',
'fj_trackSip2dSigAboveBottom_1',
'fj_trackSip2dSigAboveCharm_0',
'fj_trackSipdSig_0',
'fj_trackSipdSig_0_0',
'fj_trackSipdSig_0_1',
'fj_trackSipdSig_1',
'fj_trackSipdSig_1_0',
'fj_trackSipdSig_1_1',
'fj_trackSipdSig_2',
'fj_trackSipdSig_3',
'fj_z_ratio'
])
#example of pf candidate branches
self.addBranches(['pfcand_ptrel',
'pfcand_erel',
'pfcand_phirel',
'pfcand_etarel',
'pfcand_deltaR',
'pfcand_puppiw',
'pfcand_drminsv',
'pfcand_drsubjet1',
'pfcand_drsubjet2',
'pfcand_hcalFrac'
],
100)
self.addBranches(['track_ptrel',
'track_erel',
'track_phirel',
'track_etarel',
'track_deltaR',
'track_drminsv',
'track_drsubjet1',
'track_drsubjet2',
'track_dz',
'track_dzsig',
'track_dxy',
'track_dxysig',
'track_normchi2',
'track_quality',
'track_dptdpt',
'track_detadeta',
'track_dphidphi',
'track_dxydxy',
'track_dzdz',
'track_dxydz',
'track_dphidxy',
'track_dlambdadz',
'trackBTag_EtaRel',
'trackBTag_PtRatio',
'trackBTag_PParRatio',
'trackBTag_Sip2dVal',
'trackBTag_Sip2dSig',
'trackBTag_Sip3dVal',
'trackBTag_Sip3dSig',
'trackBTag_JetDistVal'
],
60)
self.addBranches(['sv_ptrel',
'sv_erel',
'sv_phirel',
'sv_etarel',
'sv_deltaR',
'sv_pt',
'sv_mass',
'sv_ntracks',
'sv_normchi2',
'sv_dxy',
'sv_dxysig',
'sv_d3d',
'sv_d3dsig',
'sv_costhetasvpv'
],
5)
#branches that are used directly in the following function 'readFromRootFile'
#this is a technical trick to speed up the conversion
#self.registerBranches(['Cpfcan_erel','Cpfcan_eta','Cpfcan_phi',
# 'Npfcan_erel','Npfcan_eta','Npfcan_phi',
# 'nCpfcand','nNpfcand',
# 'jet_eta','jet_phi'])
self.registerBranches(['sample_isQCD','fj_isH','fj_isQCD'])
#this function describes how the branches are converted
def readFromRootFile(self,filename,TupleMeanStd, weighter):
#the first part is standard, no changes needed
from DeepJetCore.preprocessing import MeanNormApply, MeanNormZeroPad, MeanNormZeroPadParticles, ZeroPadParticles
import numpy
import ROOT
fileTimeOut(filename,120) #give eos 2 minutes to recover
rfile = ROOT.TFile(filename)
tree = rfile.Get("deepntuplizer/tree")
self.nsamples=tree.GetEntries()
#the definition of what to do with the branches
# those are the global branches (jet pt etc)
# they should be just glued to each other in one vector
# and zero padded (and mean subtracted and normalised)
#x_global = MeanNormZeroPad(filename,TupleMeanStd,
# [self.branches[0]],
# [self.branchcutoffs[0]],self.nsamples)
# the second part (the pf candidates) should be treated particle wise
# an array with (njets, nparticles, nproperties) is created
x_glb = ZeroPadParticles(filename,None,
self.branches[0],
self.branchcutoffs[0],self.nsamples)
x_db = ZeroPadParticles(filename,None,
self.branches[1],
self.branchcutoffs[1],self.nsamples)
x_pf = ZeroPadParticles(filename,None,
self.branches[2],
self.branchcutoffs[2],self.nsamples)
x_cpf = ZeroPadParticles(filename,None,
self.branches[3],
self.branchcutoffs[3],self.nsamples)
x_sv = ZeroPadParticles(filename,None,
self.branches[4],
self.branchcutoffs[4],self.nsamples)
# now, some jets are removed to avoid pt and eta biases
Tuple = self.readTreeFromRootToTuple(filename)
if self.remove:
# jets are removed until the shapes in eta and pt are the same as
# the truth class 'fj_isNonBB'
notremoves=weighter.createNotRemoveIndices(Tuple)
#undef=Tuple[self.undefTruth]
#notremoves-=undef
if self.weight:
weights=weighter.getJetWeights(Tuple)
elif self.remove:
weights=notremoves
else:
print('neither remove nor weight')
weights=numpy.empty(self.nsamples)
weights.fill(1.)
# create all collections:
#truthtuple = Tuple[self.truthclasses]
alltruth=self.reduceTruth(Tuple)
undef=numpy.sum(alltruth,axis=1)
weights=weights[undef > 0]
x_glb=x_glb[undef > 0]
x_db=x_db[undef > 0]
x_sv=x_sv[undef > 0]
x_pf=x_pf[undef > 0]
x_cpf=x_cpf[undef > 0]
alltruth=alltruth[undef > 0]
if self.remove: notremoves=notremoves[undef > 0]
# remove the entries to get same jet shapes
if self.remove:
print('remove')
weights=weights[notremoves > 0]
x_glb=x_glb[notremoves > 0]
x_db=x_db[notremoves > 0]
x_sv=x_sv[notremoves > 0]
x_pf=x_pf[notremoves > 0]
x_cpf=x_cpf[notremoves > 0]
alltruth=alltruth[notremoves > 0]
#newnsamp=x_global.shape[0]
newnsamp=x_glb.shape[0]
print('reduced content to ', int(float(newnsamp)/float(self.nsamples)*100),'%')
self.nsamples = newnsamp
# fill everything
self.w=[weights]
self.x=[x_db,x_pf,x_cpf,x_sv]
self.z=[x_glb]
self.y=[alltruth]
class TrainData_deepDoubleB_pf_cpf_sv(TrainData_deepDoubleB):
def __init__(self):
'''
This is an example data format description for FatJet studies
'''
TrainData_deepDoubleB.__init__(self)
#example of how to register global branches
self.addBranches(['fj_pt',
'fj_eta',
'fj_sdmass',
'fj_n_sdsubjets',
'fj_doubleb',
'fj_tau21',
'fj_tau32',
'npv',
'npfcands',
'ntracks',
'nsv'
])
#example of pf candidate branches
self.addBranches(['pfcand_ptrel',
'pfcand_erel',
'pfcand_phirel',
'pfcand_etarel',
'pfcand_deltaR',
'pfcand_puppiw',
'pfcand_drminsv',
'pfcand_drsubjet1',
'pfcand_drsubjet2',
'pfcand_hcalFrac'
],
100)
self.addBranches(['track_ptrel',
'track_erel',
'track_phirel',
'track_etarel',
'track_deltaR',
'track_drminsv',
'track_drsubjet1',
'track_drsubjet2',
'track_dz',
'track_dzsig',
'track_dxy',
'track_dxysig',
'track_normchi2',
'track_quality',
'track_dptdpt',
'track_detadeta',
'track_dphidphi',
'track_dxydxy',
'track_dzdz',
'track_dxydz',
'track_dphidxy',
'track_dlambdadz',
'trackBTag_EtaRel',
'trackBTag_PtRatio',
'trackBTag_PParRatio',
'trackBTag_Sip2dVal',
'trackBTag_Sip2dSig',
'trackBTag_Sip3dVal',
'trackBTag_Sip3dSig',
'trackBTag_JetDistVal'
],
60)
self.addBranches(['sv_ptrel',
'sv_erel',
'sv_phirel',
'sv_etarel',
'sv_deltaR',
'sv_pt',
'sv_mass',
'sv_ntracks',
'sv_normchi2',
'sv_dxy',
'sv_dxysig',
'sv_d3d',
'sv_d3dsig',
'sv_costhetasvpv'
],
5)
#branches that are used directly in the following function 'readFromRootFile'
#this is a technical trick to speed up the conversion
#self.registerBranches(['Cpfcan_erel','Cpfcan_eta','Cpfcan_phi',
# 'Npfcan_erel','Npfcan_eta','Npfcan_phi',
# 'nCpfcand','nNpfcand',
# 'jet_eta','jet_phi'])
self.registerBranches(['sample_isQCD','fj_isH','fj_isQCD'])
#this function describes how the branches are converted
def readFromRootFile(self,filename,TupleMeanStd, weighter):
#the first part is standard, no changes needed
from DeepJetCore.preprocessing import MeanNormApply, MeanNormZeroPad, MeanNormZeroPadParticles, ZeroPadParticles
import numpy
import ROOT
fileTimeOut(filename,120) #give eos 2 minutes to recover
rfile = ROOT.TFile(filename)
tree = rfile.Get("deepntuplizer/tree")
self.nsamples=tree.GetEntries()
#the definition of what to do with the branches
# those are the global branches (jet pt etc)
# they should be just glued to each other in one vector
# and zero padded (and mean subtracted and normalised)
#x_global = MeanNormZeroPad(filename,TupleMeanStd,
# [self.branches[0]],
# [self.branchcutoffs[0]],self.nsamples)
# the second part (the pf candidates) should be treated particle wise
# an array with (njets, nparticles, nproperties) is created
x_glb = ZeroPadParticles(filename,None,
self.branches[0],
self.branchcutoffs[0],self.nsamples)
x_pf = ZeroPadParticles(filename,None,
self.branches[2],
self.branchcutoffs[2],self.nsamples)
x_cpf = ZeroPadParticles(filename,None,
self.branches[3],
self.branchcutoffs[3],self.nsamples)
x_sv = ZeroPadParticles(filename,None,
self.branches[4],
self.branchcutoffs[4],self.nsamples)
# now, some jets are removed to avoid pt and eta biases
Tuple = self.readTreeFromRootToTuple(filename)
if self.remove:
# jets are removed until the shapes in eta and pt are the same as
# the truth class 'fj_isNonBB'
notremoves=weighter.createNotRemoveIndices(Tuple)
#undef=Tuple[self.undefTruth]
#notremoves-=undef
if self.weight:
weights=weighter.getJetWeights(Tuple)
elif self.remove:
weights=notremoves
else:
print('neither remove nor weight')
weights=numpy.empty(self.nsamples)
weights.fill(1.)
# create all collections:
#truthtuple = Tuple[self.truthclasses]
alltruth=self.reduceTruth(Tuple)
undef=numpy.sum(alltruth,axis=1)
weights=weights[undef > 0]
x_glb=x_glb[undef > 0]
x_sv=x_sv[undef > 0]
x_pf=x_pf[undef > 0]
x_cpf=x_cpf[undef > 0]
alltruth=alltruth[undef > 0]
if self.remove: notremoves=notremoves[undef > 0]
# remove the entries to get same jet shapes
if self.remove:
print('remove')
weights=weights[notremoves > 0]
x_glb=x_glb[notremoves > 0]
x_sv=x_sv[notremoves > 0]
x_pf=x_pf[notremoves > 0]
x_cpf=x_cpf[notremoves > 0]
alltruth=alltruth[notremoves > 0]
#newnsamp=x_global.shape[0]
newnsamp=x_glb.shape[0]
print('reduced content to ', int(float(newnsamp)/float(self.nsamples)*100),'%')
self.nsamples = newnsamp
# fill everything
self.w=[weights]
self.x=[x_pf,x_cpf,x_sv]
self.z=[x_glb]
self.y=[alltruth]
class TrainData_deepDoubleB_db_cpf_sv_reduced(TrainData_deepDoubleB):
def __init__(self):
'''
This is an example data format description for FatJet studies
'''
TrainData_deepDoubleB.__init__(self)
#example of how to register global branches
self.addBranches(['fj_pt',
'fj_eta',
'fj_sdmass',
'fj_n_sdsubjets',
'fj_doubleb',
'fj_tau21',
'fj_tau32',
'npv',
'npfcands',
'ntracks',
'nsv'
])
self.addBranches(['fj_jetNTracks',
'fj_nSV',
'fj_tau0_trackEtaRel_0',
'fj_tau0_trackEtaRel_1',
'fj_tau0_trackEtaRel_2',
'fj_tau1_trackEtaRel_0',
'fj_tau1_trackEtaRel_1',
'fj_tau1_trackEtaRel_2',
'fj_tau_flightDistance2dSig_0',
'fj_tau_flightDistance2dSig_1',
'fj_tau_vertexDeltaR_0',
'fj_tau_vertexEnergyRatio_0',
'fj_tau_vertexEnergyRatio_1',
'fj_tau_vertexMass_0',
'fj_tau_vertexMass_1',
'fj_trackSip2dSigAboveBottom_0',
'fj_trackSip2dSigAboveBottom_1',
'fj_trackSip2dSigAboveCharm_0',
'fj_trackSipdSig_0',
'fj_trackSipdSig_0_0',
'fj_trackSipdSig_0_1',
'fj_trackSipdSig_1',
'fj_trackSipdSig_1_0',
'fj_trackSipdSig_1_1',
'fj_trackSipdSig_2',
'fj_trackSipdSig_3',
'fj_z_ratio'
])
self.addBranches(['trackBTag_EtaRel',
'trackBTag_PtRatio',
'trackBTag_PParRatio',
'trackBTag_Sip2dVal',
'trackBTag_Sip2dSig',
'trackBTag_Sip3dVal',
'trackBTag_Sip3dSig',
'trackBTag_JetDistVal'
],
60)
self.addBranches(['sv_d3d',
'sv_d3dsig',
],
5)
#branches that are used directly in the following function 'readFromRootFile'
#this is a technical trick to speed up the conversion
#self.registerBranches(['Cpfcan_erel','Cpfcan_eta','Cpfcan_phi',
# 'Npfcan_erel','Npfcan_eta','Npfcan_phi',
# 'nCpfcand','nNpfcand',
# 'jet_eta','jet_phi'])
self.registerBranches(['sample_isQCD','fj_isH','fj_isQCD','fj_isTop','fj_isW','fj_isZ'])
#this function describes how the branches are converted
def readFromRootFile(self,filename,TupleMeanStd, weighter):
#the first part is standard, no changes needed
from DeepJetCore.preprocessing import MeanNormApply, MeanNormZeroPad, MeanNormZeroPadParticles
import numpy
import ROOT
fileTimeOut(filename,120) #give eos 2 minutes to recover
rfile = ROOT.TFile(filename)
tree = rfile.Get("deepntuplizer/tree")
self.nsamples=tree.GetEntries()
#the definition of what to do with the branches
# those are the global branches (jet pt etc)
# they should be just glued to each other in one vector
# and zero padded (and mean subtracted and normalised)
#x_global = MeanNormZeroPad(filename,TupleMeanStd,
# [self.branches[0]],
# [self.branchcutoffs[0]],self.nsamples)
# the second part (the pf candidates) should be treated particle wise
# an array with (njets, nparticles, nproperties) is created
x_glb = MeanNormZeroPadParticles(filename,None,
self.branches[0],
self.branchcutoffs[0],self.nsamples)
x_db = MeanNormZeroPadParticles(filename,None,
self.branches[1],
self.branchcutoffs[1],self.nsamples)
x_cpf = MeanNormZeroPadParticles(filename,None,
self.branches[2],
self.branchcutoffs[2],self.nsamples)
x_sv = MeanNormZeroPadParticles(filename,None,
self.branches[3],
self.branchcutoffs[3],self.nsamples)
# now, some jets are removed to avoid pt and eta biases
Tuple = self.readTreeFromRootToTuple(filename)
if self.remove:
# jets are removed until the shapes in eta and pt are the same as
# the truth class 'fj_isNonBB'
notremoves=weighter.createNotRemoveIndices(Tuple)
#undef=Tuple[self.undefTruth]
#notremoves-=undef
if self.weight:
weights=weighter.getJetWeights(Tuple)
elif self.remove:
weights=notremoves
else:
print('neither remove nor weight')
weights=numpy.empty(self.nsamples)
weights.fill(1.)
# create all collections:
#truthtuple = Tuple[self.truthclasses]
alltruth=self.reduceTruth(Tuple)
# removing undefined truth... comment out for CMSSW validation
undef=numpy.sum(alltruth,axis=1)
weights=weights[undef > 0]
x_glb=x_glb[undef > 0]
x_db=x_db[undef > 0]
x_sv=x_sv[undef > 0]
x_cpf=x_cpf[undef > 0]
alltruth=alltruth[undef > 0]
if self.remove: notremoves=notremoves[undef > 0]
# remove the entries to get same jet shapes
if self.remove:
print('remove')
weights=weights[notremoves > 0]
x_glb=x_glb[notremoves > 0]
x_db=x_db[notremoves > 0]
x_sv=x_sv[notremoves > 0]
x_cpf=x_cpf[notremoves > 0]
alltruth=alltruth[notremoves > 0]
#newnsamp=x_global.shape[0]
newnsamp=x_glb.shape[0]
print('reduced content to ', int(float(newnsamp)/float(self.nsamples)*100),'%')
self.nsamples = newnsamp
# fill everything
self.w=[weights]
self.x=[x_db,x_cpf,x_sv]
self.z=[x_glb]
self.y=[alltruth]
class TrainData_deepDoubleB_cpf_sv_reduced(TrainData_deepDoubleB):
def __init__(self):
'''
This is an example data format description for FatJet studies
'''
TrainData_deepDoubleB.__init__(self)
#example of how to register global branches
self.addBranches(['fj_pt',
'fj_eta',
'fj_sdmass',
'fj_n_sdsubjets',
'fj_doubleb',
'fj_tau21',
'fj_tau32',
'npv',
'npfcands',
'ntracks',
'nsv'
])
self.addBranches(['trackBTag_EtaRel',
'trackBTag_PtRatio',
'trackBTag_PParRatio',
'trackBTag_Sip2dVal',
'trackBTag_Sip2dSig',
'trackBTag_Sip3dVal',
'trackBTag_Sip3dSig',
'trackBTag_JetDistVal'
],
60)
self.addBranches(['sv_d3d',
'sv_d3dsig',
],
5)
#branches that are used directly in the following function 'readFromRootFile'
#this is a technical trick to speed up the conversion
#self.registerBranches(['Cpfcan_erel','Cpfcan_eta','Cpfcan_phi',
# 'Npfcan_erel','Npfcan_eta','Npfcan_phi',
# 'nCpfcand','nNpfcand',
# 'jet_eta','jet_phi'])
self.registerBranches(['sample_isQCD','fj_isH','fj_isQCD','fj_isTop','fj_isW','fj_isZ'])
#this function describes how the branches are converted
def readFromRootFile(self,filename,TupleMeanStd, weighter):
#the first part is standard, no changes needed
from DeepJetCore.preprocessing import MeanNormApply, MeanNormZeroPad, MeanNormZeroPadParticles
import numpy
import ROOT
fileTimeOut(filename,120) #give eos 2 minutes to recover
rfile = ROOT.TFile(filename)
tree = rfile.Get("deepntuplizer/tree")
self.nsamples=tree.GetEntries()
#the definition of what to do with the branches
# those are the global branches (jet pt etc)
# they should be just glued to each other in one vector
# and zero padded (and mean subtracted and normalised)
#x_global = MeanNormZeroPad(filename,TupleMeanStd,
# [self.branches[0]],
# [self.branchcutoffs[0]],self.nsamples)
# the second part (the pf candidates) should be treated particle wise
# an array with (njets, nparticles, nproperties) is created
x_glb = MeanNormZeroPadParticles(filename,None,
self.branches[0],
self.branchcutoffs[0],self.nsamples)
x_cpf = MeanNormZeroPadParticles(filename,None,
self.branches[2],
self.branchcutoffs[2],self.nsamples)
x_sv = MeanNormZeroPadParticles(filename,None,
self.branches[3],
self.branchcutoffs[3],self.nsamples)
# now, some jets are removed to avoid pt and eta biases
Tuple = self.readTreeFromRootToTuple(filename)
if self.remove:
# jets are removed until the shapes in eta and pt are the same as
# the truth class 'fj_isNonBB'
notremoves=weighter.createNotRemoveIndices(Tuple)
#undef=Tuple[self.undefTruth]
#notremoves-=undef
if self.weight:
weights=weighter.getJetWeights(Tuple)
elif self.remove:
weights=notremoves
else:
print('neither remove nor weight')
weights=numpy.empty(self.nsamples)
weights.fill(1.)
# create all collections:
#truthtuple = Tuple[self.truthclasses]
alltruth=self.reduceTruth(Tuple)
# removing undefined truth... comment out for CMSSW validation
undef=numpy.sum(alltruth,axis=1)
weights=weights[undef > 0]
x_glb=x_glb[undef > 0]
x_sv=x_sv[undef > 0]
x_cpf=x_cpf[undef > 0]
alltruth=alltruth[undef > 0]
if self.remove: notremoves=notremoves[undef > 0]
# remove the entries to get same jet shapes
if self.remove:
print('remove')
weights=weights[notremoves > 0]
x_glb=x_glb[notremoves > 0]
x_sv=x_sv[notremoves > 0]
x_cpf=x_cpf[notremoves > 0]
alltruth=alltruth[notremoves > 0]
#newnsamp=x_global.shape[0]
newnsamp=x_glb.shape[0]
print('reduced content to ', int(float(newnsamp)/float(self.nsamples)*100),'%')
self.nsamples = newnsamp
# fill everything
self.w=[weights]
self.x=[x_cpf,x_sv]
self.z=[x_glb]
self.y=[alltruth]
class TrainData_deepDoubleB_db_cpf_sv_reduced_6label(TrainData_deepDoubleB_db_cpf_sv_reduced):
def reduceTruth(self, tuple_in):
import numpy
self.reducedtruthclasses=['fj_isNonBB','fj_isGBB','fj_isHBB','fj_isTop','fj_isW','fj_isZBB']
if tuple_in is not None:
q = tuple_in['fj_isNonBB'] * tuple_in['sample_isQCD'] * tuple_in['fj_isQCD']
q = q.view(numpy.ndarray)
g = tuple_in['fj_isBB'] * tuple_in['sample_isQCD'] * tuple_in['fj_isQCD']
g = g.view(numpy.ndarray)
t = tuple_in['fj_isTop']
t = t.view(numpy.ndarray)
w = tuple_in['fj_isW']
w = w.view(numpy.ndarray)
z = tuple_in['fj_isBB'] * tuple_in['fj_isZ']
z = z.view(numpy.ndarray)
h = tuple_in['fj_isH']
h = h.view(numpy.ndarray)
return numpy.vstack((q,g,h,t,w,z)).transpose()
```
#### File: modules/models/DeepJet_models_final.py
```python
import keras
import numpy as np
import tensorflow as tf
from keras import backend as K
from operator import *
from itertools import *
from keras.layers.normalization import BatchNormalization
from multi_gpu_model import multi_gpu_model
kernel_initializer = 'he_normal'
kernel_initializer_fc = 'lecun_uniform'
def FC(data, num_hidden, act='relu', p=None, name=''):
if act=='leakyrelu':
fc = keras.layers.Dense(num_hidden, activation='linear', name='%s_%s' % (name,act), kernel_initializer=kernel_initializer_fc)(data) # Add any layer, with the default of a linear squashing function
fc = keras.layers.advanced_activations.LeakyReLU(alpha=.001)(fc) # add an advanced activation
else:
fc = keras.layers.Dense(num_hidden, activation=act, name='%s_%s' % (name,act), kernel_initializer=kernel_initializer_fc)(data)
if not p:
return fc
else:
dropout = keras.layers.Dropout(rate=p, name='%s_dropout' % name)(fc)
return dropout
def crop(start, end):
def slicer(x):
return x[:,:,start:end]
return keras.layers.Lambda(slicer)
def groupList(listToKeep):
lumiGroups = []
for k, g in groupby(enumerate(listToKeep), lambda (i,x):i-x):
consecutiveLumis = map(itemgetter(1), g)
lumiGroups.append([consecutiveLumis[0],consecutiveLumis[-1]])
return lumiGroups
def cropInputs(inputs, datasets, removedVars):
'''
Arguments
inputs: array of input layers, in the same order as declared in datasets
datasets: array of string labels for the inputs; "db", "sv", "pf", "cpf"
removedVars: array of arrays of ints giving the indices of variables to remove from each data set; a leading -1 drops that input entirely
Returns
array of Lambda-sliced layers matching the input layers, minus the removed variables
'''
croppedLayers = []
for i in range(len(inputs)):
inputSet = inputs[i]
dataset = datasets[i]
removals = removedVars[i]
if len(removals) == 0:
croppedLayers.append(inputSet)
elif removals[0] == -1:
continue
else:
passedVars = []
start = 0
end = int(inputSet.shape[-1])
print type(inputSet.shape[-1])
print end
print removals
allVars = range(start,end)
setToKeep = set(allVars) - set(removals)
listToKeep = sorted(list(setToKeep))
print listToKeep
print groupList(listToKeep)
for group in groupList(listToKeep):
sliced = crop(group[0],group[1]+1)(inputSet)
passedVars.append(sliced)
if len(passedVars) > 1:
cut_layer = keras.layers.concatenate(passedVars, axis = -1, name = 'cut_%s'%dataset )
else:
cut_layer = passedVars[0]
print dataset, cut_layer
croppedLayers.append(cut_layer)
return croppedLayers
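# Illustrative note (not part of the original code): with datasets ['db', 'sv']
# and removedVars [[], [2, 3]], the 'db' input is passed through untouched while
# the 'sv' input is rebuilt from Lambda slices that skip feature columns 2 and 3;
# a removedVars entry starting with -1 drops that input from the output entirely.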
def conv_model_final(inputs, num_classes, num_regclasses, datasets, removedVars = None, multi_gpu=1, **kwargs):
#def conv_model_final(inputs, num_classes, num_regclasses, datasets, **kwargs):
normalizedInputs = []
for i in range(len(inputs)):
print i
#print datasets[i]
print inputs[i]
normedLayer = BatchNormalization(momentum=0.3,name = '%s_input_batchnorm'%datasets[i])(inputs[i])
normalizedInputs.append(normedLayer)
flattenLayers = []
flattenLayers.append(keras.layers.Flatten()(normalizedInputs[0]))
for ds, normalizedInput in zip(datasets[1:],normalizedInputs[1:]):
x = keras.layers.Conv1D(filters=32, kernel_size=(1,), strides=(1,), padding='same',
kernel_initializer=kernel_initializer, use_bias=False, name='%s_conv1'%ds,
activation = 'relu')(normalizedInput)
x = keras.layers.SpatialDropout1D(rate=0.1)(x)
x = keras.layers.Conv1D(filters=32, kernel_size=(1,), strides=(1,), padding='same',
kernel_initializer=kernel_initializer, use_bias=False, name='%s_conv2'%ds,
activation = 'relu')(x)
x = keras.layers.SpatialDropout1D(rate=0.1)(x)
x = keras.layers.GRU(50,go_backwards=True,implementation=2,name='%s_gru'%ds)(x)
x = keras.layers.Dropout(rate=0.1)(x)
flattenLayers.append(x)
#concat = keras.layers.concatenate(flattenLayers, name='concat')
concat = keras.layers.Concatenate()(flattenLayers)
fc = FC(concat, 100, p=0.1, name='fc1')
output = keras.layers.Dense(num_classes, activation='softmax', name='ID_pred', kernel_initializer=kernel_initializer_fc)(fc)
#print output.shape
model = keras.models.Model(inputs=inputs, outputs=[output])
if multi_gpu > 1:
model = multi_gpu_model(model, gpus=multi_gpu)
print model.summary()
return model
def deep_model_removals(inputs, num_classes, num_regclasses, datasets, removedVars = None, **kwargs):
cutInputs = inputs
if removedVars is not None:
cutInputs = cropInputs(inputs, datasets, removedVars)
flattenLayers = [keras.layers.Flatten()(cutInput) for cutInput in cutInputs]
for cutInput in cutInputs:
print cutInput.shape
for flattenLayer in flattenLayers:
print flattenLayer.shape
concat = keras.layers.concatenate(flattenLayers,name="concat")
fc = FC(concat, 64, p=0.1, name='fc1')
fc = FC(fc, 32, p=0.1, name='fc2')
fc = FC(fc, 32, p=0.1, name='fc3')
output = keras.layers.Dense(num_classes, activation='softmax', name='softmax', kernel_initializer=kernel_initializer_fc)(fc)
model = keras.models.Model(inputs=inputs, outputs=output)
print model.summary()
return model
def deep_model_removal_sv(inputs, num_classes,num_regclasses,datasets, removedVars = None, **kwargs):
#ordering of sv variables
#0 'sv_ptrel',
#1 'sv_erel',
#2 'sv_phirel',
#3 'sv_etarel',
#4 'sv_deltaR',
#5 'sv_pt',
#6 'sv_mass',
#7 'sv_ntracks',
#8 'sv_normchi2',
#9 'sv_dxy',
#10 'sv_dxysig',
#11 'sv_d3d',
#12 'sv_d3dsig',
#13 'sv_costhetasvpv'
input_db = inputs[0]
input_sv = inputs[1]
sv_shape = input_sv.shape
passedVars = []
start = 0
end = 14
index =0
for i in removedVars:
if i == start:
start +=1
if i > start:
sliced = crop(start,i)(input_sv)
print sliced.shape
passedVars.append(sliced)
start = i+1
sliced = crop(start,end)(input_sv)
passedVars.append(sliced)
print passedVars
cut_sv = keras.layers.concatenate(passedVars, axis = -1, name = 'cut_sv')
x = keras.layers.Flatten()(input_db)
sv = keras.layers.Flatten()(cut_sv)
concat = keras.layers.concatenate([x, sv], name='concat')
fc = FC(concat, 64, p=0.1, name='fc1')
fc = FC(fc, 32, p=0.1, name='fc2')
fc = FC(fc, 32, p=0.1, name='fc3')
output = keras.layers.Dense(num_classes, activation='softmax', name='softmax', kernel_initializer=kernel_initializer_fc)(fc)
model = keras.models.Model(inputs=inputs, outputs=output)
print model.summary()
return model
``` |
{
"source": "jmduarte/rhalphalib",
"score": 3
} |
#### File: rhalphalib/rhalphalib/template_morph.py
```python
from scipy.interpolate import interp1d
import numpy as np
class AffineMorphTemplate(object):
def __init__(self, hist):
'''
hist: a numpy-histogram-like tuple of (sumw, edges, name)
'''
self.sumw = hist[0]
self.edges = hist[1]
self.varname = hist[2]
self.norm = self.sumw.sum()
self.mean = (self.sumw*(self.edges[:-1] + self.edges[1:])/2).sum() / self.norm
self.cdf = interp1d(
x=self.edges,
y=np.r_[0, np.cumsum(self.sumw / self.norm)],
kind='linear',
assume_sorted=True,
bounds_error=False,
fill_value=(0, 1),
)
def get(self, shift=0., scale=1.):
'''
Return a shifted and scaled histogram
i.e. new edges = edges * scale + shift
'''
scaled_edges = (self.edges - shift) / scale
values = np.diff(self.cdf(scaled_edges)) * self.norm
return values.clip(min=0), self.edges, self.varname
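if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module): morph a toy
    # Gaussian histogram by shifting its edges by +0.5 and stretching by 10%.
    sumw, edges = np.histogram(np.random.normal(size=1000), bins=20)
    template = AffineMorphTemplate((sumw, edges, "x"))
    morphed, _, _ = template.get(shift=0.5, scale=1.1)
    print(template.norm, morphed.sum())   # yields before and after morphing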
``` |
{
"source": "jmdummer/torchparse",
"score": 3
} |
#### File: torchparse/torchparse/base_layers.py
```python
import torch
import torch.nn as nn
from .utils import *
class Layer(object):
"""
Base class for any layer. Safely convert its parameters and
keep track of its input shape.
"""
def __init__(self, config, in_shape):
self.config = {k:safe_conversion(v) for k,v in config.items()}
self.in_shape = in_shape
def get_module(self):
raise NotImplementedError
def get_out_shape(self):
raise NotImplementedError
class ConvolveSpatial(Layer):
"""
Base class for any layer that uses some sort of 2d convolution
(conv2d, maxpool2d for now, maybe refactor this)
"""
def __init__(self, config, in_shape):
super(ConvolveSpatial, self).__init__(config, in_shape)
self.config['stride'] = self.config.get('stride', 1)
self.config['padding'] = padding_type(self.in_shape[1:],
self.config)
def get_out_shape(self):
spatial = self._spatial_transf()
channel = self._channel_transf()
return torch.cat([channel, spatial])
def _spatial_transf(self):
return out_conv(self.in_shape[1:], self.config)
def _channel_transf(self):
raise NotImplementedError
class Dense(Layer):
"""
Base class for any layer that uses dense transformations
(i.e. matrix mult), linear and rnn.
"""
def __init__(self, config, in_shape):
super(Dense, self).__init__(config, in_shape)
self.changed_feat = None
self.out_mod = 1.
def get_out_shape(self):
out_shape = self.in_shape.clone()
out_shape[-1] = self.out_mod*self.config[self.changed_feat]
return out_shape
``` |
{
"source": "jmdwebsites/jmdwebsites_source",
"score": 2
} |
#### File: src/jmdwebsites/project.py
```python
import logging
import os
import py
from . import orderedyaml
from .error import JmdwebsitesError, PathNotFoundError
from .orderedyaml import CommentedMap
from .utils import find_path
logger = logging.getLogger(__name__)
class ProjectError(JmdwebsitesError): pass
# For get_project_dir()
class ProjectNotFoundError(ProjectError): pass
# For protected_remove()
class ProtectedRemoveError(ProjectError): pass
class PathNotAllowedError(ProtectedRemoveError): pass
class BasenameNotAllowedError(ProtectedRemoveError): pass
class PathAlreadyExists(ProjectError): pass
class WebsiteProjectAlreadyExists(ProjectError): pass
def load_specs(basename, locations=None):
try:
filepath = find_path(basename, locations=locations)
except PathNotFoundError as e:
logger.warning('Load specs: %s' % e)
data = CommentedMap()
else:
logger.info('Load specs: %s: %s' % (basename, filepath))
data = orderedyaml.load(filepath).commented_map
return data
def get_project_dir(basename):
logger.info('get_project_dir(%r): cwd: %s:' % (basename, os.getcwd()))
# Check for project file in this dir and ancestor dirs
for dirpath in py.path.local().parts(reverse=True):
for path in dirpath.listdir():
if path.basename == basename:
return path.dirpath()
raise ProjectNotFoundError(
'Not a project (or any parent directories): {} not found'.format(
basename))
def protected_remove(path, valid_basenames=None, projectdir=None):
if valid_basenames is None:
valid_basenames = set(['build'])
for disallowed in [os.getcwd(), __file__]:
if path in py.path.local(disallowed).parts():
raise PathNotAllowedError(
'Remove: {}: Path not allowed, protecting: {}'.format(
path,
disallowed))
if valid_basenames is not None and path.basename not in valid_basenames:
raise BasenameNotAllowedError(
'Remove: {}: Basename not allowed: {}: Must be one of: {}'.format(
path,
path.basename,
valid_basenames))
if projectdir is not None:
try:
#Check that path has a .jmdwebsites file somewhere in one of its
#parent directories, thus indicating it is part of a website project.
get_project_dir(projectdir)
except ProjectNotFoundError as e:
raise ProjectNotFoundError('Remove: {}'.format(e))
if not path.check():
raise PathNotFoundError(
'Remove: Path not found: {}'.format(path))
logger.info('Remove %s', path)
path.remove()
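# Illustrative usage sketch (added note, not part of the original module):
#   protected_remove(py.path.local('build'), projectdir='.jmdwebsites')
# only removes 'build' if it is neither the cwd nor this file, has an allowed
# basename, and (when projectdir is given) sits inside a website project.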
def init_project(projdir):
"""Initialize project.
"""
project_dir = py.path.local(projdir)
if project_dir.check():
raise WebsiteProjectAlreadyExists(
'Project already exists: {}'.format(project_dir))
logger.info('Create project %r: %s', projdir, project_dir.strpath)
project_dir.ensure(dir=1)
def new_project(project_pathname):
"""New project.
"""
project_path = py.path.local(project_pathname)
logger.info('Create new project %s', project_path.strpath)
if project_path.check():
raise PathAlreadyExists(
'Already exists: {}'.format(project_path))
# Use ensure for the time-being
project_path.ensure(dir=1)
logger.error('TODO:')
```
#### File: src/jmdwebsites/yaml.py
```python
from __future__ import print_function
import py
import ruamel
from ruamel.yaml.comments import CommentedMap
from ruamel.yaml.compat import ordereddict
#TODO: Review all this. Not tested yet. Didn't need it in the end
def construct_yaml_str(self, node):
print(repr(node))
if node.tag == 'tag:yaml.org,2002:str':
#node.value = unicode(node.value)
if not isinstance(node.value, unicode):
assert 0
return self.construct_scalar(node)
def construct_yaml_python_str(self, node):
assert 0
def add_contructors():
ruamel.yaml.RoundTripLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
ruamel.yaml.RoundTripLoader.add_constructor(u'tag:yaml.org,2002:python/str', construct_yaml_python_str)
def represent_yaml_str(self, data):
rdata = self.represent_str(data)
print('represent_yaml_str: ', repr(rdata))
return self.represent_str(data.encode('utf-8'))
#return self.represent_str(data)
def represent_yaml_unicode(self, data):
rdata = self.represent_unicode(data)
if rdata.tag == u'tag:yaml.org,2002:python/unicode':
rdata = self.represent_str(data.encode('utf-8'))
elif rdata.tag == u'tag:yaml.org,2002:str':
pass
else:
raise Exception('YAML representer error: {}'.format(rdata.tag))
print('represent_yaml_unicode: ', repr(rdata))
return rdata
#return self.represent_unicode(data)
def add_representers():
ruamel.yaml.Dumper.add_representer(str, represent_yaml_str)
ruamel.yaml.Dumper.add_representer(unicode, represent_yaml_unicode)
# Not needed as RoundTripDumoer has own representers for string handling
#ruamel.yaml.RoundTripDumper.add_representer(str, represent_yaml_str)
#ruamel.yaml.RoundTripDumper.add_representer(unicode, represent_yaml_unicode)
```
#### File: pyapps/tests/conftest.py
```python
import pytest
def pytest_addoption(parser):
parser.addoption("--testlogging", action='store_true')
parser.addoption("--jmddbg", action="store_true", help="Set logger level to debug")
parser.addoption("--jmdinfo", action="store_true", help="Set logger level to info")
```
#### File: tests/jmdwebsites/functional_test.py
```python
from __future__ import print_function
import pytest
import jmdwebsites
from subprocess import Popen, PIPE
import py
import filecmp
def datapath(stem):
return py.path.local(__file__).dirpath('data', stem)
def filt(f):
if f.ext in ['.DS_Store']:
return False
return True
@pytest.mark.parametrize("theme, content, expected", [
('','',datapath('brochure/expected'))
])
def tst_init_then_build(tmpcwd, loginfo, theme, content, expected):
site_dir = tmpcwd
build_dir = site_dir.join('build')
#p = Popen(['jmdwebsites new build'], stdout=PIPE, stderr=PIPE, shell=True)
p = Popen(['jmdwebsites --debug -v init'], stdout=PIPE, stderr=PIPE, shell=True)
out, err = p.communicate()
print('{}{}'.format(out, err), end='')
assert not err
for f in site_dir.visit(rec=1):
print('TST: ', f.strpath)
p = Popen(['jmdwebsites --info -v build'], stdout=PIPE, stderr=PIPE, shell=True)
out, err = p.communicate()
print('{}{}'.format(out, err), end='')
assert not err
for f in site_dir.visit(rec=1):
print('TST: ', f.strpath)
assert build_dir.check()
# Visit all files in built website and check they match the expected files
#for built_path in build_dir.visit():
# assert built_path.check(), 'Not found: {}'.format(built_path)
# expected_path = expected.join(built_path.relto(build_dir))
# assert expected_path.check(), 'Not found: {}'.format(expected_path)
# if built_path.check(file=1):
# print('Compare {}'.format(built_path))
# assert filecmp.cmp(built_path.strpath, expected_path.strpath), \
# 'Page not as expected: {}'.format(built_path)
# Visit all files in expected website and check they match the buit files
for expected_path in expected.visit(fil=filt):
built_path = build_dir.join(expected_path.relto(expected))
print('Check {}'.format(built_path))
assert built_path.check(), 'Not found: {}'.format(built_path)
if built_path.check(file=1):
print('Compare {}'.format(built_path))
assert filecmp.cmp(built_path.strpath, expected_path.strpath), \
'Page not as expected: {}'.format(built_path)
```
#### File: tests/jmdwebsites/test_content.py
```python
from __future__ import print_function
import pytest
from jmdwebsites.content import get_vars, get_content, \
MissingContentError, KEEP_FILE
from jmdwebsites.orderedyaml import CommentedMap
from .generic_test_data import spec
from .generic_test_data import vars as expected_vars
from .generic_test_data import data as expected_data
#from .generic_test_data import vars
#from .generic_test_data import data
#expected_vars = vars
#expected_data = data
#expected_vars = 1
#expected_data = 2
@pytest.mark.parametrize("spec, name, expected", [
(spec, 'vars', expected_vars),
(spec, 'data', expected_data)
])
def test_get_vars(spec, name, expected):
vars = get_vars(spec, name)
assert isinstance(vars, CommentedMap)
assert vars == expected
def test_get_content_with_no_content(tmpdir):
get_content(tmpdir)
assert tmpdir.join(KEEP_FILE).check(file=1)
tmpdir.join('tmp.txt').ensure(file=1)
get_content(tmpdir)
def test_get_content(tmpdir):
tmpdir.join('_tmp.md').ensure(file=1).write_text(u'Hello', 'utf-8')
source_content = get_content(tmpdir)
assert isinstance(source_content, dict)
``` |
{
"source": "jme2041/pylarid",
"score": 2
} |
#### File: pylarid/test/test_dset.py
```python
import larid
import numpy
import unittest
# Demonstration dataset attributes
demo3d = {'ni': 224, 'nj': 256, 'nk': 176, 'nt': 1, 'datatype': 'int16'}
demo4d = {'ni': 64, 'nj': 64, 'nk': 10, 'nt': 240, 'datatype': 'int16'}
class TestDset(unittest.TestCase):
"""Test basic functionality of the larid.Dset class"""
def test_init_noarg(self):
with self.assertRaises(TypeError):
larid.Dset()
def test_init_onearg(self):
with self.assertRaises(TypeError):
larid.Dset(1)
def test_init_twoarg(self):
with self.assertRaises(TypeError):
larid.Dset(1, 1)
def test_init_threearg(self):
with self.assertRaises(TypeError):
larid.Dset(1, 1, 1)
def test_init_fourarg(self):
with self.assertRaises(TypeError):
larid.Dset(1, 1, 1, 1)
def test_init_fivearg(self):
with self.assertRaises(TypeError):
larid.Dset(1, 1, 1, 1, 'uint8')
def test_init_dim_str(self):
x = '1'
for dim in ((x, 1, 1, 1), (1, x, 1, 1), (1, 1, x, 1), (1, 1, 1, x)):
with self.assertRaises(TypeError):
larid.Dset(*dim, datatype='uint8', morder='tkji')
def test_init_dim_undermin(self):
x = 0
for dim in ((x, 1, 1, 1), (1, x, 1, 1), (1, 1, x, 1), (1, 1, 1, x)):
with self.assertRaisesRegex(
ValueError,
'Dataset dimensions must be greater than zero'):
larid.Dset(*dim, datatype='uint8', morder='tkji')
def test_init_dim_overmax(self):
x = 2 ** 63
for dim in ((x, 1, 1, 1), (1, x, 1, 1), (1, 1, x, 1), (1, 1, 1, x)):
with self.assertRaises(OverflowError):
larid.Dset(*dim, datatype='uint8', morder='tkji')
def test_init_overflow(self):
x = 0xF0000000
with self.assertRaises(ValueError):
larid.Dset(x, x, x, x, datatype='uint8', morder='tkji')
def test_init_datatype_numeric(self):
with self.assertRaisesRegex(TypeError, 'Data type must be a string'):
larid.Dset(1, 1, 1, 1, 1, 'tkji')
def test_init_datatype_bad(self):
with self.assertRaisesRegex(ValueError, 'Invalid data type: bad'):
larid.Dset(1, 1, 1, 1, 'bad', 'tkji')
def test_init_morder_numeric(self):
with self.assertRaisesRegex(TypeError,
'Memory order must be a string'):
larid.Dset(1, 1, 1, 1, 'uint8', 1)
def test_init_morder_bad(self):
with self.assertRaisesRegex(ValueError, 'Invalid memory order: bad'):
larid.Dset(1, 1, 1, 1, 'uint8', 'bad')
def test_init_good(self):
# Create a Dset object for the 4D demonstration dataset
obj = larid.Dset(demo4d['ni'], demo4d['nj'], demo4d['nk'],
demo4d['nt'], demo4d['datatype'], 'tkji')
p1 = obj.data.__array_interface__['data'][0]
# Check array shape and data type
self.assertEqual(obj.data.shape, (demo4d['nt'], demo4d['nk'],
demo4d['nj'], demo4d['ni']))
self.assertEqual(obj.data.dtype, numpy.dtype(demo4d['datatype']))
# Check returned dimensions, data type, and memory order
self.assertEqual(obj.ni, demo4d['ni'])
self.assertEqual(obj.nj, demo4d['nj'])
self.assertEqual(obj.nk, demo4d['nk'])
self.assertEqual(obj.nt, demo4d['nt'])
self.assertEqual(obj.datatype, demo4d['datatype'])
self.assertEqual(obj.morder, 'tkji')
# Check returned pixdims and toffset
self.assertEqual(obj.di, 1.0)
self.assertEqual(obj.dj, 1.0)
self.assertEqual(obj.dk, 1.0)
self.assertEqual(obj.dt, 1.0)
self.assertEqual(obj.toffset, 0.0)
# Check returned NIfTI intent information
self.assertEqual(obj.intent_code, 'NIFTI_INTENT_NONE')
self.assertEqual(obj.intent_p1, 0.0)
self.assertEqual(obj.intent_p2, 0.0)
self.assertEqual(obj.intent_p3, 0.0)
self.assertIsNone(obj.intent_name)
# Attempt to set read-only attributes
with self.assertRaises(AttributeError):
obj.data = numpy.zeros((1, 1, 1, 1), obj.data.dtype)
with self.assertRaises(AttributeError):
obj.ni = 1
with self.assertRaises(AttributeError):
obj.nj = 1
with self.assertRaises(AttributeError):
obj.nk = 1
with self.assertRaises(AttributeError):
obj.nt = 1
with self.assertRaises(AttributeError):
obj.datatype = 'uint8'
with self.assertRaises(AttributeError):
obj.di = 3.0
with self.assertRaises(AttributeError):
obj.dj = 3.0
with self.assertRaises(AttributeError):
obj.dk = 3.0
with self.assertRaises(AttributeError):
obj.dt = 3.0
with self.assertRaises(AttributeError):
obj.toffset = 3.0
with self.assertRaises(AttributeError):
obj.intent_code = 'NIFTI_INTENT_NONE'
with self.assertRaises(AttributeError):
obj.intent_p1 = 0.0
with self.assertRaises(AttributeError):
obj.intent_p2 = 0.0
with self.assertRaises(AttributeError):
obj.intent_p3 = 0.0
with self.assertRaises(AttributeError):
obj.intent_name = 'test'
# Attempt to delete read-write attributes
with self.assertRaisesRegex(TypeError,
'Cannot delete the morder attribute'):
delattr(obj, 'morder')
# Attributes should be preserved after failed re-initialization
with self.assertRaises(ValueError):
obj.__init__(demo4d['ni'], demo4d['nj'], demo4d['nk'], 0,
demo4d['datatype'], 'tkji')
p2 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p2)
self.assertEqual(obj.data.shape, (demo4d['nt'], demo4d['nk'],
demo4d['nj'], demo4d['ni']))
self.assertEqual(obj.data.dtype, numpy.dtype(demo4d['datatype']))
self.assertEqual(obj.ni, demo4d['ni'])
self.assertEqual(obj.nj, demo4d['nj'])
self.assertEqual(obj.nk, demo4d['nk'])
self.assertEqual(obj.nt, demo4d['nt'])
self.assertEqual(obj.datatype, demo4d['datatype'])
self.assertEqual(obj.morder, 'tkji')
self.assertEqual(obj.di, 1.0)
self.assertEqual(obj.dj, 1.0)
self.assertEqual(obj.dk, 1.0)
self.assertEqual(obj.dt, 1.0)
self.assertEqual(obj.toffset, 0.0)
self.assertEqual(obj.intent_code, 'NIFTI_INTENT_NONE')
self.assertEqual(obj.intent_p1, 0.0)
self.assertEqual(obj.intent_p2, 0.0)
self.assertEqual(obj.intent_p3, 0.0)
self.assertIsNone(obj.intent_name)
# Re-initialize the object for the 3D demonstration dataset
# Attributes should change after successful re-initialization
obj.__init__(demo3d['ni'], demo3d['nj'], demo3d['nk'], demo3d['nt'],
demo3d['datatype'], 'tkji')
p3 = obj.data.__array_interface__['data'][0]
self.assertNotEqual(p1, p3)
self.assertEqual(obj.data.shape, (demo3d['nt'], demo3d['nk'],
demo3d['nj'], demo3d['ni']))
self.assertEqual(obj.data.dtype, numpy.dtype(demo3d['datatype']))
self.assertEqual(obj.ni, demo3d['ni'])
self.assertEqual(obj.nj, demo3d['nj'])
self.assertEqual(obj.nk, demo3d['nk'])
self.assertEqual(obj.nt, demo3d['nt'])
self.assertEqual(obj.datatype, demo3d['datatype'])
self.assertEqual(obj.morder, 'tkji')
self.assertEqual(obj.di, 1.0)
self.assertEqual(obj.dj, 1.0)
self.assertEqual(obj.dk, 1.0)
self.assertEqual(obj.dt, 0.0)
self.assertEqual(obj.toffset, 0.0)
self.assertEqual(obj.intent_code, 'NIFTI_INTENT_NONE')
self.assertEqual(obj.intent_p1, 0.0)
self.assertEqual(obj.intent_p2, 0.0)
self.assertEqual(obj.intent_p3, 0.0)
self.assertIsNone(obj.intent_name)
def test_copy(self):
# Start with the demonstration 4D dataset
obj1 = larid.Dset(demo4d['ni'], demo4d['nj'], demo4d['nk'],
demo4d['nt'], demo4d['datatype'], 'tkji')
p1 = obj1.data.__array_interface__['data'][0]
# Do the copy
obj2 = obj1.copy()
p2 = obj2.data.__array_interface__['data'][0]
# Voxel data should not be the same array object
self.assertNotEqual(p1, p2)
# Voxel data should have the same shape, dtype, and data
self.assertEqual(obj1.data.shape, obj2.data.shape)
self.assertEqual(obj1.data.dtype, obj2.data.dtype)
self.assertTrue(numpy.allclose(obj1.data, obj2.data))
# Attributes should be the same
self.assertEqual(obj1.ni, obj2.ni)
self.assertEqual(obj1.nj, obj2.nj)
self.assertEqual(obj1.nk, obj2.nk)
self.assertEqual(obj1.nt, obj2.nt)
self.assertEqual(obj1.datatype, obj2.datatype)
self.assertEqual(obj1.morder, obj2.morder)
self.assertEqual(obj1.di, obj2.di)
self.assertEqual(obj1.dj, obj2.dj)
self.assertEqual(obj1.dk, obj2.dk)
self.assertEqual(obj1.dt, obj2.dt)
self.assertEqual(obj1.toffset, obj2.toffset)
self.assertEqual(obj1.intent_code, obj2.intent_code)
self.assertEqual(obj1.intent_p1, obj2.intent_p1)
self.assertEqual(obj1.intent_p2, obj2.intent_p2)
self.assertEqual(obj1.intent_p3, obj2.intent_p3)
self.assertEqual(obj1.intent_name, obj2.intent_name)
if __name__ == '__main__':
unittest.main()
###############################################################################
```
#### File: pylarid/test/test_morder.py
```python
import larid
import numpy
import unittest
# Demonstration dataset attributes
demo4d = {'ni': 64, 'nj': 64, 'nk': 10, 'nt': 240, 'datatype': 'int16'}
class TestMorder(unittest.TestCase):
"""Test switching memory orders"""
def test_ramp(self):
for datatype in ('uint8', 'int16', 'int32', 'float32', 'float64'):
with self.subTest(datatype=datatype):
obj = larid.Dset(demo4d['ni'], demo4d['nj'], demo4d['nk'],
demo4d['nt'], datatype, 'tkji')
p1 = obj.data.__array_interface__['data'][0]
# Start with a ramp image
for j in range(0, demo4d['nj']):
obj.data[:, :, j, :] = j
# Transpose to kjit
obj.morder = 'kjit'
p2 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p2)
# Check
for j in range(0, demo4d['nj']):
self.assertTrue(numpy.allclose(obj.data[:, j, :, :], j))
# Transpose back to tkji
obj.morder = 'tkji'
p3 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p3)
# Check
for j in range(0, demo4d['nj']):
self.assertTrue(numpy.allclose(obj.data[:, :, j, :], j))
# Redundant request should have no effect
obj.morder = 'tkji'
p4 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p4)
# Check
for j in range(0, demo4d['nj']):
self.assertTrue(numpy.allclose(obj.data[:, :, j, :], j))
def test_tseries(self):
for datatype in ('uint8', 'int16', 'int32', 'float32', 'float64'):
with self.subTest(datatype=datatype):
obj = larid.Dset(demo4d['ni'], demo4d['nj'], demo4d['nk'],
demo4d['nt'], datatype, 'kjit')
p1 = obj.data.__array_interface__['data'][0]
# Start with an ascending time series
for t in range(0, demo4d['nt']):
obj.data[:, :, :, t] = t
# Transpose to tkji
obj.morder = 'tkji'
p2 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p2)
# Check
for t in range(0, demo4d['nt']):
self.assertTrue(numpy.allclose(obj.data[t, :, :, :], t))
# Transpose back to kjit
obj.morder = 'kjit'
p3 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p3)
# Check
for t in range(0, demo4d['nt']):
self.assertTrue(numpy.allclose(obj.data[:, :, :, t], t))
# Redundant request should have no effect
obj.morder = 'kjit'
p4 = obj.data.__array_interface__['data'][0]
self.assertEqual(p1, p4)
# Check
for t in range(0, demo4d['nt']):
self.assertTrue(numpy.allclose(obj.data[:, :, :, t], t))
if __name__ == '__main__':
unittest.main()
###############################################################################
```
#### File: pylarid/test/test_rescale.py
```python
import larid
import numpy
import unittest
# Demonstration dataset attributes
demo3d = {'ni': 224, 'nj': 256, 'nk': 176, 'nt': 1, 'datatype': 'int16'}
class TestRescale(unittest.TestCase):
"""Test rescaling a dataset"""
def setUp(self):
self.obj = larid.Dset(demo3d['ni'], demo3d['nj'], demo3d['nk'],
demo3d['nt'], demo3d['datatype'], 'tkji')
def test_rescale_noarg(self):
with self.assertRaises(TypeError):
self.obj.rescale()
def test_rescale_onearg(self):
with self.assertRaises(TypeError):
self.obj.rescale(1)
def test_rescale_min_bad(self):
with self.assertRaises(TypeError):
self.obj.rescale('1')
def test_rescale_max_bad(self):
with self.assertRaises(TypeError):
self.obj.rescale(1, '2')
def test_rescale_min_eq_max(self):
with self.assertRaisesRegex(ValueError,
'new_min must be less than new_max'):
self.obj.rescale(1, 1)
def test_rescale_min_gt_max(self):
with self.assertRaisesRegex(ValueError,
'new_min must be less than new_max'):
self.obj.rescale(2, 1)
def test_rescale_good(self):
# Start with a ramp image [0, 255]
for j in range(0, self.obj.nj):
self.obj.data[:, :, j, :] = j
# Rescale up to [-255, 255]
self.obj.rescale(-255, 255)
for j in range(0, self.obj.nj):
self.assertTrue(numpy.allclose(
self.obj.data[:, :, j, :], -255 + 2 * j))
# Rescale up to [-510, 510]
self.obj.rescale(-510, 510)
for j in range(0, self.obj.nj):
self.assertTrue(
numpy.allclose(self.obj.data[:, :, j, :], -510 + 4 * j))
# Rescale back to [0, 255]
self.obj.rescale(0, 255)
for j in range(0, self.obj.nj):
self.assertTrue(numpy.allclose(self.obj.data[:, :, j, :], j))
if __name__ == '__main__':
unittest.main()
###############################################################################
``` |
{
"source": "jmeadows99/Tableau-Utilities",
"score": 2
} |
#### File: Tableau-Utilities/storyBuilder/image2twb.py
```python
import os
import argparse
import subprocess
# Returns a list of image files within directory ("path"), sorted based on their creation time stamp
def image_filenames(path):
image_extensions = ('jpg','jpeg', 'png', 'bmp')
file_names = [fn for fn in os.listdir(path) if fn.lower().endswith(image_extensions)]
return list(sorted(file_names, key=lambda f: os.stat(os.path.join(path, f)).st_mtime))
def write_dashboard(file, size, image_path, num):
file.write("""
<dashboard name='Dashboard {slide_num}'>
<style />
<size {sizing_string} />
<zones>
<zone h='100000' id='4' type='layout-basic' w='100000' x='0' y='0'>
<zone h='97090' id='3' is-centered='0' is-scaled='1' param='{image_folder_image_path}' type='bitmap' w='98316' x='842' y='1455'>
<zone-style>
<format attr='border-color' value='#000000' />
<format attr='border-style' value='none' />
<format attr='border-width' value='0' />
<format attr='margin' value='4' />
</zone-style>
</zone>
<zone-style>
<format attr='border-color' value='#000000' />
<format attr='border-style' value='none' />
<format attr='border-width' value='0' />
<format attr='margin' value='8' />
</zone-style>
</zone>
</zones>
<devicelayouts>
<devicelayout auto-generated='true' name='Phone'>
<size maxheight='700' minheight='700' sizing-mode='vscroll' />
<zones>
<zone h='100000' id='6' type='layout-basic' w='100000' x='0' y='0'>
<zone h='97090' id='5' param='vert' type='layout-flow' w='98316' x='842' y='1455'>
<zone fixed-size='280' h='97090' id='3' is-centered='0' is-fixed='true' is-scaled='1' param='{image_folder_image_path}' type='bitmap' w='98316' x='842' y='1455'>
<zone-style>
<format attr='border-color' value='#000000' />
<format attr='border-style' value='none' />
<format attr='border-width' value='0' />
<format attr='margin' value='4' />
<format attr='padding' value='0' />
</zone-style>
</zone>
</zone>
<zone-style>
<format attr='border-color' value='#000000' />
<format attr='border-style' value='none' />
<format attr='border-width' value='0' />
<format attr='margin' value='8' />
</zone-style>
</zone>
</zones>
</devicelayout>
</devicelayouts>
<simple-id uuid='{{4D058E49-AB62-4056-BA04-B1F1036B{end_id}}}' />
</dashboard>""".format(image_folder_image_path = image_path , slide_num = num, end_id = num + 1000, sizing_string = size))
def write_story_point(file, num):
file.write("""
<window class='dashboard' hidden='true' maximized='true' name='Dashboard {slide_num}'>
<viewpoints />
<active id='-1' />
<simple-id uuid='{{B37FC551-7DBC-47F4-8E07-908C28F9{end_id}}}' />
</window>""".format(slide_num = num, end_id = num + 1000))
def create_story_workbook(args):
if args.fixed is None:
sizing_string = "sizing-mode='automatic'"
story_sizing_string = "sizing-mode='automatic'"
else:
height = args.fixed[0][0]
width = args.fixed[0][1]
sizing_string = "maxheight='{height}' maxwidth='{width}' minheight='{height}' minwidth='{width}' sizing-mode='fixed'".format(height = height, width = width)
story_sizing_string = "maxheight='964' maxwidth='1016' minheight='964' minwidth='1016'"
if args.tableau_path_name.endswith(".twb"):
tableau_file_path = args.tableau_path_name
else:
tableau_file_path = args.tableau_path_name + ".twb"
if os.path.exists(tableau_file_path) and not args.replace:
print("File {path} already exists. If you wish to replace an existing file, use the -r or --replace flag.".format(path = tableau_file_path))
exit(0)
if not os.path.exists(args.images_folder_path):
print("No folder named {dir}".format(dir = args.images_folder_path))
exit(0)
directory = os.path.abspath(args.images_folder_path)
image_list = image_filenames(directory)
if not image_list:
print("Folder {dir} does not contain any image files.".format(dir = args.images_folder_path))
exit(0)
with open(tableau_file_path, 'w') as f:
f.write(
"""<?xml version='1.0' encoding='utf-8' ?>
<!-- build 20194.19.1010.1202 -->
<workbook original-version='18.1' source-build='2019.4.0 (20194.19.1010.1202)' source-platform='mac' version='18.1' xmlns:user='http://www.tableausoftware.com/xml/user'>
<document-format-change-manifest>
<AutoCreateAndUpdateDSDPhoneLayouts ignorable='true' predowngraded='true' />
<SheetIdentifierTracking ignorable='true' predowngraded='true' />
<WindowsPersistSimpleIdentifiers />
</document-format-change-manifest>
<preferences>
<preference name='ui.encoding.shelf.height' value='24' />
<preference name='ui.shelf.height' value='26' />
</preferences>
<datasources />
<dashboards>""")
dashboard_num = 1
for image_file in image_list:
write_dashboard(f, sizing_string, os.path.join(directory, image_file), dashboard_num)
dashboard_num += 1
# Story header
f.write("""
<dashboard name='Story 1' type='storyboard'>
<style />
<size {story_sizing_string}/>
<zones>
<zone h='100000' id='2' type='layout-basic' w='100000' x='0' y='0'>
<zone h='98340' id='1' param='vert' removable='false' type='layout-flow' w='98426' x='787' y='830'>
<zone h='3423' id='3' type='title' w='98426' x='787' y='830' />
<zone h='10477' id='4' is-fixed='true' paired-zone-id='5' removable='false' type='flipboard-nav' w='98426' x='787' y='4253' />
<zone h='84440' id='5' paired-zone-id='4' removable='false' type='flipboard' w='98426' x='787' y='14730'>
<flipboard active-id='2' nav-type='caption' show-nav-arrows='true'>
<story-points>""".format(story_sizing_string = story_sizing_string))
for i in range(len(image_list)):
f.write("""
<story-point captured-sheet='Dashboard {slide_num}' id='{slide_num}' />""".format(slide_num = i + 1))
# Story trailer
f.write("""
</story-points>
</flipboard>
</zone>
</zone>
<zone-style>
<format attr='border-color' value='#000000' />
<format attr='border-style' value='none' />
<format attr='border-width' value='0' />
<format attr='margin' value='8' />
</zone-style>
</zone>
</zones>
<simple-id uuid='{503D6677-4C88-47BE-9B70-D9B6504FB60B}' />
</dashboard>""")
f.write("""
</dashboards>
<windows>""")
### Create a unique id for each window created (per dashboard and per slide)
for i in range(len(image_list)):
write_story_point(f, i + 1)
f.write("""
<window class='dashboard' maximized='true' name='Story 1'>
<viewpoints />
<active id='-1' />
<simple-id uuid='{C8E3C7C3-8B64-490D-8564-A7EA63E551AE}' />
</window>""")
f.write("""
</windows>
</workbook>""")
if args.open:
subprocess.call(['open', tableau_file_path])
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = "Creates a Tableau workbook with a story containing one storypoint for each image in a folder")
parser.add_argument("images_folder_path", help = "Absolute pathname of the folder containing images to include in the Tableau workbook")
parser.add_argument("tableau_path_name", help = "Pathname of the Tableau workbook to create, .twb extension optional")
parser.add_argument("-f", "--fixed", metavar = ('HEIGHT', 'WIDTH'), help = "Use a fixed size for dashboards and storypoints (the default is an automatic/responsive size). Requires a height and width size in pixels.", type = int, nargs = 2, action = "append")
parser.add_argument("-o", "--open", help = "Open the generated workbook after creating it.", action = "store_true")
parser.add_argument("-r", "--replace", help = "Replaces a Tableau workbook if one already exists with the same name.", action = "store_true")
create_story_workbook(parser.parse_args())
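# Illustrative usage (a sketch; the script filename and image folder below are placeholders,
# not names defined by this project):
#
#   python make_story_workbook.py ./screenshots my_story -f 800 1200 -o
#
# This would build my_story.twb with one dashboard and one story point per image found in
# ./screenshots, using a fixed 800x1200 px layout, and open the workbook when finished.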
``` |
{
"source": "jmecki/jCMIP",
"score": 2
} |
#### File: jCMIP/jCMIP/CMIPread.py
```python
from netCDF4 import Dataset
import numpy as np
import copy
import calendar
import cftime
# Determine dimensions using a given file and variable:
def getDims(infile,var):
ncid = Dataset(infile,'r')
dims = ncid.variables[var].get_dims()
    # The returned netCDF4 Dimension objects can generally only be queried while the
    # underlying file is open, so the dataset is left open here rather than closed.
return dims
# Checks and fixes time if calendars don't match:
def fixTime(units,units2,cal,time,time_bnds):
if units2 != units:
yr_init = int(units.split(' ')[2][0:4])
yr_new = int(units2.split(' ')[2][0:4])
nleap = 0
if ((cal == 'standard') | (cal == 'gregorian')):
days = 365
for yy in range(yr_init,yr_new):
if calendar.isleap(yy):
nleap = nleap + 1
elif cal == 'noleap':
days = 365
else:
days = int(cal[0:3])
offset = days*(yr_new-yr_init) + nleap
time = time + offset
time_bnds = time_bnds + offset
return time, time_bnds
# Determine starting file and position:
def timeFile(Model,cal,units2,tt,Files):
ff = 0
nn = np.nan
found = False
dd = cftime.num2date(tt,units2,cal)
nf = len(Files)
while(((ff < nf) & (found == False))):
ncid = Dataset(Files[ff],'r')
ttf = ncid.variables['time'][:]
units = ncid.variables['time'].units
ncid.close()
if ((Model.name == 'FGOALS-g2')):
units = (units + '-01')
        dd2 = cftime.date2num(dd,units,cal)   # convert the target date into this file's own time units
if(((dd2 >= ttf[0]) & (dd2 <= ttf[-1]))):
nn = 0
while(((nn < len(ttf)) & (found == False))):
if (ttf[nn] == dd2):
found = True
else:
nn = nn + 1
else:
ff = ff + 1
return ff, nn
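# Illustrative sketch of how timeFile and fixTime combine (the file list, target time tt,
# reference units, calendar and the pre-configured jCMIP Model object are all placeholders,
# not values created by this module):
#
# import glob
# Files     = sorted(glob.glob('/data/tos_Omon_MODEL_*.nc'))     # hypothetical per-period files
# units_ref = 'days since 1850-01-01'                            # common reference time units
# ff, nn    = timeFile(Model, 'noleap', units_ref, tt, Files)    # file index and record index of time tt
# ncid      = Dataset(Files[ff], 'r')
# time      = ncid.variables['time'][:]
# time_bnds = ncid.variables['time_bnds'][:]
# units     = ncid.variables['time'].units
# ncid.close()
# time, time_bnds = fixTime(units, units_ref, 'noleap', time, time_bnds)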
# Reads in lat and lon data from ocean:
def Olatlon(Model,infile,var):
ncid = Dataset(infile,'r')
if (Model.name == 'MPI-ESM1-2-XR'):
if (var == 'uo'):
if Model.Oreg:
lon = ncid.variables['lon_2'][:]
lat = ncid.variables['lat_2'][:]
else:
lon = ncid.variables['lon_2'][:,:]
lat = ncid.variables['lat_2'][:,:]
elif (var == 'vo'):
if Model.Oreg:
lon = ncid.variables['lon_3'][:]
lat = ncid.variables['lat_3'][:]
else:
lon = ncid.variables['lon_3'][:,:]
lat = ncid.variables['lat_3'][:,:]
else:
if Model.Oreg:
lon = ncid.variables[Model.Olon][:]
lat = ncid.variables[Model.Olat][:]
else:
lon = ncid.variables[Model.Olon][:,:]
lat = ncid.variables[Model.Olat][:,:]
else:
if Model.Oreg:
lon = ncid.variables[Model.Olon][:]
lat = ncid.variables[Model.Olat][:]
else:
lon = ncid.variables[Model.Olon][:,:]
lat = ncid.variables[Model.Olat][:,:]
ncid.close()
# Flip North-South:
if Model.OflipNS:
lat = np.flip(lat,axis=0)
if not Model.Oreg:
lon = np.flip(lon,axis=0)
# Extra row in T fields (coded only for regular grid):
if Model.OextraT:
if ((var == 'vo') | (var == 'uo') | (var == 'tauuo') | (var == 'tauvo')):
if Model.Oreg:
lat = np.concatenate((lat,[-90,]),0)
else:
lat = np.concatenate((lat,-90*np.ones((1,np.size(lat,axis=1)),'float')),0)
lon = np.concatenate((lon,lon[-1:,:]),0)
# Remove extra W-E columns:
if Model.Oreg:
ni = np.size(lon,axis=0)
lon = lon[Model.OextraWE[0]:(ni-Model.OextraWE[1])]
else:
ni = np.size(lon,axis=1)
lon = lon[:,Model.OextraWE[0]:(ni-Model.OextraWE[1])]
lat = lat[:,Model.OextraWE[0]:(ni-Model.OextraWE[1])]
return lon,lat
# Reads in lat and lon data from atmosphere:
def Alatlon(Model,infile,var):
ncid = Dataset(infile,'r')
if Model.Areg:
lon = ncid.variables[Model.Alon][:]
lat = ncid.variables[Model.Alat][:]
else:
lon = ncid.variables[Model.Alon][:,:]
lat = ncid.variables[Model.Alat][:,:]
ncid.close()
# Flip North-South:
if Model.AflipNS:
lat = np.flip(lat,axis=0)
if not Model.Areg:
lon = np.flip(lon,axis=0)
# Extra row in u and v fields (coded only for regular grid):
if Model.AextraT:
print('Need to code for AextraUV')
# Remove extra W-E columns:
if Model.Areg:
ni = np.size(lon,axis=0)
lon = lon[Model.AextraWE[0]:(ni-Model.AextraWE[1])]
else:
ni = np.size(lon,axis=1)
lon = lon[:,Model.AextraWE[0]:(ni-Model.AextraWE[1])]
lat = lat[:,Model.AextraWE[0]:(ni-Model.AextraWE[1])]
return lon,lat
# Reads in data from a 2D ocean field:
def Oread2Ddata(Model,infile,var,time=None,lev=None,mask=False):
ncid = Dataset(infile,'r')
# Flip Up-Down:
if ((lev != None) & ((Model.name == 'CFSv2-2011') | (Model.name == 'FGOALS-gl') | (Model.name == 'HadGEM2-AO'))):
nk = len(ncid.variables['lev'][:])
lev = nk - 1 - lev
if mask:
if time == None:
if lev == None:
data = 1-np.squeeze(ncid.variables[var][:,:]).mask
else:
data = 1-np.squeeze(ncid.variables[var][lev,:,:]).mask
else:
if lev == None:
data = 1-np.squeeze(ncid.variables[var][time,:,:]).mask
else:
data = 1-np.squeeze(ncid.variables[var][time,lev,:,:]).mask
else:
if time == None:
if lev == None:
data = np.squeeze(ncid.variables[var][:,:]).data
else:
data = np.squeeze(ncid.variables[var][lev,:,:]).data
else:
if lev == None:
data = np.squeeze(ncid.variables[var][time,:,:]).data
else:
data = np.squeeze(ncid.variables[var][time,lev,:,:]).data
ncid.close()
# Flip North-South:
if Model.OflipNS:
data = np.flip(data,axis=0)
# Extra row in u and v fields:
if Model.OextraT:
if ((var == 'vo') | (var == 'uo') | (var == 'tauvo') | (var == 'tauuo')):
data = np.concatenate((data,np.expand_dims(data[-1,:],0)),0)
# Remove extra W-E columns:
ni = np.size(data,axis=1)
data = data[:,Model.OextraWE[0]:(ni-Model.OextraWE[1])]
return data
# Reads in data from a 3D ocean field:
def Oread3Ddata(Model,infile,var,time=None,mask=False):
ncid = Dataset(infile,'r')
if mask:
if time == None:
data = 1-np.squeeze(ncid.variables[var][:,:,:]).mask
else:
data = 1-np.squeeze(ncid.variables[var][time,:,:,:]).mask
else:
if time == None:
data = np.squeeze(ncid.variables[var][:,:,:]).data
else:
data = np.squeeze(ncid.variables[var][time,:,:,:]).data
ncid.close()
# Flip North-South:
if Model.OflipNS:
data = np.flip(data,axis=1)
# Extra row in u and v fields:
if Model.OextraT:
if ((var == 'vo') | (var == 'uo') | (var == 'tauvo') | (var == 'tauuo')):
data = np.concatenate((data,np.expand_dims(data[:,-1,:],1)),1)
# Remove extra W-E columns:
ni = np.size(data,axis=2)
data = data[:,:,Model.OextraWE[0]:(ni-Model.OextraWE[1])]
# Flip Up-Down:
if ((Model.name == 'CFSv2-2011') | (Model.name == 'FGOALS-gl') | (Model.name == 'HadGEM2-AO')):
data = data[::-1,:,:]
return data
# Reads in data from a 2D atmosphere field:
def Aread2Ddata(Model,infile,var,time=None,lev=None,mask=False):
ncid = Dataset(infile,'r')
if mask:
if time == None:
if lev == None:
data = 1-np.squeeze(ncid.variables[var][:,:]).mask
else:
data = 1-np.squeeze(ncid.variables[var][lev,:,:]).mask
else:
if lev == None:
                data = 1-np.squeeze(ncid.variables[var][time,:,:]).mask
else:
                data = 1-np.squeeze(ncid.variables[var][time,lev,:,:]).mask
else:
if time == None:
if lev == None:
data = np.squeeze(ncid.variables[var][:,:]).data
else:
data = np.squeeze(ncid.variables[var][lev,:,:]).data
else:
if lev == None:
data = np.squeeze(ncid.variables[var][time,:,:]).data
else:
data = np.squeeze(ncid.variables[var][time,lev,:,:]).data
ncid.close()
# Flip North-South:
if Model.AflipNS:
data = np.flip(data,axis=0)
# Remove extra W-E columns:
ni = np.size(data,axis=1)
data = data[:,Model.AextraWE[0]:(ni-Model.AextraWE[1])]
return data
# Move data onto different grid points:
def moveData(Model,grid1,grid2,data,computation='mean'):
if ((grid1 == 'T') & (grid2 == 'U')):
if Model.Ogrid[0] == 'B':
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(4,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][0,:,:]
ncid.close()
if Model.Ogrid[1] == 'b':
tmp[2,:-1,:] = data[1:,:]
tmp[3,:-1,:] = data[1:,:]
elif Model.Ogrid[1] == 't':
tmp[2,1:,:] = data[:-1,:]
tmp[3,1:,:] = data[:-1,:]
if Model.Ogrid[2] == 'l':
tmp[1,:,:] = np.roll(tmp[0,:,:],1,axis=1)
tmp[3,:,:] = np.roll(tmp[2,:,:],1,axis=1)
elif Model.Ogrid[2] == 'r':
tmp[1,:,:] = np.roll(tmp[0,:,:],-1,axis=1)
tmp[3,:,:] = np.roll(tmp[2,:,:],-1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(4,1,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][:,:,:]
ncid.close()
if Model.Ogrid[1] == 'b':
tmp[2,:,:-1,:] = data[:,1:,:]
tmp[3,:,:-1,:] = data[:,1:,:]
elif Model.Ogrid[1] == 't':
tmp[2,:,1:,:] = data[:,:-1,:]
tmp[3,:,1:,:] = data[:,:-1,:]
if Model.Ogrid[2] == 'l':
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],1,axis=2)
tmp[3,:,:,:] = np.roll(tmp[2,:,:,:],1,axis=2)
elif Model.Ogrid[2] == 'r':
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],-1,axis=2)
tmp[3,:,:,:] = np.roll(tmp[2,:,:,:],-1,axis=2)
elif Model.Ogrid[0] == 'C':
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][0,:,:]
ncid.close()
if Model.Ogrid[2] == 'l':
tmp[1,:,:] = np.roll(tmp[0,:,:],1,axis=1)
elif Model.Ogrid[2] == 'r':
tmp[1,:,:] = np.roll(tmp[0,:,:],-1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][:,:,:]
ncid.close()
if Model.Ogrid[2] == 'l':
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],1,axis=2)
elif Model.Ogrid[2] == 'r':
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],-1,axis=2)
elif Model.Ogrid[0] == 'A':
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(1,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][0,:,:]
ncid.close()
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(1,1,1,1))
ncid = Dataset(Model.Omeshmask,'r')
umask = ncid.variables['umask'][:,:,:]
ncid.close()
if computation == 'mean':
datanew = np.squeeze(np.mean(tmp,axis=0)*umask)
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0)*umask)
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0)*umask)
elif ((grid1 == 'T') & (grid2 == 'VT')):
# Data won't be masked:
ncid = Dataset(Model.Omeshmask,'r')
dyt = ncid.variables['dyt'][:,:]
ncid.close()
if ((Model.Ogrid[0] == 'A') | (Model.Ogrid[1] == 'b')):
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
dyt = np.tile(dyt,(2,1,1))
tmp[1,1:,:] = tmp[0,:-1,:]
dyt[1,1:,:] = dyt[0,:-1,:]
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
dyt = np.tile(dyt,(2,np.size(tmp,1),1,1))
tmp[1,:,1:,:] = tmp[0,:,:-1,:]
dyt[1,:,1:,:] = dyt[0,:,:-1,:]
else:
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
dyt = np.tile(dyt,(2,1,1))
tmp[1,:-1,:] = tmp[0,1:,:]
dyt[1,:-1,:] = dyt[0,1:,:]
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
dyt = np.tile(dyt,(2,np.size(tmp,1),1,1))
tmp[1,:,:-1,:] = tmp[0,:,1:,:]
dyt[1,:,:-1,:] = dyt[0,:,1:,:]
if computation == 'mean':
datanew = np.squeeze(np.sum(tmp*dyt,axis=0)/np.sum(dyt,axis=0))
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0))
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0))
elif ((grid1 == 'V') & (grid2 == 'VT')):
# Data won't be masked:
if Model.Ogrid[0] == 'A':
ncid = Dataset(Model.Omeshmask,'r')
dyv = ncid.variables['dyv'][:,:]
ncid.close()
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
dyv = np.tile(dyv,(2,1,1))
tmp[1,1:,:] = tmp[0,:-1,:]
dyv[1,1:,:] = dyv[0,:-1,:]
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
dyv = np.tile(dyv,(2,np.size(tmp,1),1,1))
tmp[1,:,1:,:] = tmp[0,:,:-1,:]
dyv[1,:,1:,:] = dyv[0,:,:-1,:]
if computation == 'mean':
datanew = np.squeeze(np.sum(tmp*dyv,axis=0)/np.sum(dyv,axis=0))
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0))
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0))
elif Model.Ogrid[0] == 'B':
if Model.Ogrid[2] == 'r':
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
tmp[1,:,:] = np.roll(tmp[0,:,:],1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],1,axis=2)
if Model.Ogrid[2] == 'l':
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
tmp[1,:,:] = np.roll(tmp[0,:,:],-1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],-1,axis=2)
if computation == 'mean':
datanew = np.squeeze(np.mean(tmp,axis=0))
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0))
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0))
elif Model.Ogrid[0] == 'C':
            datanew = data   # on a C-grid no neighbouring points are combined here, so pass the field through
elif ((grid1 == 'T') & (grid2 == 'V')):
# Data won't be masked:
if (Model.Ogrid[0] == 'A'):
datanew = data
elif (Model.Ogrid[0] == 'B'):
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(4,1,1))
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(4,1,1,1))
if (Model.Ogrid[1] == 't'):
if np.size(np.shape(data)) == 2:
tmp[2,:-1,:] = tmp[0,1:,:]
tmp[3,:-1,:] = tmp[0,1:,:]
elif np.size(np.shape(data)) == 3:
tmp[2,:,:-1,:] = tmp[0,:,1:,:]
tmp[3,:,:-1,:] = tmp[0,:,1:,:]
elif (Model.Ogrid[1] == 'b'):
if np.size(np.shape(data)) == 2:
tmp[2,1:,:] = tmp[0,:-1,:]
tmp[3,1:,:] = tmp[0,:-1,:]
elif np.size(np.shape(data)) == 3:
tmp[2,:,1:,:] = tmp[0,:,:-1,:]
tmp[3,:,1:,:] = tmp[0,:,:-1,:]
if (Model.Ogrid[2] == 'r'):
if np.size(np.shape(data)) == 2:
tmp[1,:,:] = np.roll(tmp[0,:,:],-1,axis=1)
tmp[2,:,:] = np.roll(tmp[3,:,:],-1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],-1,axis=2)
tmp[2,:,:,:] = np.roll(tmp[3,:,:,:],-1,axis=2)
elif (Model.Ogrid[2] == 'l'):
if np.size(np.shape(data)) == 2:
tmp[1,:,:] = np.roll(tmp[0,:,:],1,axis=1)
tmp[2,:,:] = np.roll(tmp[3,:,:],1,axis=1)
elif np.size(np.shape(data)) == 3:
tmp[1,:,:,:] = np.roll(tmp[0,:,:,:],1,axis=2)
tmp[2,:,:,:] = np.roll(tmp[3,:,:,:],1,axis=2)
if computation == 'mean':
datanew = np.squeeze(np.mean(tmp,axis=0))
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0))
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0))
else:
ncid = Dataset(Model.Omeshmask,'r')
dyt = ncid.variables['dyt'][:,:]
ncid.close()
if ((Model.Ogrid[1] == 'b')):
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
dyt = np.tile(dyt,(2,1,1))
tmp[1,1:,:] = tmp[0,:-1,:]
dyt[1,1:,:] = dyt[0,:-1,:]
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
dyt = np.tile(dyt,(2,np.size(tmp,1),1,1))
tmp[1,:,1:,:] = tmp[0,:,:-1,:]
dyt[1,:,1:,:] = dyt[0,:,:-1,:]
else:
if np.size(np.shape(data)) == 2:
tmp = np.tile(data,(2,1,1))
dyt = np.tile(dyt,(2,1,1))
tmp[1,:-1,:] = tmp[0,1:,:]
dyt[1,:-1,:] = dyt[0,1:,:]
elif np.size(np.shape(data)) == 3:
tmp = np.tile(data,(2,1,1,1))
dyt = np.tile(dyt,(2,np.size(tmp,1),1,1))
tmp[1,:,:-1,:] = tmp[0,:,1:,:]
dyt[1,:,:-1,:] = dyt[0,:,1:,:]
if computation == 'mean':
datanew = np.squeeze(np.sum(tmp*dyt,axis=0)/np.sum(dyt,axis=0))
elif computation == 'min':
datanew = np.squeeze(np.min(tmp,axis=0))
elif computation == 'max':
datanew = np.squeeze(np.max(tmp,axis=0))
else:
print('Need to code')
return datanew
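# Illustrative end-to-end sketch (the input file name and the pre-configured jCMIP Model
# object are placeholders; they are not created by this module):
#
# infile = '/data/thetao_Omon_MODEL_185001-194912.nc'                     # hypothetical input file
# lon, lat = Olatlon(Model, infile, 'thetao')                             # regular or curvilinear grid
# sst    = Oread2Ddata(Model, infile, 'thetao', time=0, lev=0)            # first record, surface level
# wet    = Oread2Ddata(Model, infile, 'thetao', time=0, lev=0, mask=True) # 1 = ocean point
# sst_u  = moveData(Model, 'T', 'U', sst, computation='mean')             # average onto U points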
``` |
{
"source": "jmedina0911/genieparser",
"score": 3
} |
#### File: parser/iosxe/show_ap.py
```python
import re
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Any, Optional
# ====================
# Schema for:
# * 'show ap summary'
# ====================
class ShowApSummarySchema(MetaParser):
"""Schema for show ap summary."""
schema = {
"ap_neighbor_count": int,
"ap_name": {
str: {
"slots_count": int,
"ap_model": str,
"ethernet_mac": str,
"radio_mac": str,
"location": str,
"ap_ip_address": str,
"state": str
}
}
}
# ====================
# Parser for:
# * 'show ap summary'
# ====================
class ShowApSummary(ShowApSummarySchema):
"""Parser for show ap summary"""
cli_command = 'show ap summary'
def cli(self, output=None):
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
ap_summary_dict = {}
# Number of APs: 149
#
# AP Name Slots AP Model Ethernet MAC Radio MAC Location Country IP Address State
# -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
# a121-cap22 2 9130AXI a4b2.3291.9b28 2c57.4119.a060 Fab A UK 10.6.33.106 Registered
# a132-cap15 2 9130AXI a4b2.3291.2244 2c57.4120.d2a0 Fab A UK 10.6.32.146 Registered
# a111-cap27 2 9130AXI a4b2.3291.225c 2c57.4120.d360 Fab A UK 10.6.32.118. Registered
# a112-cap11 2 9130AXI a4b2.3291.22d0 2c57.4120.d700 Fab A UK 10.6.33.160 Registered
# a112-cap10 2 9130AXI a4b2.3291.2420 2c57.4120.b180 Fab A UK 10.6.33.102 Registered
# a112-cap17 2 9130AXI a4b2.3291.2434 2c57.4120.b220 Fab A UK 10.6.32.203 Registered
# a112-cap14 2 9130AXI a4b2.3291.2438 2c57.4120.b240 Fab A UK 10.6.32.202 Registered
# a122-cap09 2 9130AXI a4b2.3291.2450 2c57.4120.b300 Fab A UK 10.6.33.133 Registered
# a131-cap43 2 9130AXI a4b2.3291.2454 2c57.4120.b320 Fab A UK 10.6.33.93 Registered
# a122-cap08 2 9130AXI a4b2.3291.2458 2c57.4120.b340 Fab A UK 10.6.32.166 Registered
# Number of APs: 149
ap_neighbor_count_capture = re.compile(r"^Number\s+of\s+APs:\s+(?P<ap_neighbor_count>\d+)")
# a121-cap22 2 9130AXI a4b2.3291.9b28 2c57.4119.a060 Fab A UK 10.6.33.106 Registered
ap_neighbor_info_capture = re.compile(
r"^(?P<ap_name>\S+)\s+(?P<slots_count>\d+)\s+(?P<ap_model>\S+)\s+(?P<ethernet_mac>\S+)\s+(?P<radio_mac>\S+)(?P<location>.*)\s+(?P<ap_ip_address>\d+\.\d+\.\d+\.\d+)\s+(?P<state>(Registered))")
remove_lines = ('AP Name', '----')
# Remove unwanted lines from raw text
def filter_lines(raw_output, remove_lines):
# Remove empty lines
clean_lines = list(filter(None, raw_output.splitlines()))
rendered_lines = []
for clean_line in clean_lines:
clean_line_strip = clean_line.strip()
                # Remove unwanted lines listed in "remove_lines"
if not clean_line_strip.startswith(remove_lines):
rendered_lines.append(clean_line_strip)
return rendered_lines
out_filter = filter_lines(raw_output=out, remove_lines=remove_lines)
ap_summary_data = {}
for line in out_filter:
# Number of APs: 149
if ap_neighbor_count_capture.match(line):
ap_neighbor_count_match = ap_neighbor_count_capture.match(line)
groups = ap_neighbor_count_match.groupdict()
ap_neighbor_count = int(groups['ap_neighbor_count'])
ap_summary_dict['ap_neighbor_count'] = ap_neighbor_count
# a121-cap22 2 9130AXI a4b2.3291.9b28 2c57.4119.a060 Fab A UK 10.6.33.106 Registered
elif ap_neighbor_info_capture.match(line):
ap_neighbor_info_match = ap_neighbor_info_capture.match(line)
groups = ap_neighbor_info_match.groupdict()
# ap name is the key to place all the ap neighbor info
ap_name = ''
# Loop over all regex matches found
for k, v in groups.items():
# If the key value is ap_name, update the outer ap_name variable with the ap_name regex match
if k == 'ap_name':
ap_name = v
else:
# ap_model can sometimes be a digit e.g., '4800'. This needs to be a string.
if k != 'ap_model' and v.isdigit():
v = int(v)
elif str(v):
                        # The location value can be any string, but leading/trailing whitespace needs to be stripped
v = v.strip()
if not ap_summary_dict.get("ap_name", {}):
ap_summary_dict["ap_name"] = {}
ap_summary_dict['ap_name'][ap_name] = {}
ap_summary_data.update({k: v})
ap_summary_dict['ap_name'][ap_name].update(ap_summary_data)
ap_summary_data = {}
continue
return ap_summary_dict
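# Illustrative offline usage (a sketch; how the device object is obtained and constructed is
# assumed and not shown here):
#
#   raw = device.execute('show ap summary')             # or any previously captured string
#   parsed = ShowApSummary(device=device).cli(output=raw)
#   parsed['ap_neighbor_count']                          # -> 149 for the sample output above
#   parsed['ap_name']['a121-cap22']['ap_ip_address']     # -> '10.6.33.106'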
# ===============================
# Schema for:
# * 'show ap rf-profile summary'
# ===============================
class ShowApRfProfileSummarySchema(MetaParser):
"""Schema for show ap rf-profile summary."""
schema = {
"rf_profile_summary": {
"rf_profile_count": int,
"rf_profiles": {
str: {
"rf_profile_name": str,
"band": str,
"description": str,
"state": str
}
}
}
}
# ===============================
# Parser for:
# * 'show ap rf-profile summary'
# ===============================
class ShowApRfProfileSummary(ShowApRfProfileSummarySchema):
"""Parser for show ap rf-profile summary"""
cli_command = 'show ap rf-profile summary'
def cli(self, output=None):
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
rf_profile_summary_dict = {}
# Number of RF-profiles: 14
#
# RF Profile Name Band Description State
# ------------------------------------------------------------------------------------
# Custom-RF_a 5 GHz Custom-RF_a_Desc Up
# Custom-RF_b 2.4 GHz Custom-RF_b_Desc Up
# Low_Client_Density_rf_5gh 5 GHz pre configured Low Client Density rf Up
# High_Client_Density_rf_5gh 5 GHz pre configured High Client Density r Up
# Low-Client-Density-802.11a 5 GHz Up
# Low_Client_Density_rf_24gh 2.4 GHz pre configured Low Client Density rf Up
# High-Client-Density-802.11a 5 GHz Up
# High_Client_Density_rf_24gh 2.4 GHz pre configured High Client Density r Up
# Low-Client-Density-802.11bg 2.4 GHz Up
# High-Client-Density-802.11bg 2.4 GHz Up
# Typical_Client_Density_rf_5gh 5 GHz pre configured Typical Density rfpro Up
# Typical-Client-Density-802.11a 5 GHz Up
# Typical_Client_Density_rf_24gh 2.4 GHz pre configured Typical Client Densit Up
# Typical-Client-Density-802.11bg 2.4 GHz Up
#
# Number of RF-profiles: 14
rf_profile_count_capture = re.compile(r"^Number\s+of\s+RF-profiles:\s+(?P<rf_profile_count>\d+)")
# Custom-RF_a 5 GHz Custom-RF_a_Desc Up
rf_profile_info_capture = re.compile(
r"^(?P<rf_profile_name>\S+)\s+(?P<band>\S+\s+\S+)\s+(?P<description>.*)(?P<state>(Up|Down))")
# RF Profile Name Band Description State
# ------------------------------------------------------------------------------------
rf_profile_data = {}
for line in out.splitlines():
line = line.strip()
# Number of RF-profiles: 14
if rf_profile_count_capture.match(line):
rf_profile_count_match = rf_profile_count_capture.match(line)
groups = rf_profile_count_match.groupdict()
rf_profile_count = int(groups['rf_profile_count'])
if not rf_profile_summary_dict.get('rf_profile_summary', {}):
rf_profile_summary_dict['rf_profile_summary'] = {}
rf_profile_summary_dict['rf_profile_summary']['rf_profile_count'] = rf_profile_count
continue
elif line.startswith('RF Profile Name'):
continue
elif line.startswith('-----'):
continue
# Custom-RF_a 5 GHz Custom-RF_a_Desc Up
elif rf_profile_info_capture.match(line):
rf_profile_info_match = rf_profile_info_capture.match(line)
groups = rf_profile_info_match.groupdict()
rf_profile_name = ''
for k, v in groups.items():
if k == 'rf_profile_name':
rf_profile_name = v
v = v.strip()
if not rf_profile_summary_dict['rf_profile_summary'].get('rf_profiles', {}):
rf_profile_summary_dict['rf_profile_summary']['rf_profiles'] = {}
rf_profile_summary_dict['rf_profile_summary']['rf_profiles'][rf_profile_name] = {}
rf_profile_data.update({k: v})
rf_profile_summary_dict['rf_profile_summary']['rf_profiles'][rf_profile_name].update(rf_profile_data)
rf_profile_data = {}
continue
return rf_profile_summary_dict
# ====================================
# Schema for:
# * 'show ap dot11 dual-band summary'
# ====================================
class ShowApDot11DualBandSummarySchema(MetaParser):
"""Schema for show ap dot11 dual-band summary."""
schema = {
"ap_dot11_dual-band_summary": {
"index": {
int: {
"ap_name": str,
"ap_mac_address": str,
"slot_id": int,
"admin_state": str,
"oper_state": str,
"width": int,
"tx_pwr": str,
"mode": str,
"subband": str,
"channel": str
}
}
}
}
# ====================================
# Parser for:
# * 'show ap dot11 dual-band summary'
# ====================================
class ShowApDot11DualBandSummary(ShowApDot11DualBandSummarySchema):
"""Parser for show ap dot11 dual-band summary"""
cli_command = 'show ap dot11 dual-band summary'
def cli(self, output=None):
if not output:
output = self.device.execute(self.cli_command)
ret_dict = {}
# aa-test-4800 64d8.14ec.1120 0 Enabled Down 20 *1/8 (23 dBm) Local All (6)*
ap_info_capture = re.compile(
r"^(?P<ap_name>\S+)\s+(?P<ap_mac_address>\S+)\s+(?P<slot_id>\d+)\s+(?P<admin_state>(Enabled|Disabled))\s+(?P<oper_state>\S+)\s+(?P<width>\d+)\s+(?P<tx_pwr>(N\/A|\*.*m\)))\s+(?P<mode>\S+)\s+(?P<subband>\S+)\s+(?P<channel>\S+)$")
ap_index = 0
for line in output.splitlines():
line = line.strip()
# aa-test-4800 64d8.14ec.1120 0 Enabled Down 20 *1/8 (23 dBm) Local All (6)*
m = ap_info_capture.match(line)
if m:
groups = m.groupdict()
ap_index += 1
if not ret_dict.get('ap_dot11_dual-band_summary'):
ret_dict['ap_dot11_dual-band_summary'] = {"index": {}}
ret_dict['ap_dot11_dual-band_summary']["index"][ap_index] = {
'ap_name': groups['ap_name'],
'ap_mac_address': groups['ap_mac_address'],
'slot_id': int(groups['slot_id']),
'admin_state': groups['admin_state'],
'oper_state': groups['oper_state'],
'width': int(groups['width']),
'tx_pwr': groups['tx_pwr'],
'mode': groups['mode'],
'subband': groups['subband'],
'channel': groups['channel']
}
return ret_dict
```
#### File: parser/iosxe/show_sdwan_software.py
```python
from genie.libs.parser.viptela.show_software import ShowSoftwaretab as ShowSoftwaretab_viptela
import re
# =====================================
# Parser for 'show sdwan software'
# =====================================
class ShowSdwanSoftware(ShowSoftwaretab_viptela):
""" Parser for "show sdwan software" """
cli_command = 'show sdwan software'
def cli(self, output = None):
if output is None:
show_output = self.device.execute(self.cli_command)
else:
show_output = output
if re.search('Total Space:',show_output):
fin=re.search('Total Space:.*',show_output)
show_output=show_output.replace(fin.group(0),' ')
return super().cli(output = show_output)
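# Illustrative note: this class only strips the trailing "Total Space:" line before handing
# the text to the inherited viptela ShowSoftwaretab parser, e.g. (device handling assumed):
#
#   parsed = ShowSdwanSoftware(device=device).cli(output=raw_show_sdwan_software_text)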
```
#### File: parser/junos/show_configuration.py
```python
import re
# Metaparser
from genie.metaparser import MetaParser
from pyats.utils.exceptions import SchemaError
from genie.metaparser.util.schemaengine import Any, Optional, Use, Schema
class ShowConfigurationProtocolsMplsLabelSwitchedPathSchema(MetaParser):
""" Schema for:
* show configuration protocols mpls label-switched-path {path}
"""
schema = {
"configuration": {
"protocols": {
"mpls": {
"label-switched-path": {
"to": str,
"revert-timer": str,
Optional("no-cspf"): bool,
"setup-priority": str,
"reservation-priority": str,
Optional("record"): bool,
Optional("inter-domain"): bool,
"primary": {
"name": str,
}
}
}
}
}
}
class ShowConfigurationProtocolsMplsLabelSwitchedPath(ShowConfigurationProtocolsMplsLabelSwitchedPathSchema):
""" Parser for:
* show configuration protocols mpls label-switched-path {path}
"""
cli_command = 'show configuration protocols mpls label-switched-path {path}'
def cli(self, path, output=None):
if not output:
out = self.device.execute(self.cli_command.format(path=path))
else:
out = output
ret_dict = {}
# to 10.49.194.125;
p1 = re.compile(r'^to +(?P<to>[\S]+);$')
# revert-timer 0;
p2 = re.compile(r'^revert-timer +(?P<revert_timer>[\S]+);$')
# priority 3 3;
p3 = re.compile(r'^priority +(?P<setup_priority>[\S]+) +(?P<reservation_priority>[\S]+);$')
# primary test_path_01;
p4 = re.compile(r'^primary +(?P<primary>[\S]+);$')
# no-cspf;
# record;
# inter-domain;
p5 = re.compile(r'^(?P<flag>[^\s;]+);$')
for line in out.splitlines():
line = line.strip()
# to 10.49.194.125;
m = p1.match(line)
if m:
group = m.groupdict()
path_dict = ret_dict.setdefault('configuration', {})\
.setdefault('protocols', {})\
.setdefault('mpls', {})\
.setdefault('label-switched-path', {})
path_dict['to'] = group.get('to')
# revert-timer 0;
m = p2.match(line)
if m:
group = m.groupdict()
path_dict['revert-timer'] = group.get('revert_timer')
# priority 3 3;
m = p3.match(line)
if m:
group = m.groupdict()
path_dict['setup-priority'] = group.get('setup_priority')
path_dict['reservation-priority'] = group.get('reservation_priority')
# primary test_path_01;
m = p4.match(line)
if m:
group = m.groupdict()
path_dict['primary'] = {
"name": group.get('primary')
}
# no-cspf;
# record;
# inter-domain;
m = p5.match(line)
if m:
group = m.groupdict()
path_dict.update({
v: True for v in group.values()
})
return ret_dict
class ShowConfigurationProtocolsMplsPathSchema(MetaParser):
""" Schema for:
show configuration protocols mpls path {path}
"""
def validate_path_list_schema(value):
if not isinstance(value, list):
raise SchemaError('path list schema is not a list')
path_list_schema = Schema({
'name': str,
'type': str,
})
for item in value:
path_list_schema.validate(item)
return value
schema = {
"configuration": {
"protocols": {
"mpls": {
"path": {
"path-list": Use(validate_path_list_schema)
}
}
}
}
}
class ShowConfigurationProtocolsMplsPath(ShowConfigurationProtocolsMplsPathSchema):
""" Parser for:
* show configuration protocols mpls path {path}
"""
cli_command = 'show configuration protocols mpls path {path}'
def cli(self, path, output=None):
if not output:
out = self.device.execute(self.cli_command.format(path=path))
else:
out = output
ret_dict = {}
# 10.0.0.1 strict;
p1 = re.compile(r'^(?P<name>\S+) +(?P<type>[\S]+);$')
for line in out.splitlines():
line = line.strip()
# 10.0.0.1 strict;
m = p1.match(line)
if m:
group = m.groupdict()
path_list = ret_dict.setdefault('configuration', {})\
.setdefault('protocols', {})\
.setdefault('mpls', {})\
.setdefault('path', {})\
.setdefault('path-list', [])
path_dict = {}
path_dict.update({
k.replace('_', '-'): v for k, v in group.items() if v is not None
})
path_list.append(path_dict)
return ret_dict
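# Illustrative offline usage (sample text assumed, device handling not shown):
#
#   raw = '10.0.0.1 strict;\n10.0.0.2 loose;\n'
#   parsed = ShowConfigurationProtocolsMplsPath(device=device).cli(path='test_path_01', output=raw)
#   # parsed['configuration']['protocols']['mpls']['path']['path-list']
#   # -> [{'name': '10.0.0.1', 'type': 'strict'}, {'name': '10.0.0.2', 'type': 'loose'}]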
```
#### File: parser/junos/show_interface.py
```python
import re
# metaparser
from genie.metaparser import MetaParser
from pyats.utils.exceptions import SchemaError
from genie.metaparser.util.schemaengine import Schema, Any, Optional, Use, Or
# import parser utils
from genie.libs.parser.utils.common import Common
# =======================================================
# Schema for 'show interfaces terse [| match <interface>]
# =======================================================
class ShowInterfacesTerseSchema(MetaParser):
"""Schema for show interfaces terse [| match <interface>]"""
schema = {
Any(): {
'oper_status': str,
Optional('link_state'): str,
Optional('admin_state'): str,
Optional('phys_address'): str,
'enabled': bool,
Optional('protocol'): {
Any():{
Optional(Any()): {
'local': str,
Optional('remote'): str,
},
},
},
}
}
# =======================================================
# Parser for 'show interfaces terse [| match <interface>]
# =======================================================
class ShowInterfacesTerse(ShowInterfacesTerseSchema):
""" Parser for:
- show interfaces terse
- show interfaces {interface} terse
- show interfaces terse {interface}
"""
cli_command = [
'show interfaces terse',
'show interfaces {interface} terse'
]
exclude = [
'duration'
]
def cli(self, interface=None, output=None):
# execute the command
if output is None:
if interface:
cmd = self.cli_command[1]
cmd = cmd.format(interface=interface)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
ret_dict = {}
# Interface Admin Link Proto Local Remote
# lo0.0 up up inet 10.1.1.1 --> 0/0
# em1.0 up up inet 10.0.0.4/8
# fxp0 up up
        p1 = re.compile(r'^(?P<interface>\S+) +(?P<admin_state>\w+) +(?P<link_state>\w+) *'
                        r'(?P<protocol>\S+)? *(?P<local>[\w\.\:\/]+)?( *'
                        r'[\-\>]+? *(?P<remote>[\w\.\:\/]+))?$')
# 172.16.64.1/2
# inet6 fe80::250:56ff:fe82:ba52/64
# 2001:db8:8d82:0:a::4/64
# tnp 0x4
# 10.11.11.11 --> 0/0
        p2 = re.compile(r'^((?P<protocol>\S+) +)?(?P<local>((\d+\.[\d\.\/]+)|(\w+\:[\w\:\/]+)|(0x\d+))+)'
                        r' *(([\-\>]+) *(?P<remote>[\w\.\:\/]+))?$')
# multiservice
p3 = re.compile(r'^((?P<protocol>\S+))$')
for line in out.splitlines():
line = line.replace('\t', ' ')
line = line.strip()
if 'show interfaces terse' in line:
continue
# fxp0 up up
# em1.0 up up inet 10.0.0.4/8
# lo0.0 up up inet 10.1.1.1 --> 0/0
m = p1.match(line)
if m:
groups = m.groupdict()
interface = groups['interface']
intf_dict = ret_dict.setdefault(interface, {})
intf_dict.update({'admin_state': groups['admin_state'],
'link_state': groups['link_state'],
'oper_status': groups['link_state'],
'enabled': 'up' in groups['admin_state']})
if groups['protocol']:
protocol = groups['protocol']
pro_dict = intf_dict.setdefault('protocol', {}).setdefault(groups['protocol'], {})
if groups['local']:
pro_dict = pro_dict.setdefault(groups['local'], {})
pro_dict['local'] = groups['local']
if groups['remote']:
pro_dict['remote'] = groups['remote']
continue
# 172.16.64.1/2
# inet6 fe80::250:56ff:fe82:ba52/64
# 2001:db8:8d82:0:a::4/64
# tnp 0x4
# 10.11.11.11 --> 0/0
m = p2.match(line)
if m:
groups = m.groupdict()
try:
protocol = groups['protocol'] or protocol
except Exception:
continue
pro_dict = intf_dict.setdefault('protocol', {}).setdefault(protocol, {}).setdefault(groups['local'], {})
pro_dict['local'] = groups['local']
if groups['remote']:
pro_dict['remote'] = groups['remote']
continue
# multiservice
m = p3.match(line)
if m:
groups = m.groupdict()
protocol = m.groupdict()['protocol']
pro_dict = intf_dict.setdefault('protocol', {}).setdefault(protocol, {})
continue
return ret_dict
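# Illustrative offline usage (a sketch; device handling is assumed and not shown):
#
#   raw = ('Interface               Admin Link Proto    Local                 Remote\n'
#          'em1.0                   up    up   inet     10.0.0.4/8\n'
#          'fxp0                    up    up\n')
#   parsed = ShowInterfacesTerse(device=device).cli(output=raw)
#   # parsed['em1.0']['oper_status']                              -> 'up'
#   # parsed['em1.0']['protocol']['inet']['10.0.0.4/8']['local']  -> '10.0.0.4/8'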
class ShowInterfacesTerseMatch(ShowInterfacesTerse):
""" Parser for:
- show interfaces terse | match {interface}
"""
cli_command = 'show interfaces terse | match {interface}'
def cli(self, interface, output=None):
if output is None:
out = self.device.execute(self.cli_command.format(interface=interface))
else:
out = output
return super().cli(output=out)
class ShowInterfacesTerseInterface(ShowInterfacesTerse):
""" Parser for:
- 'show interfaces terse {interface}'
"""
cli_command = 'show interfaces terse {interface}'
def cli(self, interface, output=None):
if output is None:
out = self.device.execute(self.cli_command.format(interface=interface))
else:
out = output
return super().cli(output=out)
class ShowInterfacesDescriptionsSchema(MetaParser):
""" Schema for:
* show interfaces descriptions
"""
def validate_physical_interface_list(value):
if not isinstance(value, list):
raise SchemaError('physical-interface is not a list')
entry_schema = Schema(
{
"admin-status": str,
"description": str,
"name": str,
"oper-status": str
}
)
for item in value:
entry_schema.validate(item)
return value
schema = {
"interface-information": {
"physical-interface": Use(validate_physical_interface_list)
}
}
class ShowInterfacesDescriptions(ShowInterfacesDescriptionsSchema):
""" Parser for:
* show interfaces descriptions
"""
cli_command = 'show interfaces descriptions'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
ret_dict = {}
# Interface Admin Link Description
p1 = re.compile(r'^Interface +Admin +Link +Description$')
# ge-0/0/0 up up none/100G/in/hktGCS002_ge-0/0/0
p2 = re.compile(r'^(?P<name>\S+) +(?P<admin_status>\S+) +(?P<oper_status>\S+) +(?P<description>\S+)$')
for line in out.splitlines():
line = line.strip()
# Interface Admin Link Description
m = p1.match(line)
if m:
continue
# ge-0/0/0 up up none/100G/in/hktGCS002_ge-0/0/0
m = p2.match(line)
if m:
group = m.groupdict()
entry_list = ret_dict.setdefault("interface-information", {}).setdefault("physical-interface", [])
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_','-')
entry[entry_key] = group_value
entry_list.append(entry)
continue
return ret_dict
class ShowInterfacesSchema(MetaParser):
""" Parser for:
'show interfaces'
"""
# schema = {
# Optional("@xmlns:junos"): str,
# "interface-information": {
# Optional("@junos:style"): str,
# Optional("@xmlns"): str,
# "physical-interface": [
# {
# "active-alarms": {
# "interface-alarms": {
# "alarm-not-present": str,
# "ethernet-alarm-link-down": str
# }
# },
# "active-defects": {
# "interface-alarms": {
# "alarm-not-present": str,
# "ethernet-alarm-link-down": str
# }
# },
# "admin-status": {
# "#text": str,
# Optional("@junos:format"): str
# },
# "bpdu-error": str,
# "clocking": str,
# "current-physical-address": str,
# "description": str,
# "eth-switch-error": str,
# "ethernet-fec-mode": {
# Optional("@junos:style"): str,
# "enabled_fec_mode": str
# },
# "ethernet-fec-statistics": {
# Optional("@junos:style"): str,
# "fec_ccw_count": str,
# "fec_ccw_error_rate": str,
# "fec_nccw_count": str,
# "fec_nccw_error_rate": str
# },
# "ethernet-pcs-statistics": {
# Optional("@junos:style"): str,
# "bit-error-seconds": str,
# "errored-blocks-seconds": str
# },
# "hardware-physical-address": str,
# "if-auto-negotiation": str,
# "if-config-flags": str,
# "if-device-flags": {
# "ifdf-present": str,
# "ifdf-running": str
# },
# "if-flow-control": str,
# "if-media-flags": {
# "ifmf-none": str
# },
# "if-remote-fault": str,
# "if-type": str,
# "ifd-specific-config-flags": {
# "internal-flags": str
# },
# "interface-flapped": {
# "#text": str,
# Optional("@junos:seconds"): str
# },
# "interface-transmit-statistics": str,
# "l2pt-error": str,
# "ld-pdu-error": str,
# "link-level-type": str,
# "link-type": str,
# "local-index": str,
# "logical-interface": {
# "address-family": [
# {
# "address-family-flags": {
# "ifff-is-primary": str,
# "ifff-no-redirects": str,
# "ifff-none": str,
# "ifff-sendbcast-pkt-to-re": str,
# "internal-flags": str
# },
# "address-family-name": str,
# "interface-address": {
# "ifa-broadcast": str,
# "ifa-destination": str,
# "ifa-flags": {
# "ifaf-current-preferred": str,
# "ifaf-current-primary": str
# },
# "ifa-local": str
# },
# "intf-curr-cnt": str,
# "intf-dropcnt": str,
# "intf-unresolved-cnt": str,
# "max-local-cache": str,
# "maximum-labels": str,
# "mtu": str,
# "new-hold-limit": str
# }
# ],
# "encapsulation": str,
# "filter-information": str,
# "if-config-flags": {
# "iff-snmp-traps": str,
# "iff-up": str,
# "internal-flags": str
# },
# "local-index": str,
# "logical-interface-bandwidth": str,
# "name": str,
# "policer-overhead": str,
# "snmp-index": str,
# "traffic-statistics": {
# Optional("@junos:style"): str,
# "input-packets": str,
# "output-packets": str
# }
# },
# "loopback": str,
# "mru": str,
# "mtu": str,
# "name": str,
# "oper-status": str,
# "pad-to-minimum-frame-size": str,
# "physical-interface-cos-information": {
# "physical-interface-cos-hw-max-queues": str,
# "physical-interface-cos-use-max-queues": str
# },
# "snmp-index": str,
# "sonet-mode": str,
# "source-filtering": str,
# "speed": str,
# "traffic-statistics": {
# Optional("@junos:style"): str,
# "input-bps": str,
# "input-packets": str,
# "input-pps": str,
# "output-bps": str,
# "output-packets": str,
# "output-pps": str
# }
# }
# ]
# }
# }
def verify_physical_interface_list(value):
# Pass physical-interface list of dict in value
if not isinstance(value, list):
raise SchemaError('physical interface is not a list')
def verify_logical_interface_list(value):
# Pass address-family list of dict in value
if not isinstance(value, list):
raise SchemaError('logical-interface is not a list')
def verify_address_family_list(value):
# Pass address-family list of dict in value
if not isinstance(value, list):
raise SchemaError('address-family is not a list')
def verify_interface_address_list(value):
# Pass physical-interface list of dict in value
if not isinstance(value, list) and not isinstance(value, dict):
raise SchemaError('interface-address is not a list/dict')
interface_address_schema = Schema({
Optional("ifa-broadcast"): str,
Optional("ifa-destination"): str,
"ifa-flags": {
Optional("ifaf-current-preferred"): bool,
Optional("ifaf-current-primary"): bool,
Optional("ifaf-is-primary"): bool,
Optional("ifaf-is-preferred"): bool,
Optional("ifaf-kernel"): bool,
Optional("ifaf-preferred"): bool,
Optional("ifaf-primary"): bool,
Optional("ifaf-is-default"): bool,
Optional("ifaf-none"): bool,
Optional("ifaf-dest-route-down"): bool,
},
Optional("ifa-local"): str
})
# Validate each dictionary in list
if isinstance(value, dict):
value = [value]
for item in value:
interface_address_schema.validate(item)
return value
af_schema = Schema({
Optional("address-family-flags"): {
Optional("ifff-is-primary"): bool,
Optional("ifff-no-redirects"): bool,
Optional("ifff-none"): bool,
Optional("ifff-sendbcast-pkt-to-re"): bool,
Optional("internal-flags"): bool,
Optional("ifff-primary"): bool,
Optional("ifff-receive-ttl-exceeded"): bool,
Optional("ifff-receive-options"): bool,
Optional("ifff-encapsulation"): str,
},
"address-family-name": str,
Optional("interface-address"): Use(verify_interface_address_list),
Optional("intf-curr-cnt"): str,
Optional("intf-dropcnt"): str,
Optional("intf-unresolved-cnt"): str,
Optional("generation"): str,
Optional("route-table"): str,
Optional("max-local-cache"): str,
Optional("maximum-labels"): str,
"mtu": str,
Optional("new-hold-limit"): str
})
# Validate each dictionary in list
for item in value:
af_schema.validate(item)
return value
l_i_schema = Schema({
Optional("address-family"): Use(verify_address_family_list),
Optional("encapsulation"): str,
Optional("filter-information"): str,
"if-config-flags": {
"iff-snmp-traps": bool,
"iff-up": bool,
Optional("internal-flags"): str
},
"local-index": str,
Optional("logical-interface-bandwidth"): str,
"name": str,
Optional("policer-overhead"): str,
Optional("snmp-index"): str,
Optional("traffic-statistics"): {
Optional("@junos:style"): str,
"input-packets": str,
Optional("input-bytes"): str,
"output-packets": str,
Optional("output-bytes"): str,
Optional("ipv6-transit-statistics"): {
"input-bytes": str,
"input-packets": str,
"output-bytes": str,
"output-packets": str,
},
},
Optional("transit-traffic-statistics"): {
"input-bps": str,
"input-bytes": str,
"input-packets": str,
"input-pps": str,
Optional("ipv6-transit-statistics"): {
"input-bps": str,
"input-bytes": str,
"input-packets": str,
"input-pps": str,
"output-bps": str,
"output-bytes": str,
"output-packets": str,
"output-pps": str
},
"output-bps": str,
"output-bytes": str,
"output-packets": str,
"output-pps": str
}
})
# Validate each dictionary in list
for item in value:
l_i_schema.validate(item)
return value
def verify_queue_list(value):
# Pass address-family list of dict in value
if not isinstance(value, list):
raise SchemaError('queue is not a list')
queue_schema = Schema({
"queue-counters-queued-packets": str,
"queue-counters-total-drop-packets": str,
"queue-counters-trans-packets": str,
"queue-number": str
})
# Validate each dictionary in list
for item in value:
queue_schema.validate(item)
return value
# Create physical-interface Schema
physical_interface_schema = Schema({
Optional("active-alarms"): {
Optional("interface-alarms"): {
Optional("alarm-not-present"): bool,
Optional("ethernet-alarm-link-down"): bool,
}
},
Optional("active-defects"): {
Optional("interface-alarms"): {
Optional("alarm-not-present"): bool,
Optional("ethernet-alarm-link-down"): bool
}
},
Optional("admin-status"): {
Optional("#text"): str,
Optional("@junos:format"): str
},
Optional("bpdu-error"): str,
Optional("clocking"): str,
Optional("current-physical-address"): str,
Optional("description"): str,
Optional("eth-switch-error"): str,
Optional("ethernet-fec-mode"): {
Optional("@junos:style"): str,
"enabled_fec_mode": str
},
Optional("ethernet-fec-statistics"): {
Optional("@junos:style"): str,
"fec_ccw_count": str,
"fec_ccw_error_rate": str,
"fec_nccw_count": str,
"fec_nccw_error_rate": str
},
Optional("ethernet-pcs-statistics"): {
Optional("@junos:style"): str,
"bit-error-seconds": str,
"errored-blocks-seconds": str
},
Optional("hardware-physical-address"): str,
Optional("if-config-flags"): {
Optional("internal-flags"): str,
"iff-snmp-traps": bool,
Optional("iff-hardware-down"): bool,
},
Optional("if-auto-negotiation"): str,
"if-device-flags": {
"ifdf-present": bool,
"ifdf-running": bool,
Optional("ifdf-loopback"): bool,
Optional("ifdf-down"): bool,
},
Optional("if-flow-control"): str,
Optional("if-media-flags"): {
"ifmf-none": bool
},
Optional("if-remote-fault"): str,
Optional("if-type"): str,
Optional("ifd-specific-config-flags"): {
Optional("internal-flags"): str
},
Optional("interface-flapped"): {
"#text": str,
Optional("@junos:seconds"): str
},
Optional("interface-transmit-statistics"): str,
Optional("l2pt-error"): str,
Optional("ld-pdu-error"): str,
Optional("link-level-type"): str,
Optional("link-type"): str,
Optional("link-mode"): str,
Optional("local-index"): str,
Optional("logical-interface"): Use(verify_logical_interface_list),
Optional("loopback"): str,
Optional("lsi-traffic-statistics"): {
Optional("@junos:style"): str,
"input-bps": str,
"input-bytes": str,
"input-packets": str,
"input-pps": str
},
Optional("mru"): str,
Optional("mtu"): str,
"name": str,
Optional("oper-status"): str,
Optional("pad-to-minimum-frame-size"): str,
Optional("physical-interface-cos-information"): {
"physical-interface-cos-hw-max-queues": str,
"physical-interface-cos-use-max-queues": str
},
Optional("snmp-index"): str,
Optional("sonet-mode"): str,
Optional("source-filtering"): str,
Optional("speed"): str,
Optional("stp-traffic-statistics"): {
Optional("@junos:style"): str,
Optional("stp-input-bytes-dropped"): str,
Optional("stp-input-packets-dropped"): str,
Optional("stp-output-bytes-dropped"): str,
Optional("stp-output-packets-dropped"): str
},
Optional("traffic-statistics"): {
Optional("@junos:style"): str,
Optional("input-bps"): str,
Optional("output-bytes"): str,
Optional("input-bytes"): str,
Optional("input-packets"): str,
Optional("input-pps"): str,
Optional("output-bps"): str,
Optional("output-packets"): str,
Optional("output-pps"): str,
Optional("ipv6-transit-statistics"): {
Optional("input-bps"): str,
Optional("input-bytes"): str,
Optional("input-packets"): str,
Optional("input-pps"): str,
Optional("output-bps"): str,
Optional("output-bytes"): str,
Optional("output-packets"): str,
Optional("output-pps"): str
},
},
Optional("output-error-list"): {
Optional("aged-packets"): str,
Optional("carrier-transitions"): str,
Optional("hs-link-crc-errors"): str,
Optional("mtu-errors"): str,
Optional("output-collisions"): str,
Optional("output-drops"): str,
Optional("output-errors"): str,
Optional("output-fifo-errors"): str,
Optional("output-resource-errors"): str
},
Optional("ethernet-mac-statistics"): {
Optional("@junos:style"): str,
"input-broadcasts": str,
"input-bytes": str,
"input-code-violations": str,
"input-crc-errors": str,
"input-fifo-errors": str,
"input-fragment-frames": str,
"input-jabber-frames": str,
"input-mac-control-frames": str,
"input-mac-pause-frames": str,
"input-multicasts": str,
"input-oversized-frames": str,
"input-packets": str,
Optional("input-total-errors"): str,
"input-unicasts": str,
"input-vlan-tagged-frames": str,
"output-broadcasts": str,
"output-bytes": str,
"output-crc-errors": str,
"output-fifo-errors": str,
"output-mac-control-frames": str,
"output-mac-pause-frames": str,
"output-multicasts": str,
"output-packets": str,
Optional("output-total-errors"): str,
"output-unicasts": str
},
Optional("input-error-list"): {
Optional("framing-errors"): str,
Optional("input-discards"): str,
Optional("input-drops"): str,
Optional("input-errors"): str,
Optional("input-fifo-errors"): str,
Optional("input-giants"): str,
Optional("input-l2-channel-errors"): str,
Optional("input-l2-mismatch-timeouts"): str,
Optional("input-l3-incompletes"): str,
Optional("input-resource-errors"): str,
Optional("input-runts"): str
},
Optional("transit-traffic-statistics"): {
"input-bps": str,
"input-bytes": str,
"input-packets": str,
"input-pps": str,
Optional("ipv6-transit-statistics"): {
"input-bps": str,
"input-bytes": str,
"input-packets": str,
"input-pps": str,
"output-bps": str,
"output-bytes": str,
"output-packets": str,
"output-pps": str
},
"output-bps": str,
"output-bytes": str,
"output-packets": str,
"output-pps": str
},
Optional("queue-counters"): {
Optional("@junos:style"): str,
"interface-cos-short-summary": {
"intf-cos-num-queues-in-use": str,
"intf-cos-num-queues-supported": str,
},
"queue": Use(verify_queue_list)
},
})
# Validate each dictionary in list
for item in value:
physical_interface_schema.validate(item)
return value
schema = {
Optional("@xmlns:junos"): str,
"interface-information": {
Optional("@junos:style"): str,
Optional("@xmlns"): str,
"physical-interface": Use(verify_physical_interface_list)
}
}
class ShowInterfaces(ShowInterfacesSchema):
cli_command = ['show interfaces']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
statistics_type = None
# Physical interface: ge-0/0/0, Enabled, Physical link is Up
p1 = re.compile(r'^Physical +interface: +(?P<name>\S+), +'
r'(?P<admin_status>\S+), +Physical +link +is +(?P<oper_status>\S+)$')
# Interface index: 148, SNMP ifIndex: 526
p2 = re.compile(r'^Interface +index: +(?P<local_index>\d+), +'
r'SNMP +ifIndex: +(?P<snmp_index>\d+)'
                        r'(, +Generation: +\S+)?$')
# Description: none/100G/in/hktGCS002_ge-0/0/0
p3 = re.compile(r'^Description: +(?P<description>\S+)$')
# Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
# Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
p4 = re.compile(r'^(Type: +\S+, )?Link-level +type: +'
r'(?P<link_level_type>\S+), +MTU: +(?P<mtu>\S+)'
r'(, +MRU: +(?P<mru>\d+))?(, +(?P<sonet_mode>\S+) +mode)?'
r'(, +Link-mode: +(?P<link_mode>\S+))?'
r'(, +Speed: +(?P<speed>\S+))?(, +BPDU +Error: +'
r'(?P<bpdu_error>\S+),)?$')
# Speed: 1000mbps, BPDU Error: None, Loop Detect PDU Error: None,
p4_1 = re.compile(r'^(Speed: +(?P<speed>[^\s,]+))(, +)?'
r'(BPDU +Error: +(?P<bpdu_error>[^\s,]+))?(, +)?'
r'(Loop +Detect +PDU +Error: +(?P<ld_pdu_error>[^\s,]+))?(, +)?')
# Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None, Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None,
p4_2 = re.compile(r'^Link-level +type: +(?P<link_level_type>\S+), +MTU: +(?P<mtu>\S+)'
r'(, +MRU: +(?P<mru>\d+))?(, +(?P<sonet_mode>\S+) +mode)?'
r'(, +Speed: +(?P<speed>\S+))?(, +BPDU +Error: +(?P<bpdu_error>\S+),)?'
r'( +Loop +Detect +PDU +Error: +(?P<ld_pdu_error>\S+),)?'
r'( +Ethernet-Switching +Error: +(?P<eth_switch_error>\S+),)?'
r'( +MAC-REWRITE +Error: +\S+)?$')
# Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
p5 = re.compile(r'^Loop +Detect +PDU +Error: +(?P<ld_pdu_error>\S+), +'
r'Ethernet-Switching +Error: +(?P<eth_switch_error>\S+), +MAC-REWRITE +'
r'Error: +\S+, +Loopback: +(?P<loopback>\S+),$')
# Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
p5_1 = re.compile(r'^(Ethernet-Switching +Error: +(?P<eth_switch_error>[^\s,]+))'
r'(, +)?(MAC-REWRITE +Error: +[^\s,]+)?(, +)?'
r'(Loopback: +(?P<loopback>[^\s,]+))(, +)?')
# Loopback: Disabled, Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
p5_2 = re.compile(r'^(Loopback: +(?P<loopback>\S+),)?'
r'( +Source +filtering: +(?P<source_filtering>\S+),)?'
r'( +Flow +control: +(?P<if_flow_control>\S+),)?'
r'( +Auto-negotiation: +(?P<if_auto_negotiation>\S+),)?'
r'( +Remote +fault: +(?P<if_remote_fault>\S+))$')
# Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
p6 = re.compile(r'^Source +filtering: +(?P<source_filtering>\S+), +'
r'Flow +control: +(?P<if_flow_control>\S+), +'
r'Auto-negotiation: +(?P<if_auto_negotiation>\S+), +'
r'Remote +fault: +(?P<if_remote_fault>\S+)$')
# Pad to minimum frame size: Disabled
p7 = re.compile(r'^Pad +to +minimum +frame +size: +'
r'(?P<pad_to_minimum_frame_size>\S+)$')
# Device flags : Present Running
p8 = re.compile(r'^Device +flags +: +(?P<if_device_flags>[\S\s]+)$')
# Interface flags: SNMP-Traps Internal: 0x4000
p9 = re.compile(r'^Interface +flags:( +(?P<hardware_down>Hardware-Down))? +'
r'(?P<iff_snmp_traps>\S+)( +Internal: +(?P<internal_flags>\S+))?$')
# Link flags : None
p10 = re.compile(r'^Link +flags +: +(?P<if_media_flags>\S+)$')
# Link type : Full-Duplex
p10_1 = re.compile(r'^Link +type +: +(?P<link_type>\S+)$')
# CoS queues : 8 supported, 8 maximum usable queues
p11 = re.compile(r'^CoS +queues +: +(?P<physical_interface_cos_hw_max_queues>\d+) +'
r'supported, +(?P<physical_interface_cos_use_max_queues>\d+) maximum +'
r'usable +queues$')
# Current address: 00:50:56:ff:56:b6, Hardware address: 00:50:56:ff:56:b6
p12 = re.compile(r'^Current +address: +(?P<current_physical_address>\S+), +'
r'Hardware +address: +(?P<hardware_physical_address>\S+)$')
# Last flapped : 2019-08-29 09:09:19 UTC (29w6d 18:56 ago)
p13 = re.compile(r'^Last +flapped +: +(?P<interface_flapped>[\S\s]+)$')
# Input rate : 2952 bps (5 pps)
p14 = re.compile(r'^Input +rate +: +(?P<input_bps>\d+) +'
r'bps +\((?P<input_pps>\d+) +pps\)$')
# Input bytes : 19732539397 3152 bps
p14_1 = re.compile(r'^Input +bytes *: +(?P<input_bytes>\S+)'
r'( +(?P<input_bps>\S+) +bps)?$')
# Output bytes : 16367814635 3160 bps
p14_2 = re.compile(r'^Output +bytes *: +(?P<output_bytes>\S+)'
r'( +(?P<output_bps>\S+) +bps)?$')
# Input packets: 133726363 5 pps
p14_3 = re.compile(r'^Input +packets *: +(?P<input_packets>\S+)'
r'( +(?P<input_pps>\S+) +pps)?$')
# Output packets: 129306863 4 pps
p14_4 = re.compile(r'^Output +packets *: +(?P<output_packets>\S+)'
r'( +(?P<output_pps>\S+) +pps)?$')
# Output rate : 3080 bps (3 pps)
p15 = re.compile(r'^Output +rate +: +(?P<output_bps>\d+) +'
r'bps +\((?P<output_pps>\d+) +pps\)$')
# Active alarms : None
p16 = re.compile(r'^Active +alarms *: +(?P<active_alarms>\S+)$')
# Active defects : None
p17 = re.compile(r'^Active +defects *: +(?P<active_defects>\S+)$')
# PCS statistics Seconds
p18 = re.compile(r'^PCS +statistics +Seconds$')
# Bit errors 0
p19 = re.compile(r'^Bit +errors +(?P<bit_error_seconds>\d+)$')
# Errored blocks 0
p20 = re.compile(r'^Errored +blocks +(?P<errored_blocks_seconds>\d+)$')
# Ethernet FEC statistics Errors
p21 = re.compile(r'^Ethernet +FEC +statistics +Errors$')
# FEC Corrected Errors 0
# FEC Uncorrected Errors 0
# FEC Corrected Errors Rate 0
# FEC Uncorrected Errors Rate 0
p22 = re.compile(r'^FEC +Corrected +Errors +(?P<fec_ccw_count>\d+)$')
p22_1 = re.compile(r'^FEC +Uncorrected +Errors +(?P<fec_nccw_count>\d+)$')
p22_2 = re.compile(r'^FEC +Corrected +Errors +Rate +(?P<fec_ccw_error_rate>\d+)$')
p22_3 = re.compile(r'^FEC +Uncorrected +Errors +Rate +(?P<fec_nccw_error_rate>\d+)$')
# Interface transmit statistics: Disabled
p23 = re.compile(r'^Interface +transmit +statistics: +'
r'(?P<interface_transmit_statistics>\S+)$')
# Logical interface ge-0/0/0.0 (Index 333) (SNMP ifIndex 606)
p24 = re.compile(r'^Logical +interface +(?P<name>\S+) +'
r'\(Index +(?P<local_index>\d+)\) +\(SNMP +ifIndex +'
r'(?P<snmp_index>\d+)\)( +\(Generation +\S+\))?$')
# Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
# Flags: Up SNMP-Traps 0x4000 VLAN-Tag [ 0x8100.1 ] Encapsulation: ENET2
p25 = re.compile(r'^Flags: +(?P<iff_up>\S+)( +SNMP-Traps)?'
r'( +(?P<internal_flags>\S+))?( +VLAN-Tag +\[[\S\s]+\])? +'
r'Encapsulation: +(?P<encapsulation>\S+)$')
# Input packets : 133657033
p26 = re.compile(r'^Input +packets *: +(?P<input_packets>\S+)$')
# Output packets: 129243982
p27 = re.compile(r'^Output +packets *: +(?P<output_packets>\S+)$')
# Protocol inet, MTU: 1500, Maximum labels: 2
# Protocol inet, MTU: 1500, Generation: 150, Route table: 0
p28 = re.compile(r'^Protocol +(?P<address_family_name>\S+), +'
r'MTU: +(?P<mtu>\S+)(, +Maximum labels: +'
r'(?P<maximum_labels>\S+))?(, +Generation: +'
r'(?P<generation>\S+))?(, +Route table: +'
r'(?P<route_table>\S+))?$')
# Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
p30 = re.compile(r'^Max +nh +cache: +(?P<max_local_cache>\d+), +'
r'New +hold +nh +limit: +(?P<new_hold_limit>\d+)'
r', Curr +nh +cnt: +(?P<intf_curr_cnt>\d+), +'
r'Curr +new +hold +cnt: +(?P<intf_unresolved_cnt>\d+)'
r', +NH +drop +cnt: +(?P<intf_dropcnt>\d+)$')
# Flags: No-Redirects, Sendbcast-pkt-to-re
p31 = re.compile(r'^Flags: +(?P<flags>[\S\s]+)')
# Addresses, Flags: Is-Preferred Is-Primary
p32 = re.compile(r'^Addresses, +Flags: +(?P<flags>[\S\s]+)$')
# Destination: 10.189.5.92/30, Local: 10.189.5.93, Broadcast: 10.189.5.95
p33 = re.compile(r'^Destination: +(?P<ifa_destination>\S+)'
r', +Local: +(?P<ifa_local>\S+)'
r'(, +Broadcast: +(?P<ifa_broadcast>\S+))?$')
# Bandwidth: 0
p34 = re.compile(r'^Bandwidth: +(?P<logical_interface_bandwidth>\S+)$')
# Local: fe80::250:560f:fc8d:7c08
p35 = re.compile(r'^Local: +(?P<ifa_local>\S+)$')
# IPv6 transit statistics:
p36 = re.compile(r'^IPv6 +transit +statistics:$')
# Dropped traffic statistics due to STP State:
p37 = re.compile(r'^Dropped +traffic +statistics +due +to +'
r'STP +State:$')
# Transit statistics:
p38 = re.compile(r'^Transit +statistics:$')
# Hold-times : Up 2000 ms, Down 0 ms
p39 = re.compile(r'^Hold-times +: +Up +\d+ +ms, +Down +\d+ +ms$')
# Damping : half-life: 0 sec, max-suppress: 0 sec, reuse: 0, suppress: 0, state: unsuppressed
p40 = re.compile(r'^Damping +: +half-life: +\d+ +sec, +max-suppress: +'
r'\d+ +sec, +reuse: +\d+, +suppress: +\d+, +state: +\S+$')
# Input errors:
p41 = re.compile(r'^Input +errors:$')
# Output errors:
p42 = re.compile(r'^Output +errors:$')
# Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
# Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0, L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
p43_1 = re.compile(r'^Errors: +(?P<input_errors>\d+), +'
r'Drops: +(?P<input_drops>\d+), +Framing +errors: +(?P<framing_errors>\d+), +'
r'Runts: +(?P<input_runts>\d+), Policed +discards: +(?P<input_discards>\d+),'
r'( +L3 +incompletes: +(?P<input_l3_incompletes>\d+), +'
r'L2 +channel +errors: +(?P<input_l2_channel_errors>\d+),)?'
r'( +L2 +mismatch +timeouts: +(?P<input_l2_mismatch_timeouts>\d+),?)?'
r'( +FIFO +errors: +(?P<input_fifo_errors>\d+),?)?'
r'( +Resource +errors: +(?P<input_resource_errors>\d+))?$')
# L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
p43_2 = re.compile(r'^L2 +mismatch +timeouts: +'
r'(?P<input_l2_mismatch_timeouts>\d+), +FIFO +errors: +'
r'(?P<input_fifo_errors>\d+), +Resource +errors: +'
r'(?P<input_resource_errors>\d+)')
# Carrier transitions: 1, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
# Carrier transitions: 0, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0,
# Carrier transitions: 0, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0, MTU errors: 0, Resource errors: 0
p44_1 = re.compile(r'^Carrier +transitions: +(?P<carrier_transitions>\d+), +'
r'Errors: +(?P<output_errors>\d+), +Drops: +(?P<output_drops>\d+), +'
r'Collisions: +(?P<output_collisions>\d+), +Aged +packets: +'
r'(?P<aged_packets>\d+),( +FIFO +errors: +(?P<output_fifo_errors>\d+), +'
r'HS +link +CRC +errors: +(?P<hs_link_crc_errors>\d+),)?'
r'( +MTU +errors: +(?P<mtu_errors>\d+),?)?'
r'( +Resource +errors: +(?P<output_resource_errors>\d+))?$')
# MTU errors: 0, Resource errors: 0
p44_2 = re.compile(r'^MTU +errors: +(?P<mtu_errors>\d+), +Resource +'
r'errors: +(?P<output_resource_errors>\d+)$')
# Total octets 21604601324 16828244544
p45 = re.compile(r'^Total +octets +(?P<input_bytes>\d+) +'
r'(?P<output_bytes>\d+)$')
# MAC statistics: Receive Transmit
p45_1 = re.compile(r'^MAC +statistics: +Receive +Transmit$')
# Total packets 133726919 129183374
p46 = re.compile(r'^Total +packets +(?P<input_packets>\d+) +'
r'(?P<output_packets>\d+)')
# Unicast packets 133726908 129183361
p47 = re.compile(r'^Unicast +packets +(?P<input_unicasts>\d+) +'
r'(?P<output_unicasts>\d+)$')
# Broadcast packets 0 0
p48 = re.compile(r'^Broadcast +packets +(?P<input_broadcasts>\d+) +'
r'(?P<output_broadcasts>\d+)$')
# Multicast packets 0 0
p49 = re.compile(r'^Multicast +packets +(?P<input_multicasts>\d+) +'
r'(?P<output_multicasts>\d+)$')
# CRC/Align errors 0 0
p50 = re.compile(r'^CRC\/Align +errors +(?P<input_crc_errors>\d+) +'
r'(?P<output_crc_errors>\d+)$')
# FIFO errors 0 0
p51 = re.compile(r'^FIFO +errors +(?P<input_fifo_errors>\d+) +'
r'(?P<output_fifo_errors>\d+)$')
# MAC control frames 0 0
p52 = re.compile(r'^MAC +control +frames +(?P<input_mac_control_frames>\d+) +'
r'(?P<output_mac_control_frames>\d+)$')
# MAC pause frames 0 0
p53 = re.compile(r'^MAC +pause +frames +(?P<input_mac_pause_frames>\d+) +'
r'(?P<output_mac_pause_frames>\d+)$')
# Oversized frames 0
p54 = re.compile(r'^Oversized +frames +(?P<input_oversized_frames>\d+)$')
# Jabber frames 0
p56 = re.compile(r'^Jabber +frames +(?P<input_jabber_frames>\d+)$')
# Fragment frames 0
p57 = re.compile(r'^Fragment +frames +(?P<input_fragment_frames>\d+)$')
# VLAN tagged frames 0
p58 = re.compile(r'^VLAN +tagged +frames +(?P<input_vlan_tagged_frames>\d+)$')
# Code violations 0
p59 = re.compile(r'^Code +violations +(?P<input_code_violations>\d+)$')
# Total errors 0 0
p60 = re.compile(r'^Total +errors +(?P<input_total_errors>\d+)( +\d+)?$')
# Label-switched interface (LSI) traffic statistics:
p61 = re.compile(r'^Label-switched +interface +\(LSI\) +traffic +statistics:$')
# Egress queues: 8 supported, 4 in use
p62 = re.compile(r'^Egress +queues: +(?P<intf_cos_num_queues_supported>\d+) +'
r'supported, +(?P<intf_cos_num_queues_in_use>\d+) +in +use$')
# 0 0 0 0
p63 = re.compile(r'^(?P<queue_number>\d+) +(?P<queue_counters_queued_packets>\d+) +'
r'(?P<queue_counters_trans_packets>\d+) +(?P<drop_packets>\d+)$')
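# The output is parsed line by line as a small state machine: `statistics_type`
# remembers which block we are in ('physical', 'logical', 'ipv6_transit',
# 'dropped_stp_state', 'transit_statistics', 'lsi_traffic_statistics') so that
# the shared p14_x counter patterns land in the right nested dictionary.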
cnt = 0
for line in out.splitlines():
line = line.strip()
cnt += 1
# Physical interface: ge-0/0/0, Enabled, Physical link is Up
m = p1.match(line)
if m:
group = m.groupdict()
statistics_type = 'physical'
interface_info_dict = ret_dict.setdefault('interface-information', {})
physical_interface_list = interface_info_dict.setdefault('physical-interface', [])
physical_interface_dict = {}
physical_interface_dict.update({'name': group['name']})
admin_status = group['admin_status']
admin_status_dict = physical_interface_dict.setdefault('admin-status', {})
admin_status_dict.update({'@junos:format': admin_status})
physical_interface_list.append(physical_interface_dict)
continue
# Interface index: 148, SNMP ifIndex: 526
m = p2.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Description: none/100G/in/hktGCS002_ge-0/0/0
m = p3.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None,
m = p4.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Speed: 1000mbps, BPDU Error: None, Loop Detect PDU Error: None,
m = p4_1.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Link-level type: Ethernet, MTU: 1514, MRU: 1522, LAN-PHY mode, Speed: 1000mbps, BPDU Error: None, Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None,
m = p4_2.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Loop Detect PDU Error: None, Ethernet-Switching Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
m = p5.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Loopback: Disabled, Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
m = p5_2.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled, Remote fault: Online
m = p6.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Pad to minimum frame size: Disabled
m = p7.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Device flags : Present Running
m = p8.match(line)
if m:
group = m.groupdict()
if_device_flags = group['if_device_flags']
if_device_flags_dict = physical_interface_dict.setdefault('if-device-flags', {})
for flag in if_device_flags.split(' '):
key = 'ifdf-{}'.format(flag.lower())
if_device_flags_dict.update({key: True})
continue
# Interface flags: SNMP-Traps Internal: 0x4000
m = p9.match(line)
if m:
group = m.groupdict()
if_config_flags_dict = physical_interface_dict.setdefault('if-config-flags', {})
if_config_flags_dict.update({'iff-snmp-traps': True})
if group['hardware_down']:
if_config_flags_dict.update({'iff-hardware-down': True})
if group['internal_flags']:
if_config_flags_dict.update({'internal-flags': group['internal_flags']})
continue
# Link flags : None
m = p10.match(line)
if m:
group = m.groupdict()
if_media_flags = group['if_media_flags']
if_media_flags_dict = physical_interface_dict.setdefault('if-media-flags', {})
for flag in if_media_flags.split(' '):
key = 'ifmf-{}'.format(flag.lower())
if_media_flags_dict.update({key: True})
continue
# Link type : Full-Duplex
m = p10_1.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# CoS queues : 8 supported, 8 maximum usable queues
m = p11.match(line)
if m:
group = m.groupdict()
cos_dict = physical_interface_dict.setdefault('physical-interface-cos-information', {})
cos_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Current address: 00:50:56:ff:56:b6, Hardware address: 00:50:56:ff:56:b6
m = p12.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Last flapped : 2019-08-29 09:09:19 UTC (29w6d 18:56 ago)
m = p13.match(line)
if m:
group = m.groupdict()
intf_flapped_dict = physical_interface_dict.setdefault('interface-flapped', {})
intf_flapped_dict.update({'#text': group['interface_flapped']})
continue
# IPv6 transit statistics:
m = p36.match(line)
if m:
group = m.groupdict()
traffic_statistics_dict = traffic_statistics_dict.setdefault('ipv6-transit-statistics', {})
statistics_type = 'ipv6_transit'
continue
# Dropped traffic statistics due to STP State:
m = p37.match(line)
if m:
statistics_type = 'dropped_stp_state'
group = m.groupdict()
traffic_statistics_dict = physical_interface_dict.setdefault('stp-traffic-statistics', {})
continue
# Transit statistics:
m = p38.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('transit-traffic-statistics', {})
else:
traffic_statistics_dict = logical_interface_dict.setdefault('transit-traffic-statistics', {})
statistics_type = 'transit_statistics'
continue
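# The generic rate/byte/packet counters below are attributed to whichever
# traffic-statistics dict is currently active; when inside the STP-dropped
# block the keys get an 'stp-...-dropped' prefix instead.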
# Input rate : 2952 bps (5 pps)
m = p14.match(line)
if m:
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
group = m.groupdict()
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Input bytes : 19732539397 3152 bps
m = p14_1.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
if statistics_type == 'dropped_stp_state':
traffic_statistics_dict.update({'stp-{}-dropped'.format(k.replace('_','-')):
v for k, v in group.items() if v is not None})
else:
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Output bytes : 16367814635 3160 bps
m = p14_2.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
if statistics_type == 'dropped_stp_state':
traffic_statistics_dict.update({'stp-{}-dropped'.format(k.replace('_','-')):
v for k, v in group.items() if v is not None})
else:
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Input packets: 133726363 5 pps
m = p14_3.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
if statistics_type == 'dropped_stp_state':
traffic_statistics_dict.update({'stp-{}-dropped'.format(k.replace('_','-')):
v for k, v in group.items() if v is not None})
else:
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Output packets: 129306863 4 pps
m = p14_4.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
if statistics_type == 'dropped_stp_state':
traffic_statistics_dict.update({'stp-{}-dropped'.format(k.replace('_','-')):
v for k, v in group.items() if v is not None})
else:
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Output rate : 3080 bps (3 pps)
m = p15.match(line)
if m:
group = m.groupdict()
if statistics_type == 'physical':
traffic_statistics_dict = physical_interface_dict.setdefault('traffic-statistics', {})
elif statistics_type == 'logical':
traffic_statistics_dict = logical_interface_dict.setdefault('traffic-statistics', {})
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Active alarms : None
m = p16.match(line)
if m:
group = m.groupdict()
active_alarms = group['active_alarms']
active_alarms_dict = physical_interface_dict.setdefault('active-alarms', {})
if active_alarms == 'None':
active_alarms_dict.setdefault('interface-alarms', {}). \
setdefault('alarm-not-present', True)
else:
active_alarms_dict.setdefault('interface-alarms', {}). \
setdefault('ethernet-alarm-link-down', True)
continue
# Active defects : None
m = p17.match(line)
if m:
group = m.groupdict()
active_defects = group['active_defects']
active_defects_dict = physical_interface_dict.setdefault('active-defects', {})
if active_defects == 'None':
active_defects_dict.setdefault('interface-alarms', {}). \
setdefault('alarm-not-present', True)
else:
active_defects_dict.setdefault('interface-alarms', {}). \
setdefault('ethernet-alarm-link-down', True)
continue
# PCS statistics Seconds
m = p18.match(line)
if m:
group = m.groupdict()
statistics_dict = physical_interface_dict.setdefault('ethernet-pcs-statistics', {})
continue
# Bit errors 0
m = p19.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Errored blocks 0
m = p20.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Ethernet FEC statistics Errors
m = p21.match(line)
if m:
statistics_dict = physical_interface_dict.setdefault('ethernet-fec-statistics', {})
continue
# FEC Corrected Errors 0
m = p22.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k:
v for k, v in group.items() if v is not None})
continue
# FEC Uncorrected Errors 0
m = p22_1.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k:
v for k, v in group.items() if v is not None})
continue
# FEC Corrected Errors Rate 0
m = p22_2.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k:
v for k, v in group.items() if v is not None})
continue
# FEC Uncorrected Errors Rate 0
m = p22_3.match(line)
if m:
group = m.groupdict()
statistics_dict.update({k:
v for k, v in group.items() if v is not None})
continue
# Interface transmit statistics: Disabled
m = p23.match(line)
if m:
group = m.groupdict()
inft_transmit = group['interface_transmit_statistics']
physical_interface_dict.update({'interface-transmit-statistics': inft_transmit})
continue
# Logical interface ge-0/0/0.0 (Index 333) (SNMP ifIndex 606)
m = p24.match(line)
if m:
statistics_type = 'logical'
group = m.groupdict()
logical_interface_dict = {}
logical_interface_list = physical_interface_dict.setdefault('logical-interface', [])
logical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
logical_interface_list.append(logical_interface_dict)
continue
# Flags: Up SNMP-Traps 0x4004000 Encapsulation: ENET2
m = p25.match(line)
if m:
group = m.groupdict()
if_config_flags_dict = logical_interface_dict.setdefault('if-config-flags', {})
if_config_flags_dict.update({'iff-up': True})
if_config_flags_dict.update({'iff-snmp-traps': True})
if group['internal_flags']:
if_config_flags_dict.update({'internal-flags': group['internal_flags']})
logical_interface_dict.update({'encapsulation': group['encapsulation']})
continue
# Input packets : 133657033
m = p26.match(line)
if m:
group = m.groupdict()
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Output packets: 129243982
m = p27.match(line)
if m:
group = m.groupdict()
traffic_statistics_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Protocol inet, MTU: 1500
# Protocol mpls, MTU: 1488, Maximum labels: 3
m = p28.match(line)
if m:
group = m.groupdict()
address_family_list = logical_interface_dict.setdefault('address-family', [])
address_family_dict = {k.replace('_','-'):
v for k, v in group.items() if v is not None}
address_family_list.append(address_family_dict)
continue
# Max nh cache: 75000, New hold nh limit: 75000, Curr nh cnt: 1, Curr new hold cnt: 0, NH drop cnt: 0
m = p30.match(line)
if m:
group = m.groupdict()
address_family_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Flags: No-Redirects, Sendbcast-pkt-to-re
m = p31.match(line)
if m:
group = m.groupdict()
address_family_flags_dict = address_family_dict.setdefault('address-family-flags', {})
for flag in group['flags'].split(','):
if "encapsulation" in flag.lower():
value = flag.split(":")[-1].strip().lower()
address_family_flags_dict.update({"ifff-encapsulation": value})
else:
key = 'ifff-{}'.format(flag.strip().lower())
address_family_flags_dict.update({key: True})
continue
# Addresses, Flags: Is-Preferred Is-Primary
m = p32.match(line)
if m:
group = m.groupdict()
af_check = address_family_dict.get('interface-address', None)
interface_address_dict = {}
ifa_flags_dict = interface_address_dict.setdefault('ifa-flags', {})
# ifa_flags_dict.update({'ifaf-current-preferred': True})
# ifa_flags_dict.update({'ifaf-current-primary': True})
for flag in group['flags'].split(' '):
key = 'ifaf-{}'.format(flag.lower())
ifa_flags_dict.update({key: True})
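# A single address is stored as a plain dict; if another 'Addresses' block
# appears for the same family, promote the existing entry to a list and
# append the new one.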
if af_check:
if isinstance(af_check, dict):
address_family_dict.update({'interface-address': []})
interface_address_list = address_family_dict['interface-address']
interface_address_list.append(af_check)
interface_address_list.append(interface_address_dict)
else:
interface_address_list.append(interface_address_dict)
else:
address_family_dict.setdefault('interface-address', interface_address_dict)
continue
# Destination: 10.189.5.92/30, Local: 10.189.5.93, Broadcast: 10.189.5.95
m = p33.match(line)
if m:
group = m.groupdict()
interface_address_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Bandwidth: 0
m = p34.match(line)
if m:
group = m.groupdict()
logical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Local: fe80::250:560f:fc8d:7c08
m = p35.match(line)
if m:
group = m.groupdict()
interface_address_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Input errors:
m = p41.match(line)
if m:
input_error_list_dict = physical_interface_dict.setdefault('input-error-list', {})
continue
# Output errors:
m = p42.match(line)
if m:
output_error_list_dict = physical_interface_dict.setdefault('output-error-list', {})
continue
# Errors: 0, Drops: 0, Framing errors: 0, Runts: 0, Policed discards: 0, L3 incompletes: 0, L2 channel errors: 0,
m = p43_1.match(line)
if m:
group = m.groupdict()
input_error_list_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# L2 mismatch timeouts: 0, FIFO errors: 0, Resource errors: 0
m = p43_2.match(line)
if m:
group = m.groupdict()
input_error_list_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Carrier transitions: 1, Errors: 0, Drops: 0, Collisions: 0, Aged packets: 0, FIFO errors: 0, HS link CRC errors: 0,
m = p44_1.match(line)
if m:
group = m.groupdict()
output_error_list_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# MTU errors: 0, Resource errors: 0
m = p44_2.match(line)
if m:
group = m.groupdict()
output_error_list_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Total octets 21604601324 16828244544
m = p45.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
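# MAC statistics:                      Receive         Transmit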
m = p45_1.match(line)
if m:
ethernet_mac_statistics = physical_interface_dict.setdefault('ethernet-mac-statistics', {})
continue
# Total packets 133726919 129183374
m = p46.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Unicast packets 133726908 129183361
m = p47.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Broadcast packets 0 0
m = p48.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Multicast packets 0 0
m = p49.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# CRC/Align errors 0 0
m = p50.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# FIFO errors 0 0
m = p51.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# MAC control frames 0 0
m = p52.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# MAC pause frames 0 0
m = p53.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Oversized frames 0
m = p54.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Jabber frames 0
m = p56.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Fragment frames 0
m = p57.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# VLAN tagged frames 0
m = p58.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Code violations 0
m = p59.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Total errors 0 0
m = p60.match(line)
if m:
group = m.groupdict()
ethernet_mac_statistics.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# Label-switched interface (LSI) traffic statistics:
m = p61.match(line)
if m:
statistics_type = 'lsi_traffic_statistics'
traffic_statistics_dict = physical_interface_dict.setdefault('lsi-traffic-statistics', {})
continue
# Egress queues: 8 supported, 4 in use
m = p62.match(line)
if m:
group = m.groupdict()
cos_short_summary_dict = physical_interface_dict.setdefault('queue-counters', {}). \
setdefault('interface-cos-short-summary', {})
cos_short_summary_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
# 0 0 0 0
m = p63.match(line)
if m:
group = m.groupdict()
queue_list = physical_interface_dict.setdefault('queue-counters', {}). \
setdefault('queue', [])
queue_dict = {}
queue_dict.update({'queue-number': group['queue_number']})
queue_dict.update({'queue-counters-queued-packets': group['queue_counters_queued_packets']})
queue_dict.update({'queue-counters-trans-packets': group['queue_counters_trans_packets']})
queue_dict.update({'queue-counters-total-drop-packets': group['drop_packets']})
queue_list.append(queue_dict)
continue
return ret_dict
class ShowInterfacesExtensive(ShowInterfaces):
cli_command = ['show interfaces extensive',
'show interfaces {interface} extensive']
def cli(self, interface=None, output=None):
if not output:
if interface:
out = self.device.execute(self.cli_command[1].format(
interface=interface
))
else:
out = self.device.execute(self.cli_command[0])
else:
out = output
return super().cli(output=out)
class ShowInterfacesExtensiveNoForwarding(ShowInterfacesExtensive):
cli_command = ['show interfaces extensive no-forwarding']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
return super().cli(output=out)
class ShowInterfacesStatisticsSchema(MetaParser):
""" Schema for:
* show interfaces statistics
* show interfaces statistics {interface}
"""
def validate_physical_interface_list(value):
if not isinstance(value, list):
raise SchemaError('physical-interface is not a list')
def validate_logical_interface_list(value):
if not isinstance(value, list):
raise SchemaError('logical-interface is not a list')
def validate_address_family_list(value):
if not isinstance(value, list):
raise SchemaError('address-family is not a list')
def validate_interface_address_list(value):
if not isinstance(value, list):
raise SchemaError('interface-address is not a list')
interface_address_schema = Schema ({
"ifa-flags": {
Optional("ifaf-current-preferred"): bool,
Optional("ifaf-current-primary"): bool,
Optional("ifaf-current-default"): bool,
},
Optional("ifa-destination"): str,
Optional("ifa-local"): str,
Optional("ifa-broadcast"): str,
})
for item in value:
interface_address_schema.validate(item)
return value
address_family_schema = Schema({
"address-family-name": str,
"mtu": str,
Optional("address-family-flags"): {
Optional("ifff-is-primary"): bool,
Optional("ifff-sendbcast-pkt-to-re"): bool,
},
Optional("interface-address"): Use(validate_interface_address_list),
})
for item in value:
address_family_schema.validate(item)
return value
logical_interface_schema = Schema (
{
"name": str,
Optional("local-index"): str,
Optional("snmp-index"): str,
Optional("if-config-flags"): {
"iff-snmp-traps": bool,
"internal-flags": str,
},
Optional("encapsulation"): str,
"traffic-statistics": {
"input-packets": str,
"output-packets": str,
},
Optional("filter-information"): str,
Optional("logical-interface-zone-name"): str,
Optional("allowed-host-inbound-traffic"): {
Optional("inbound-dhcp"): bool,
Optional("inbound-http"): bool,
Optional("inbound-https"): bool,
Optional("inbound-ssh"): bool,
Optional("inbound-telnet"): bool,
},
Optional("address-family"): Use(validate_address_family_list),
}
)
for item in value:
logical_interface_schema.validate(item)
return value
physical_interface_schema = Schema(
{
"name": str,
"admin-status": str,
"oper-status": str,
"local-index": str,
"snmp-index": str,
Optional("link-level-type"): str,
Optional("mtu"): str,
Optional("source-filtering"): str,
Optional("link-mode"): str,
Optional("speed"): str,
Optional("bpdu-error"): str,
Optional("l2pt-error"): str,
Optional("loopback"): str,
Optional("if-flow-control"): str,
Optional("if-auto-negotiation"): str,
Optional("if-remote-fault"): str,
Optional("if-device-flags"): {
Optional("ifdf-present"): bool,
Optional("ifdf-running"): bool,
Optional("ifdf-none"): bool,
},
Optional("if-config-flags"): {
Optional("iff-snmp-traps"): bool,
Optional("internal-flags"): str,
},
Optional("if-media-flags"): {
Optional("ifmf-none"): bool,
},
Optional("physical-interface-cos-information"): {
"physical-interface-cos-hw-max-queues": str,
"physical-interface-cos-use-max-queues": str,
},
Optional("current-physical-address"): str,
Optional("hardware-physical-address"): str,
Optional("interface-flapped"): str,
Optional("statistics-cleared"): str,
Optional("traffic-statistics"): {
"input-bps": str,
"input-pps": str,
"output-bps": str,
"output-pps": str,
},
Optional("input-error-count"): str,
Optional("output-error-count"): str,
Optional("active-alarms"): {
"interface-alarms": {
Optional("alarm-not-present"): bool,
},
},
Optional("active-defects"): {
"interface-alarms": {
Optional("alarm-not-present"): bool,
},
},
Optional("interface-transmit-statistics"): str,
Optional("logical-interface"): Use(validate_logical_interface_list)
}
)
for item in value:
physical_interface_schema.validate(item)
return value
schema = {
"interface-information": {
"physical-interface": Use(validate_physical_interface_list)
}
}
class ShowInterfacesStatistics(ShowInterfacesStatisticsSchema):
""" Parser for:
* show interfaces statistics
* show interfaces statistics {interface}
"""
cli_command = [
"show interfaces statistics",
"show interfaces statistics {interface}"
]
def cli(self, interface=None, output=None):
if output is None:
if interface:
out = self.device.execute(self.cli_command[1].format(
interface=interface
))
else:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
# Physical interface: ge-0/0/0, Enabled, Physical link is Up
p1 = re.compile(r'^Physical +interface: +(?P<name>[^\s,]+), +'
r'(?P<admin_status>[^\s,]+), +Physical +link +is +'
r'(?P<oper_status>\S+)$')
# Interface index: 133, SNMP ifIndex: 506
p2 = re.compile(r'^Interface +index: +(?P<local_index>\d+), +'
r'SNMP +ifIndex: +(?P<snmp_index>\d+)$')
# Link-level type: Ethernet, MTU: 1514, Link-mode: Full-duplex, Speed: 1000mbps,
p3 = re.compile(r'^Link-level +type: +(?P<link_level_type>[^\s,]+), +'
r'MTU: +(?P<mtu>\d+), +Link-mode: +(?P<link_mode>[^\s,]+), +'
r'Speed: +(?P<speed>[^\s,]+),$')
# BPDU Error: None, MAC-REWRITE Error: None, Loopback: Disabled,
p4 = re.compile(r'^BPDU +Error: +(?P<bpdu_error>[^\s,]+), +'
r'MAC-REWRITE +Error: +(?P<l2pt_error>[^\s,]+), +'
r'Loopback: +(?P<loopback>[^\s,]+),$')
# Source filtering: Disabled, Flow control: Enabled, Auto-negotiation: Enabled,
p5 = re.compile(r'^Source +filtering: +(?P<source_filtering>[^\s,]+), +'
r'Flow +control: +(?P<if_flow_control>[^\s,]+), +'
r'Auto-negotiation: +(?P<if_auto_negotiation>[^\s,]+),$')
# Remote fault: Online
p6 = re.compile(r'^Remote +fault: +(?P<if_remote_fault>[^\s,]+)$')
# Device flags : Present Running
p7 = re.compile(r'^Device +flags *: +((?P<ifdf_none>None) *)?'
r'((?P<ifdf_present>Present) *)?'
r'((?P<ifdf_running>Running) *)?$')
# Interface flags: SNMP-Traps Internal: 0x4000
p8 = re.compile(r'^Interface +flags: +(?P<iff_snmp_traps>\S+) +'
r'Internal: +(?P<internal_flags>\S+)$')
# Link flags : None
p9 = re.compile(r'^Link +flags *: +((?P<ifmf_none>None) *)?$')
# CoS queues : 8 supported, 8 maximum usable queues
p10 = re.compile(r'^CoS +queues *: +(?P<physical_interface_cos_hw_max_queues>\d+) +'
r'supported, +(?P<physical_interface_cos_use_max_queues>\d+) +'
r'maximum +usable +queues$')
# Current address: 5e:00:40:ff:00:00, Hardware address: 5e:00:40:ff:00:00
p11 = re.compile(r'^Current +address: +(?P<current_physical_address>[^\s,]+), +'
r'Hardware +address: +(?P<hardware_physical_address>[^\s,]+)$')
# Last flapped : 2020-06-22 22:33:51 EST (4d 06:59 ago)
p12 = re.compile(r'^Last +flapped *: +(?P<interface_flapped>.*)$')
# Statistics last cleared: 2020-06-27 05:22:04 EST (00:11:36 ago)
p13 = re.compile(r'^Statistics last cleared: +(?P<statistics_cleared>.*)$')
# Input rate : 2144 bps (4 pps)
p14 = re.compile(r'^Input rate *: +(?P<input_bps>\d+) +bps +'
r'\((?P<input_pps>\d+) +pps\)$')
# Output rate : 0 bps (0 pps)
p15 = re.compile(r'^Output rate *: +(?P<output_bps>\d+) +bps +'
r'\((?P<output_pps>\d+) +pps\)$')
# Input errors: 552, Output errors: 0
p16 = re.compile(r'^Input errors: +(?P<input_error_count>\d+), +'
r'Output errors: +(?P<output_error_count>\d+)$')
# Active alarms : None
p17 = re.compile(r'^Active +alarms *: +(?P<alarm_not_present>None)?$')
# Active defects : None
p18 = re.compile(r'^Active +defects *: +(?P<alarm_not_present>None)?$')
# Interface transmit statistics: Disabled
p19 = re.compile(r'^Interface +transmit +statistics: +'
r'(?P<interface_transmit_statistics>\S+)$')
# Logical interface ge-0/0/0.0 (Index 70) (SNMP ifIndex 507)
p20 = re.compile(r'^Logical +interface +(?P<name>\S+) +'
r'\(Index +(?P<local_index>\d+)\) +'
r'\(SNMP +ifIndex +(?P<snmp_index>\d+)\)$')
# Flags: SNMP-Traps 0x4000 Encapsulation: ENET2
p21 = re.compile(r'^Flags: +(?P<iff_snmp_traps>SNMP-Traps) +'
r'(?P<internal_flags>\S+) +Encapsulation: +'
r'(?P<encapsulation>\S+)$')
# Input packets : 1684
p22 = re.compile(r'^Input +packets *: +(?P<input_packets>\d+)$')
# Output packets: 49
p23 = re.compile(r'^Output +packets *: +(?P<output_packets>\d+)$')
# Security: Zone: trust
p24 = re.compile(r'^Security: +Zone: (?P<logical_interface_zone_name>\S+)$')
# Allowed host-inbound traffic : dhcp http https ssh telnet
p25 = re.compile(r'^Allowed +host-inbound +traffic *: +'
r'(?P<inbound_dhcp>dhcp)( +)?(?P<inbound_http>http)( +)?'
r'(?P<inbound_https>https)( +)?(?P<inbound_ssh>ssh)( +)?'
r'(?P<inbound_telnet>telnet)( +)?$')
# Protocol inet, MTU: 1500
p26 = re.compile(r'^Protocol +(?P<address_family_name>[^\s,]+), +MTU: +(?P<mtu>\S+)$')
# Flags: Sendbcast-pkt-to-re, Is-Primary
p27 = re.compile(r'^Flags: +(?P<ifff_sendbcast_pkt_to_re>Sendbcast-pkt-to-re)'
r'(, +)?(?P<ifff_is_primary>Is-Primary)?$')
# Addresses, Flags: Is-Preferred Is-Primary
p28 = re.compile(r'^Addresses(, Flags: +)?(?P<flags>.+)?$')
# Destination: 172.16.1/24, Local: 172.16.1.55, Broadcast: 172.16.1.255
p29 = re.compile(r'^(Destination: +(?P<ifa_destination>[^\s,]+))?(, +)?'
r'(Local: +(?P<ifa_local>[^\s,]+))?(, +)?'
r'(Broadcast: +(?P<ifa_broadcast>[^\s,]+))?$')
for line in out.splitlines():
line = line.strip()
m = p1.match(line)
if m:
group = m.groupdict()
interface_info_dict = ret_dict.setdefault('interface-information', {})
physical_interface_list = interface_info_dict.setdefault('physical-interface', [])
physical_interface_dict = {}
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
physical_interface_list.append(physical_interface_dict)
continue
m = p2.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p3.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p4.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p5.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p6.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p7.match(line)
if m:
group = m.groupdict()
if_device_flags = physical_interface_dict.setdefault('if-device-flags', {})
if_device_flags.update({k.replace('_', '-'):
True for k, v in group.items() if v is not None})
continue
m = p8.match(line)
if m:
group = m.groupdict()
if_config_flags = physical_interface_dict.setdefault('if-config-flags', {})
if_config_flags.update({k.replace('_', '-'):
True for k, v in group.items() if v is not None and k != "internal_flags"})
if "internal_flags" in group:
if_config_flags.update({"internal-flags": group["internal_flags"]})
continue
m = p9.match(line)
if m:
group = m.groupdict()
if_media_flags = physical_interface_dict.setdefault('if-media-flags', {})
if_media_flags.update({k.replace('_', '-'):
True for k, v in group.items() if v is not None})
continue
m = p10.match(line)
if m:
group = m.groupdict()
phys_cos_info = physical_interface_dict.setdefault('physical-interface-cos-information', {})
phys_cos_info.update({k.replace('_', '-'):
v for k, v in group.items() if v is not None})
continue
m = p11.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p12.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p13.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p14.match(line)
if m:
group = m.groupdict()
traffic_stats = physical_interface_dict.setdefault('traffic-statistics', {})
traffic_stats.update({k.replace('_', '-'):
v for k, v in group.items() if v is not None})
continue
m = p15.match(line)
if m:
group = m.groupdict()
traffic_stats.update({k.replace('_', '-'):
v for k, v in group.items() if v is not None})
continue
m = p16.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p17.match(line)
if m:
group = m.groupdict()
active_alarm = physical_interface_dict.setdefault('active-alarms', {})
interface_alarm = active_alarm.setdefault('interface-alarms', {})
interface_alarm.update({k.replace('_', '-'):
True for k, v in group.items() if v is not None})
continue
m = p18.match(line)
if m:
group = m.groupdict()
active_alarm = physical_interface_dict.setdefault('active-defects', {})
interface_defect = active_alarm.setdefault('interface-alarms', {})
interface_defect.update({k.replace('_', '-'):
True for k, v in group.items() if v is not None})
continue
m = p19.match(line)
if m:
group = m.groupdict()
physical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p20.match(line)
if m:
group = m.groupdict()
logical_interface_list = physical_interface_dict.setdefault('logical-interface', [])
logical_interface_dict = {}
logical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
logical_interface_list.append(logical_interface_dict)
continue
m = p21.match(line)
if m:
group = m.groupdict()
if_config_flags = logical_interface_dict.setdefault('if-config-flags', {})
if_config_flags.update({k.replace('_','-'):
True for k, v in group.items() if v is not None and k not in [
"encapsulation",
"internal_flags"]})
if "encapsulation" in group and group["encapsulation"]:
logical_interface_dict.update({"encapsulation": group["encapsulation"]})
if "internal_flags" in group and group["internal_flags"]:
if_config_flags.update({"internal-flags": group["internal_flags"]})
continue
m = p22.match(line)
if m:
group = m.groupdict()
traffic_stats = logical_interface_dict.setdefault('traffic-statistics', {})
traffic_stats.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p23.match(line)
if m:
group = m.groupdict()
traffic_stats.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p24.match(line)
if m:
group = m.groupdict()
logical_interface_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
continue
m = p25.match(line)
if m:
group = m.groupdict()
allowed_in_traffic = logical_interface_dict.setdefault('allowed-host-inbound-traffic', {})
allowed_in_traffic.update({k.replace('_','-'):
True for k, v in group.items() if v is not None})
continue
m = p26.match(line)
if m:
group = m.groupdict()
address_family_list = logical_interface_dict.setdefault('address-family', [])
address_family_dict = {}
address_family_dict.update({k.replace('_','-'):
v for k, v in group.items() if v is not None})
address_family_list.append(address_family_dict)
continue
m = p27.match(line)
if m:
group = m.groupdict()
address_family_flags = address_family_dict.setdefault('address-family-flags', {})
address_family_flags.update({k.replace('_','-'):
True for k, v in group.items() if v is not None})
continue
m = p28.match(line)
if m:
group = m.groupdict()
interface_address_list = address_family_dict.setdefault('interface-address', [])
interface_address_dict = {}
ifa_flags = interface_address_dict.setdefault('ifa-flags', {})
if 'flags' in group and group['flags']:
flags = group['flags'].split(' ')
ifa_flags.update({"ifaf-current-{}".format(f.split('-')[-1].lower()):
True for f in flags})
interface_address_list.append(interface_address_dict)
continue
m = p29.match(line)
if m:
group = m.groupdict()
if group['ifa_destination']:
interface_address_dict.update({'ifa-destination': group['ifa_destination']})
if group['ifa_local']:
interface_address_dict.update({'ifa-local': group['ifa_local']})
if group['ifa_broadcast']:
interface_address_dict.update({'ifa-broadcast': group['ifa_broadcast']})
continue
return ret_dict
# =======================================================
# Schema for 'show interfaces policers {interface}'
# =======================================================
class ShowInterfacesPolicersInterfaceSchema(MetaParser):
"""Schema for show interfaces policers {interface}"""
'''schema = {
Optional("@xmlns:junos"): str,
"interface-policer-information": {
Optional("@junos:style"): str,
Optional("@xmlns"): str,
"physical-interface": [
"admin-status": str,
"logical-interface": [
"admin-status": str,
"name": str,
"oper-status": str,
"policer-information": [
{
"policer-family": str,
"policer-input": str,
"policer-output": str
}
]
],
"name": str,
"oper-status": str
]
}
}'''
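# The triple-quoted block above is documentation only; the schema that is
# actually enforced is built below from Use() list validators.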
def validate_policer_information_list(value):
# Pass policer-information list as value
if not isinstance(value, list):
raise SchemaError('policer-information is not a list')
policer_information_schema = Schema({
"policer-family": str,
"policer-input": str,
Optional("policer-output"): Or(str,None)
})
# Validate each dictionary in list
for item in value:
policer_information_schema.validate(item)
return value
def validate_logical_interface_list(value):
# Pass logical-interface list as value
if not isinstance(value, list):
raise SchemaError('logical-interface is not a list')
logical_interface_schema = Schema({
"admin-status": str,
"name": str,
"oper-status": str,
"policer-information": Use(ShowInterfacesPolicersInterface.validate_policer_information_list)
})
# Validate each dictionary in list
for item in value:
logical_interface_schema.validate(item)
return value
def validate_physical_interface_list(value):
# Pass physical-interface list as value
if not isinstance(value, list):
raise SchemaError('physical-interface is not a list')
physical_interface_schema = Schema({
"admin-status": str,
"logical-interface": Use(ShowInterfacesPolicersInterface.validate_logical_interface_list),
"name": str,
"oper-status": str
})
# Validate each dictionary in list
for item in value:
physical_interface_schema.validate(item)
return value
schema = {
Optional("@xmlns:junos"): str,
"interface-policer-information": {
Optional("@junos:style"): str,
Optional("@xmlns"): str,
"physical-interface": Use(validate_physical_interface_list)
}
}
# =======================================================
# Parser for 'show interfaces policers {interface}'
# =======================================================
class ShowInterfacesPolicersInterface(ShowInterfacesPolicersInterfaceSchema):
""" Parser for:
- show interfaces policers {interface}
"""
cli_command = 'show interfaces policers {interface}'
def cli(self, interface=None, output=None):
# execute the command
if output is None:
out = self.device.execute(self.cli_command.format(interface=interface))
else:
out = output
ret_dict = {}
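# p1 matches both the physical line (e.g. 'ge-0/0/2 up up') and the logical
# unit line ('ge-0/0/2.0 up up'); the optional <physical_interface_value>
# group distinguishes them. p2 matches the per-family policer rows that
# follow a logical unit.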
# ge-0/0/2 up up
p1 = re.compile(r'^(?P<interface>[a-zA-Z\-\d\/]+)((?P<physical_interface_value>[\.\d]+))? +(?P<admin>\w+) +(?:(?P<link>\w+))?$')
# inet GE_1M-ge-0/0/2.0-log_int-i GE_1M-ge-0/0/2.0-log_int-o
# multiservice __default_arp_policer__
p2 = re.compile(r'^(?P<policer_family>\w+) +(?P<policer_input>\S+)( +((?P<policer_output>\S+)))?$')
for line in out.splitlines():
line = line.strip()
# ge-0/0/2 up up
# ge-0/0/2.0 up up
m = p1.match(line)
if m:
interface_policer_info = ret_dict.setdefault(
"interface-policer-information", {}).setdefault("physical-interface",[])
#logical_interface_list = interface_policer_info.setdefault("logical-interface",[])
#policer_information_list = logical_interface_list.setdefault("policer-information", [])
exists = False
policer_information_list = None
group = m.groupdict()
for group_key, group_value in group.items():
if group_key == "physical_interface_value":
if group_value is not None:
exists = True
if exists:
logical_interface_dict['name'] = group['interface'] + group['physical_interface_value']
logical_interface_dict['admin-status'] = group['admin']
logical_interface_dict['oper-status'] = group['link']
policer_information_list = []
logical_interface_dict["policer-information"] = policer_information_list
logical_interface_list.append(logical_interface_dict)
interface_policer_info_dict['logical-interface'] = logical_interface_list
interface_policer_info.append(interface_policer_info_dict)
else:
logical_interface_list = []
logical_interface_dict = {}
interface_policer_info_dict= {}
interface_policer_info_dict['name'] = group['interface']
interface_policer_info_dict['admin-status'] = group['admin']
interface_policer_info_dict['oper-status'] = group['link']
# inet GE_1M-ge-0/0/2.0-log_int-i GE_1M-ge-0/0/2.0-log_int-o
# multiservice __default_arp_policer__
m = p2.match(line)
if m:
group = m.groupdict()
policer_information_dict = {}
policer_information_dict['policer-family'] = group['policer_family']
policer_information_dict['policer-input'] = group['policer_input']
for group_key, group_value in group.items():
if group_key == "policer_output":
if group_value is not None:
policer_information_dict['policer-output'] = group['policer_output']
policer_information_list.append(policer_information_dict)
exists = False
return ret_dict
# =======================================================
# Schema for 'show interfaces queue {interface}'
# =======================================================
class ShowInterfacesQueueSchema(MetaParser):
"""
Schema for:
* show interfaces queue {interface}
"""
def validate_queue(value):
if not isinstance(value, list):
raise SchemaError('queue is not a list')
queue_schema = Schema(
{
"forwarding-class-name": str,
"queue-counters-queued-bytes": str,
"queue-counters-queued-bytes-rate": str,
"queue-counters-queued-packets": str,
"queue-counters-queued-packets-rate": str,
"queue-counters-red-bytes": str,
"queue-counters-red-bytes-high": str,
"queue-counters-red-bytes-low": str,
"queue-counters-red-bytes-medium-high": str,
"queue-counters-red-bytes-medium-low": str,
"queue-counters-red-bytes-rate": str,
"queue-counters-red-bytes-rate-high": str,
"queue-counters-red-bytes-rate-low": str,
"queue-counters-red-bytes-rate-medium-high": str,
"queue-counters-red-bytes-rate-medium-low": str,
"queue-counters-red-packets": str,
"queue-counters-red-packets-high": str,
"queue-counters-red-packets-low": str,
"queue-counters-red-packets-medium-high": str,
"queue-counters-red-packets-medium-low": str,
"queue-counters-red-packets-rate": str,
"queue-counters-red-packets-rate-high": str,
"queue-counters-red-packets-rate-low": str,
"queue-counters-red-packets-rate-medium-high": str,
"queue-counters-red-packets-rate-medium-low": str,
"queue-counters-tail-drop-packets": str,
"queue-counters-tail-drop-packets-rate": str,
Optional("queue-counters-rl-drop-packets"): str,
Optional("queue-counters-rl-drop-packets-rate"): str,
Optional("queue-counters-rl-drop-bytes"): str,
Optional("queue-counters-rl-drop-bytes-rate"): str,
"queue-counters-trans-bytes": str,
"queue-counters-trans-bytes-rate": str,
"queue-counters-trans-packets": str,
"queue-counters-trans-packets-rate": str,
"queue-number": str
}
)
for item in value:
queue_schema.validate(item)
return value
schema = {
"interface-information": {
"physical-interface": {
Optional("description"): str,
"local-index": str,
"snmp-index": str,
"name": str,
"oper-status": str,
"queue-counters": {
"interface-cos-summary": {
"intf-cos-forwarding-classes-in-use": str,
"intf-cos-forwarding-classes-supported": str,
"intf-cos-num-queues-in-use": str,
"intf-cos-num-queues-supported": str,
"intf-cos-queue-type": str
},
"queue": Use(validate_queue)
}
}
}
}
# =======================================================
# Parser for 'show interfaces queue {interface}'
# =======================================================
class ShowInterfacesQueue(ShowInterfacesQueueSchema):
"""
Parser for:
* show interfaces queue {interface}
"""
cli_command = 'show interfaces queue {interface}'
def cli(self, interface=None, output=None):
if not output:
cmd = self.cli_command.format(interface=interface)
out = self.device.execute(cmd)
else:
out = output
# -------------------------------------------------
# Initialize variables
# -------------------------------------------------
ret_dict = {}
red_dropped_bytes = red_dropped_packets = transmitted = None
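# The Queued, Transmitted and RED-dropped sections share the same counter
# layout, so these flags record which section header was seen last and are
# used to route p7 matches to the matching schema keys.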
# -------------------------------------------------
# Regex patterns
# -------------------------------------------------
# Physical interface: ge-0/0/2, Enabled, Physical link is Up
p1 = re.compile(r"^Physical interface: (?P<name>\S+), Enabled, "
r"Physical link is (?P<oper_status>\S+)$")
# Interface index: 143, SNMP ifIndex: 601
p2 = re.compile(r"^Interface index: (?P<local_index>\S+), "
r"SNMP ifIndex: (?P<snmp_index>\S+)$")
# Description: to_ixia_2/4
p3 = re.compile(r"^Description: (?P<description>\S+)$")
# Forwarding classes: 16 supported, 5 in use
p4 = re.compile(r"^Forwarding classes: "
r"(?P<intf_cos_forwarding_classes_supported>\S+) supported, "
r"(?P<intf_cos_forwarding_classes_in_use>\S+) in use$")
# Egress queues: 8 supported, 5 in use
p5 = re.compile(r"^(?P<intf_cos_queue_type>Egress queues): (?P<intf_cos_num_queues_supported>\S+) supported, "
r"(?P<intf_cos_num_queues_in_use>\S+) in use$")
# Queue: 0, Forwarding classes: Bronze-FC
p6 = re.compile(r"^Queue: (?P<queue_number>\S+), "
r"Forwarding classes: (?P<forwarding_class_name>\S+)$")
# Queued:
# Packets : 1470816406 0 pps <-- rate
# Bytes : 564883280956 0 bps
# Transmitted:
p8 = re.compile(r"^(?P<name>Transmitted):$")
# Packets : 1470816406 0 pps
# Bytes : 564883280956 0 bps
# Tail-dropped packets : 0 0 pps
# RED-dropped packets : 0 0 pps
p9 = re.compile(r"^(?P<name>RED-dropped packets) +: +(?P<counts>\S+) +(?P<rates>\S+) +pps$")
# Low : 0 0 pps
# Medium-low : 0 0 pps
# Medium-high : 0 0 pps
# High : 0 0 pps
# RED-dropped bytes : 0 0 bps
p10 = re.compile(r"^(?P<name>RED-dropped bytes) +: +(?P<counts>\S+) +(?P<rates>\S+) +bps$")
# Low : 0 0 bps
# Medium-low : 0 0 bps
# Medium-high : 0 0 bps
# High : 0 0 bps
# RL-dropped packets : 0 0 pps
# Tail-dropped packets : 0 0 pps
p7 = re.compile(r"^(?P<name>Packets|Bytes|Tail-dropped +packets|"
r"RL-dropped +packets|"
r"Low|Medium-low|Medium-high|High) +: "
r"+(?P<counts>\S+) +(?P<rates>\S+) +[p|b]ps$")
# -------------------------------------------------
# Build parsed output
# -------------------------------------------------
for line in out.splitlines():
line = line.strip()
# Physical interface: ge-0/0/2, Enabled, Physical link is Up
m = p1.match(line)
if m:
group = m.groupdict()
physical_interface_dict = ret_dict.setdefault('interface-information', {}).\
setdefault('physical-interface', {})
physical_interface_dict['name'] = group['name']
physical_interface_dict['oper-status'] = group['oper_status']
continue
# Interface index: 143, SNMP ifIndex: 601
m = p2.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
physical_interface_dict[entry_key] = group_value
continue
# Description: to_ixia_2/4
m = p3.match(line)
if m:
physical_interface_dict['description'] = m.groupdict()['description']
continue
# Forwarding classes: 16 supported, 5 in use
# Egress queues: 8 supported, 5 in use
m = p4.match(line) or p5.match(line)
if m:
if 'queue-counters' not in physical_interface_dict:
interface_cos_summary_dict = physical_interface_dict.\
setdefault('queue-counters', {}).\
setdefault('interface-cos-summary', {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
interface_cos_summary_dict[entry_key] = group_value
continue
# Queue: 0, Forwarding classes: Bronze-FC
m = p6.match(line)
if m:
group = m.groupdict()
red_dropped_packets = None
red_dropped_bytes = None
transmitted = None
if "queue" not in physical_interface_dict['queue-counters']:
physical_interface_dict['queue-counters']['queue'] = []
current_queue_dict = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
current_queue_dict[entry_key] = group_value
physical_interface_dict['queue-counters']['queue'].append(current_queue_dict)
continue
# Packets : 1470816406 0 pps
# Bytes : 564883280956 0 bps
# Tail-dropped packets : 0 0 pps
# ...
# Low : 0 0 pps
# Medium-low : 0 0 pps
# Medium-high : 0 0 pps
# High : 0 0 pps
m = p7.match(line)
if m:
group = m.groupdict()
name = group['name']
counts = group['counts']
rates = group['rates']
# RED-dropped bytes
if red_dropped_bytes:
if name == 'Low':
current_queue_dict['queue-counters-red-bytes-low'] = counts
current_queue_dict['queue-counters-red-bytes-rate-low'] = rates
elif name == 'Medium-low':
current_queue_dict['queue-counters-red-bytes-medium-low'] = counts
current_queue_dict['queue-counters-red-bytes-rate-medium-low'] = rates
elif name == 'Medium-high':
current_queue_dict['queue-counters-red-bytes-medium-high'] = counts
current_queue_dict['queue-counters-red-bytes-rate-medium-high'] = rates
elif name == 'High':
current_queue_dict['queue-counters-red-bytes-high'] = counts
current_queue_dict['queue-counters-red-bytes-rate-high'] = rates
# RED-dropped packets
elif red_dropped_packets:
if name == 'Low':
current_queue_dict['queue-counters-red-packets-low'] = counts
current_queue_dict['queue-counters-red-packets-rate-low'] = rates
elif name == 'Medium-low':
current_queue_dict['queue-counters-red-packets-medium-low'] = counts
current_queue_dict['queue-counters-red-packets-rate-medium-low'] = rates
elif name == 'Medium-high':
current_queue_dict['queue-counters-red-packets-medium-high'] = counts
current_queue_dict['queue-counters-red-packets-rate-medium-high'] = rates
elif name == 'High':
current_queue_dict['queue-counters-red-packets-high'] = counts
current_queue_dict['queue-counters-red-packets-rate-high'] = rates
# Transmitted
elif transmitted:
if name == 'Packets':
current_queue_dict['queue-counters-trans-packets'] = counts
current_queue_dict['queue-counters-trans-packets-rate'] = rates
elif name == 'Bytes':
current_queue_dict['queue-counters-trans-bytes'] = counts
current_queue_dict['queue-counters-trans-bytes-rate'] = rates
elif name == 'Tail-dropped packets':
current_queue_dict['queue-counters-tail-drop-packets'] = counts
current_queue_dict['queue-counters-tail-drop-packets-rate'] = rates
elif name == 'RL-dropped packets':
current_queue_dict['queue-counters-rl-drop-packets'] = counts
current_queue_dict['queue-counters-rl-drop-packets-rate'] = rates
# Queued
else:
if name == 'Packets':
current_queue_dict['queue-counters-queued-packets'] = counts
current_queue_dict['queue-counters-queued-packets-rate'] = rates
elif name == 'Bytes':
current_queue_dict['queue-counters-queued-bytes'] = counts
current_queue_dict['queue-counters-queued-bytes-rate'] = rates
continue
# Transmitted:
m = p8.match(line)
if m:
transmitted = True
continue
# RED-dropped packets : 0 0 pps
m = p9.match(line)
if m:
red_dropped_packets = True
group = m.groupdict()
if group['name'] == 'RED-dropped packets':
current_queue_dict['queue-counters-red-packets'] = group['counts']
current_queue_dict['queue-counters-red-packets-rate'] = group['rates']
continue
# RED-dropped bytes : 0 0 bps
m = p10.match(line)
if m:
red_dropped_bytes = True
group = m.groupdict()
if group['name'] == 'RED-dropped bytes':
current_queue_dict['queue-counters-red-bytes'] = group['counts']
current_queue_dict['queue-counters-red-bytes-rate'] = group['rates']
continue
return ret_dict
class ShowInterfacesExtensiveInterface(ShowInterfaces):
cli_command = 'show interfaces extensive {interface}'
def cli(self, interface, output=None):
if not output:
out = self.device.execute(self.cli_command.format(
interface=interface
))
else:
out = output
return super().cli(output=out)
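# Illustrative sketch: the queue-counter logic above keys each "Packets"/"Bytes"
# line off the transmitted / red_dropped_packets / red_dropped_bytes flags,
# which are reset on every "Queue:" header. Assuming pre-captured CLI text in a
# hypothetical variable `captured`, the parser can be driven without executing
# anything on the device:
#
#   parsed = ShowInterfacesExtensiveInterface(device=device).cli(
#       interface='ge-0/0/2', output=captured)
#   queue0 = parsed['interface-information']['physical-interface'] \
#       ['queue-counters']['queue'][0]
#   queue0['queue-counters-queued-packets']   # counts seen before "Transmitted:"
#   queue0['queue-counters-trans-packets']    # counts seen after "Transmitted:"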
```
#### File: parser/junos/show_ospf3.py
```python
import re
from genie.metaparser import MetaParser
from pyats.utils.exceptions import SchemaError
from genie.metaparser.util.schemaengine import (Any, Optional, Use,
Schema)
class ShowOspf3InterfaceSchema(MetaParser):
'''schema = {
"ospf3-interface-information": {
"ospf3-interface": [
{
"bdr-id": str,
"dr-id": str,
"interface-name": str,
"neighbor-count": str,
"ospf-area": str,
"ospf-interface-state": str
}
]
}'''
# Sub Schema
def validate_ospf3_interface_list(value):
# Pass ospf3-interface list as value
if not isinstance(value, list):
raise SchemaError('ospf3-interface is not a list')
ospf3_interface_schema = Schema({
"bdr-id": str,
"dr-id": str,
"interface-name": str,
"neighbor-count": str,
"ospf-area": str,
"ospf-interface-state": str
})
# Validate each dictionary in list
for item in value:
ospf3_interface_schema.validate(item)
return value
# Main Schema
schema = {
"ospf3-interface-information": {
"ospf3-interface": Use(validate_ospf3_interface_list)
}
}
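# Illustrative sketch of the Use()/validator pattern used throughout this
# module: Use() defers to the helper function at validation time, and the
# helper checks every dict in the list against a per-entry Schema. The keys
# below are made up purely for illustration:
#
#   from genie.metaparser.util.schemaengine import Schema, Use
#   from pyats.utils.exceptions import SchemaError
#
#   def validate_rows(value):
#       if not isinstance(value, list):
#           raise SchemaError('rows is not a list')
#       row_schema = Schema({'name': str, 'count': str})
#       for row in value:
#           row_schema.validate(row)   # raises on any malformed entry
#       return value
#
#   Schema({'rows': Use(validate_rows)}).validate(
#       {'rows': [{'name': 'ge-0/0/0.0', 'count': '1'}]})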
class ShowOspf3Interface(ShowOspf3InterfaceSchema):
""" Parser for:
* show ospf3 interface
"""
cli_command = 'show ospf3 interface'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
# ge-0/0/0.0 PtToPt 0.0.0.8 0.0.0.0 0.0.0.0 1
p1 = re.compile(
r'^(?P<interface_name>\S+) +(?P<ospf_interface_state>\S+)'
r' +(?P<ospf_area>[0-9]{1,3}(\.[0-9]{1,3}){3}) +(?P<dr_id>[0-9]{1,3}'
r'(\.[0-9]{1,3}){3}) +(?P<bdr_id>[0-9]{1,3}(\.[0-9]{1,3}){3}) +(?P<neighbor_count>\S+)$'
)
ret_dict = {}
for line in out.splitlines():
line = line.strip()
# ge-0/0/0.0 PtToPt 0.0.0.8 0.0.0.0 0.0.0.0 1
m = p1.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-interface-information", {})\
.setdefault("ospf3-interface", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
entry_list.append(entry)
continue
return ret_dict
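# Illustrative sketch: the parser can be exercised with pre-captured text by
# passing it through the `output` argument, bypassing device.execute().
# `device` and `raw` below are assumed/hypothetical:
#
#   raw = 'ge-0/0/0.0          PtToPt  0.0.0.8         0.0.0.0         0.0.0.0            1'
#   parsed = ShowOspf3Interface(device=device).cli(output=raw)
#   # expected shape, per the schema above:
#   # {'ospf3-interface-information': {'ospf3-interface': [
#   #     {'interface-name': 'ge-0/0/0.0', 'ospf-interface-state': 'PtToPt',
#   #      'ospf-area': '0.0.0.8', 'dr-id': '0.0.0.0', 'bdr-id': '0.0.0.0',
#   #      'neighbor-count': '1'}]}}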
# ==============================================
# Schema for show ospf3 neighbor extensive
# ==============================================
class ShowOspf3NeighborExtensiveSchema(MetaParser):
"""schema = {
"ospf3-neighbor-information": {
"ospf3-neighbor": [
{
"activity-timer": str,
"bdr-id": str,
"dr-id": str,
"interface-name": str,
"neighbor-address": str,
"neighbor-adjacency-time": {
"#text": str
},
"neighbor-id": str,
"neighbor-priority": str,
"neighbor-up-time": {},
"options": str,
"ospf-area": str,
"ospf-neighbor-state": str,
"ospf3-interface-index": str
}
]
}
}"""
def validate_ospf3_neighbor_extensive_list(value):
# Pass ospf3-neighbor extensive entry list of dicts as value
if not isinstance(value, list):
raise SchemaError('ospf3-neighbor is not a list')
# Create ospf3-neighbor entry schema
entry_schema = Schema({
"activity-timer": str,
"bdr-id": str,
"dr-id": str,
"interface-name": str,
"neighbor-address": str,
Optional("neighbor-adjacency-time"): {
"#text": str
},
"neighbor-id": str,
"neighbor-priority": str,
Optional("neighbor-up-time"): {
"#text": str
},
"options": str,
"ospf-area": str,
"ospf-neighbor-state": str,
"ospf3-interface-index": str
})
# Validate each dictionary in list
for item in value:
entry_schema.validate(item)
return value
# Main Schema
schema = {
"ospf3-neighbor-information": {
"ospf3-neighbor": Use(validate_ospf3_neighbor_extensive_list)
}
}
# ==============================================
# Parser for show ospf3 neighbor extensive
# ==============================================
class ShowOspf3NeighborExtensive(ShowOspf3NeighborExtensiveSchema):
""" Parser for:
* show ospf3 neighbor extensive
"""
cli_command = ['show ospf3 neighbor extensive']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
#10.189.5.253 ge-0/0/0.0 Full 128 35
p1 = re.compile(
r'^(?P<neighborid>[\w\.\:\/]+) +(?P<interfacename>\S+) '
r'+(?P<ospfneighborstate>\S+) +(?P<pri>\S+) +(?P<dead>\d+)$')
#Neighbor-address fe80::250:56ff:fe8d:53c0
p2 = re.compile(r'^Neighbor-address +(?P<neighbor_address>\S+)$')
#Area 0.0.0.8, opt 0x13, OSPF3-Intf-Index 2
p3 = re.compile(
r'^Area +(?P<area>\S+), opt +(?P<opt>\S+), OSPF3-Intf-Index +(?P<ospf3>\d+)$'
)
#DR-ID 0.0.0.0, BDR-ID 0.0.0.0
p4 = re.compile(r'^DR-ID +(?P<drid>\S+), BDR-ID +(?P<bdrid>\S+)$')
#Up 3w0d 17:07:00, adjacent 3w0d 17:07:00
#Up 00:00:08
p5 = re.compile(
r'^Up +(?P<up>\S+( +[\d\:]+)?)(, +adjacent +(?P<adjacent>\S+ +[\d\:]+))?$'
)
for line in out.splitlines():
line = line.strip()
#10.189.5.253 ge-0/0/0.0 Full 128 35
m = p1.match(line)
if m:
group = m.groupdict()
ospf3_entry_list = ret_dict.setdefault('ospf3-neighbor-information', {}).\
setdefault('ospf3-neighbor', [])
ospf3_entry_dict = {}
ospf3_entry_dict['activity-timer'] = group['dead']
ospf3_entry_dict['neighbor-id'] = group['neighborid']
ospf3_entry_dict['interface-name'] = group['interfacename']
ospf3_entry_dict['ospf-neighbor-state'] = group[
'ospfneighborstate']
ospf3_entry_dict['neighbor-priority'] = group['pri']
ospf3_entry_list.append(ospf3_entry_dict)
continue
#Neighbor-address fe80::250:56ff:fe8d:53c0
m = p2.match(line)
if m:
group = m.groupdict()
ospf3_entry_dict['neighbor-address'] = group[
'neighbor_address']
continue
#Area 0.0.0.8, opt 0x13, OSPF3-Intf-Index 2
m = p3.match(line)
if m:
group = m.groupdict()
ospf3_entry_dict['ospf-area'] = group['area']
ospf3_entry_dict['options'] = group['opt']
ospf3_entry_dict['ospf3-interface-index'] = group['ospf3']
continue
#DR-ID 0.0.0.0, BDR-ID 0.0.0.0
m = p4.match(line)
if m:
group = m.groupdict()
ospf3_entry_dict['dr-id'] = group['drid']
ospf3_entry_dict['bdr-id'] = group['bdrid']
continue
#Up 3w0d 17:07:00, adjacent 3w0d 17:07:00
m = p5.match(line)
if m:
group = m.groupdict()
if group['adjacent']:
ospf3_entry_dict['neighbor-adjacency-time'] = {
'#text': group['adjacent']
}
ospf3_entry_dict['neighbor-up-time'] = {'#text': group['up']}
continue
return ret_dict
# ==============================================
# Schema for 'show ospf3 neighbor'
# ==============================================
class ShowOspf3NeighborSchema(MetaParser):
"""schema = {
"ospf3-neighbor-information": {
"ospf3-neighbor": [
{
"activity-timer": str,
"interface-name": str,
"neighbor-address": str,
"neighbor-id": str,
"neighbor-priority": str,
"ospf-neighbor-state": str
}
]
}
}"""
def validate_ospf3_neighbor_list(value):
# Pass ospf3-neighbor entry list of dicts as value
if not isinstance(value, list):
raise SchemaError('ospf3-neighbor is not a list')
# Create ospf3-neighbor entry schema
entry_schema = Schema({
"activity-timer": str,
"interface-name": str,
"neighbor-address": str,
"neighbor-id": str,
"neighbor-priority": str,
"ospf-neighbor-state": str
})
# Validate each dictionary in list
for item in value:
entry_schema.validate(item)
return value
# Main Schema
schema = {
"ospf3-neighbor-information": {
"ospf3-neighbor": Use(validate_ospf3_neighbor_list)
}
}
# ==============================================
# Parser for 'show ospf3 neighbor'
# ==============================================
class ShowOspf3Neighbor(ShowOspf3NeighborSchema):
""" Parser for:
* show ospf3 neighbor
"""
cli_command = ['show ospf3 neighbor']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
#10.189.5.253 ge-0/0/0.0 Full 128 35
p1 = re.compile(r'^(?P<id>[\d\.]+) +(?P<interface>\S+) '
r'+(?P<state>\S+) +(?P<pri>\S+) +(?P<dead>\d+)$')
#Neighbor-address fe80::250:56ff:fe8d:53c0
p2 = re.compile(r'^Neighbor-address +(?P<neighbor_address>\S+)$')
for line in out.splitlines():
line = line.strip()
#10.189.5.253 ge-0/0/0.0 Full 128 35
m = p1.match(line)
if m:
group = m.groupdict()
ospf3_entry_list = ret_dict.setdefault('ospf3-neighbor-information', {}).\
setdefault('ospf3-neighbor', [])
ospf3_entry_dict = {}
ospf3_entry_dict['activity-timer'] = group['dead']
ospf3_entry_dict['interface-name'] = group['interface']
ospf3_entry_dict['neighbor-id'] = group['id']
ospf3_entry_dict['neighbor-priority'] = group['pri']
ospf3_entry_dict['ospf-neighbor-state'] = group['state']
continue
#Neighbor-address fe80::250:56ff:fe8d:53c0
m = p2.match(line)
if m:
group = m.groupdict()
neighbor_address = group['neighbor_address']
ospf3_entry_dict['neighbor-address'] = neighbor_address
ospf3_entry_list.append(ospf3_entry_dict)
continue
return ret_dict
class ShowOspf3NeighborInstance(ShowOspf3Neighbor):
""" Parser for:
* show ospf3 neighbor instance {instance_name}
"""
cli_command = 'show ospf3 neighbor instance {instance_name}'
def cli(self, instance_name, output=None):
if not output:
out = self.device.execute(self.cli_command.format(
instance_name=instance_name))
else:
out = output
return super().cli(
output=' ' if not out else out
)
class ShowOspf3NeighborDetail(ShowOspf3NeighborExtensive):
""" Parser for:
- show ospf3 neighbor detail
"""
cli_command = ['show ospf3 neighbor detail']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
return super().cli(output=out)
class ShowOspf3DatabaseSchema(MetaParser):
'''
schema = {
"ospf3-database-information": {
"ospf3-area-header": {
"ospf-area": str
},
"ospf3-database": [
{
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"sequence-number": str,
Optional('our-entry'): bool
}
],
"ospf3-intf-header": [
{
"ospf-intf": str
}
]
}
}
'''
def validate_ospf3_database_information(value):
if not isinstance(value, list):
raise SchemaError('ospf3-database-information')
ospf3_database_information_schema = Schema({
"ospf3-area-header": {
"ospf-area": str
},
"ospf3-database":
Use(ShowOspf3DatabaseSchema.validate_ospf3_database_list),
Optional("ospf3-intf-header"):
Use(ShowOspf3DatabaseSchema.validate_ospf3_intf_header_list),
})
for item in value:
ospf3_database_information_schema.validate(item)
return value
# Sub Schema ospf3-database
def validate_ospf3_database_list(value):
# Pass ospf3-database list as value
if not isinstance(value, list):
raise SchemaError('ospf3-database is not a list')
ospf3_database_schema = Schema({
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"sequence-number": str,
Optional('our-entry'): bool
})
# Validate each dictionary in list
for item in value:
ospf3_database_schema.validate(item)
return value
# Sub Schema ospf3-intf-header
def validate_ospf3_intf_header_list(value):
# Pass ospf3-intf-header list as value
if not isinstance(value, list):
raise SchemaError('ospf3-intf-header is not a list')
ospf3_intf_header_schema = Schema({"ospf-area": str, "ospf-intf": str})
# Validate each dictionary in list
for item in value:
ospf3_intf_header_schema.validate(item)
return value
# Main Schema
schema = {
"ospf3-database-information": Use(validate_ospf3_database_information)
}
class ShowOspf3Database(ShowOspf3DatabaseSchema):
""" Parser for:
* show ospf3 database
"""
cli_command = 'show ospf3 database'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
# OSPF3 database, Area 0.0.0.8
p1 = re.compile(
r'^OSPF3( +)database,( +)Area( +)'
r'(?P<ospf_area>(\*{0,1})[0-9]{1,3}(\.[0-9]{1,3}){3})$')
# Type ID Adv Rtr Seq Age Cksum Len
# Router 0.0.0.0 10.34.2.250 0x800018ed 2407 0xaf2d 56
p2 = re.compile(
r'^(?P<lsa_type>\S+) +(?P<lsa_id>(\*{0,1})[0-9]{1,3}'
r'(\.[0-9]{1,3}){3}) +(?P<advertising_router>[0-9]{1,3}(\.[0-9]{1,3})'
r'{3}) +(?P<sequence_number>\S+) +(?P<age>\d+) +(?P<checksum>\S+) +(?P<lsa_length>\d+)$'
)
# OSPF3 Link-Local database, interface ge-0/0/0.0 Area 0.0.0.8
p3 = re.compile(
r'^OSPF3( +)Link-Local( +)database,( +)interface( +)'
r'(?P<ospf_intf>\S+)( +)Area( +)(?P<ospf_area>[0-9]{1,3}(\.[0-9]{1,3}){3})$'
)
ret_dict = {}
for line in out.splitlines():
line = line.strip()
# OSPF3 database, Area 0.0.0.8
m = p1.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-database-information",
[])
entry = {}
group = m.groupdict()
entry.setdefault("ospf3-area-header",
{}).setdefault("ospf-area",
group["ospf_area"])
entry_list.append(entry)
continue
# Router 0.0.0.0 10.34.2.250 0x800018ed 2407 0xaf2d 56
m = p2.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-database-information",
[])[-1].setdefault(
"ospf3-database", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
lsa_id = entry['lsa-id']
if lsa_id[0] == '*':
entry['lsa-id'] = lsa_id[1:]
entry['our-entry'] = True
entry_list.append(entry)
continue
# OSPF3 Link-Local database, interface ge-0/0/0.0 Area 0.0.0.8
m = p3.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-database-information",
[])[-1].setdefault(
"ospf3-intf-header", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
entry_list.append(entry)
continue
return ret_dict
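# Illustrative note: Junos marks LSAs originated by the local router with a
# leading '*' on the LSA ID (e.g. "Router  *0.0.0.0  10.34.2.250 ...").
# p2 captures the asterisk as part of lsa-id; the handler above then strips it
# and records the fact as a boolean, so a starred row would be expected to
# yield roughly:
#
#   {'lsa-type': 'Router', 'lsa-id': '0.0.0.0', 'our-entry': True,
#    'advertising-router': '10.34.2.250', 'sequence-number': '0x800018ed',
#    'age': '2407', 'checksum': '0xaf2d', 'lsa-length': '56'}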
class ShowOspf3InterfaceExtensiveSchema(MetaParser):
""" Schema for:
* show ospf3 interface extensive
"""
# Sub Schema ospf3-interface
def validate_ospf3_interface_list(value):
# Pass ospf3-interface list as value
if not isinstance(value, list):
raise SchemaError('ospf3-interface is not a list')
ospf3_interface_schema = Schema({
"adj-count": str,
"bdr-id": str,
"dead-interval": str,
"dr-id": str,
"hello-interval": str,
"interface-address": str,
"interface-cost": str,
"interface-name": str,
"interface-type": str,
"mtu": str,
"neighbor-count": str,
"ospf-area": str,
"ospf-interface-protection-type": str,
"ospf-interface-state": str,
"ospf-stub-type": str,
"ospf3-interface-index": str,
Optional("ospf3-router-lsa-id"): str,
"prefix-length": str,
"retransmit-interval": str,
Optional("router-priority"): str,
Optional("dr-address"): str
})
# Validate each dictionary in list
for item in value:
ospf3_interface_schema.validate(item)
return value
schema = {
"ospf3-interface-information": {
"ospf3-interface": Use(validate_ospf3_interface_list)
}
}
class ShowOspf3InterfaceExtensive(ShowOspf3InterfaceExtensiveSchema):
""" Parser for:
* show ospf3 interface extensive
"""
cli_command = 'show ospf3 interface extensive'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
ret_dict = {}
# ge-0/0/0.0 PtToPt 0.0.0.8 0.0.0.0 0.0.0.0 1
p1 = re.compile(
r'^(?P<interface_name>\S+)( +)(?P<ospf_interface_state>\S+)'
r'( +)(?P<ospf_area>[\d\.]+)( +)(?P<dr_id>[\d\.]+)( +)'
r'(?P<bdr_id>[\d\.]+)( +)(?P<neighbor_count>\d+)$')
# Address fe80::250:56ff:fe8d:c829, Prefix-length 64
p2 = re.compile(
r'Address( +)(?P<interface_address>\S+),( +)Prefix-length'
r'( +)(?P<prefix_length>\d+)')
# OSPF3-Intf-index 2, Type P2P, MTU 1500, Cost 5
p3 = re.compile(
r'^OSPF3-Intf-index( +)(?P<ospf3_interface_index>\d+),( +)'
r'Type( +)(?P<interface_type>\S+),( +)MTU( +)(?P<mtu>\d+),( +)Cost( +)'
r'(?P<interface_cost>\d+)$')
# Adj count: 1, Router LSA ID: 0
p4 = re.compile(
r'^Adj( +)count:( +)(?P<adj_count>\d+),( +)Router( +)LSA'
r'( +)ID:( +)(?P<ospf3_router_lsa_id>\S+)$')
# Hello 10, Dead 40, ReXmit 5, Not Stub
p5 = re.compile(
r'^Hello( +)(?P<hello_interval>\d+),( +)Dead( +)'
r'(?P<dead_interval>\d+),( +)ReXmit( +)(?P<retransmit_interval>\d+),'
r'( +)(?P<ospf_stub_type>(\S+ ){0,1}\S+)$')
# Protection type: None
p6 = re.compile(
r'^Protection( +)type:( +)(?P<ospf_interface_protection_type>\S+)$'
)
# OSPF3-Intf-index 1, Type LAN, MTU 65535, Cost 0, Priority 128
p7 = re.compile(
r'^OSPF3-Intf-index( +)(?P<ospf3_interface_index>\d+),( +)'
r'Type( +)(?P<interface_type>\S+),( +)MTU( +)(?P<mtu>\d+),( +)Cost( +)'
r'(?P<interface_cost>\d+),( +)Priority( +)(?P<router_priority>\d+)$'
)
# DR addr fe80::250:560f:fc8d:7c08
p8 = re.compile(r'^DR( +)addr( +)(?P<dr_address>\S+)$')
# Validate each dictionary in list
for line in out.splitlines():
line = line.strip()
# ge-0/0/0.0 PtToPt 0.0.0.8 0.0.0.0 0.0.0.0 1
m = p1.match(line)
if m:
interface_list = ret_dict.setdefault("ospf3-interface-information", {})\
.setdefault("ospf3-interface", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
interface_list.append(entry)
continue
# Address fe80::250:56ff:fe8d:c829, Prefix-length 64
m = p2.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# OSPF3-Intf-index 2, Type P2P, MTU 1500, Cost 5
m = p3.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# Adj count: 1, Router LSA ID: 0
m = p4.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
if entry['ospf3-router-lsa-id'] == '-':
del entry['ospf3-router-lsa-id']
continue
# Hello 10, Dead 40, ReXmit 5, Not Stub
m = p5.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# Protection type: None
m = p6.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# OSPF3-Intf-index 1, Type LAN, MTU 65535, Cost 0, Priority 128
m = p7.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# DR addr fe80::250:560f:fc8d:7c08
m = p8.match(line)
if m:
last_interface = ret_dict["ospf3-interface-information"][
"ospf3-interface"][-1]
group = m.groupdict()
entry = last_interface
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
return ret_dict
class ShowOspf3DatabaseExternalExtensiveSchema(MetaParser):
""" Schema for:
* show ospf3 database external extensive
"""
# Sub Schema
def validate_ospf3_database_list(value):
# Pass ospf3-database list as value
if not isinstance(value, list):
raise SchemaError('ospf3-database is not a list')
ospf3_interface_schema = Schema({
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
Optional('our-entry'): bool,
"ospf-database-extensive": {
"aging-timer": {
"#text": str
},
"expiration-time": {
"#text": str
},
"installation-time": {
"#text": str
},
Optional("generation-timer"): {
"#text": str
},
"lsa-change-count": str,
"lsa-changed-time": {
"#text": str
},
Optional("send-time"): {
"#text": str
},
Optional("database-entry-state"): str
},
"ospf3-external-lsa": {
"metric": str,
"ospf3-prefix": str,
"ospf3-prefix-options": str,
"type-value": str
},
"sequence-number": str
})
# Validate each dictionary in list
for item in value:
ospf3_interface_schema.validate(item)
return value
schema = {
"ospf3-database-information": {
"ospf3-database": Use(validate_ospf3_database_list)
}
}
class ShowOspf3DatabaseExternalExtensive(
ShowOspf3DatabaseExternalExtensiveSchema):
""" Parser for:
* show ospf3 database external extensive
"""
cli_command = 'show ospf3 database external extensive'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
ret_dict = {}
# Extern 0.0.0.1 10.34.2.250 0x8000178e 1412 0x3c81 28
p1 = re.compile(
r'^(?P<lsa_type>\S+) +(?P<lsa_id>(\*{0,1})[\d\.]+) +'
r'(?P<advertising_router>[\d\.]+) +(?P<sequence_number>\S+) +(?P<age>\d+)'
r' +(?P<checksum>\S+) +(?P<lsa_length>\d+)$')
# Prefix ::/0
p2 = re.compile(r'^Prefix +(?P<ospf3_prefix>\S+)$')
# Prefix-options 0x0, Metric 1, Type 1,
p3 = re.compile(
r'^Prefix-options +(?P<ospf3_prefix_options>\S+),'
r' Metric +(?P<metric>\d+), +Type +(?P<type_value>\d+),$')
# Aging timer 00:36:27
p4 = re.compile(r'^Aging +timer +(?P<aging_timer>(\S+ ){0,1}[\d:]+)$')
# Gen timer 00:49:49
p5 = re.compile(r'^Gen +timer +(?P<generation_timer>\S+)$')
# Installed 00:23:26 ago, expires in 00:36:28, sent 00:23:24 ago
p6 = re.compile(
r'^Installed +(?P<installation_time>(\S+ ){0,1}[\d:]+)'
r' ago, +expires +in +(?P<expiration_time>(\S+ ){0,1}[\d:]+),'
r' sent +(?P<send_time>(\S+ ){0,1}[\d:]+) +ago$')
# Last changed 29w5d 21:04:29 ago, Change count: 1
p7 = re.compile(
r'^Last +changed +(?P<lsa_changed_time>(\S+ ){0,1}[\d:]+)'
r' ago, +Change +count: +(?P<lsa_change_count>\d+)$')
# Last changed 3w0d 17:02:47 ago, Change count: 2, Ours
p8 = re.compile(
r'^Last +changed +(?P<lsa_changed_time>(\S+ ){0,1}[\d:]+)'
r' ago, +Change +count: +(?P<lsa_change_count>\d+), +(?P<database_entry_state>\S+)$'
)
for line in out.splitlines():
line = line.strip()
# Extern 0.0.0.1 10.34.2.250 0x8000178e 1412 0x3c81 28
m = p1.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-database-information", {})\
.setdefault("ospf3-database", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
if entry['lsa-id'][0] == "*":
entry['lsa-id'] = entry['lsa-id'][1:]
entry['our-entry'] = True
entry_list.append(entry)
continue
# Prefix ::/0
m = p2.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
entry = last_database.setdefault("ospf3-external-lsa", {})
entry['ospf3-prefix'] = group['ospf3_prefix']
continue
# Prefix-options 0x0, Metric 1, Type 1,
m = p3.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
entry = last_database.setdefault("ospf3-external-lsa", {})
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry[entry_key] = group_value
continue
# Aging timer 00:36:27
m = p4.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive",
{}).setdefault("aging-timer", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["aging-timer"][
"#text"] = group['aging_timer']
continue
# Gen timer 00:49:49
m = p5.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("generation-timer", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["generation-timer"]["#text"]\
= group['generation_timer']
continue
# Installed 00:23:26 ago, expires in 00:36:28, sent 00:23:24 ago
m = p6.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("expiration-time", {})
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("installation-time", {})
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("send-time", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["expiration-time"]["#text"]\
= group['expiration_time']
last_database["ospf-database-extensive"]["installation-time"]["#text"]\
= group['installation_time']
last_database["ospf-database-extensive"]["send-time"]["#text"]\
= group['send_time']
continue
# Last changed 29w5d 21:04:29 ago, Change count: 1
m = p7.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive",
{}).setdefault(
"lsa-changed-time", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["lsa-changed-time"]["#text"]\
= group['lsa_changed_time']
last_database["ospf-database-extensive"]["lsa-change-count"]\
= group['lsa_change_count']
continue
# Last changed 29w5d 21:40:56 ago, Change count: 1, Ours
m = p8.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("lsa-changed-time", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["lsa-changed-time"]["#text"]\
= group['lsa_changed_time']
last_database["ospf-database-extensive"]["lsa-change-count"]\
= group['lsa_change_count']
last_database["ospf-database-extensive"]["database-entry-state"]\
= group['database_entry_state']
continue
return ret_dict
# ==============================================
# Schema for show ospf3 overview
# ==============================================
class ShowOspf3OverviewSchema(MetaParser):
schema = {
"ospf3-overview-information": {
"ospf-overview": {
"instance-name": str,
"ospf-area-overview": {
"ospf-abr-count": str,
"ospf-area": str,
"ospf-asbr-count": str,
"ospf-nbr-overview": {
"ospf-nbr-up-count": str
},
"ospf-stub-type": str
},
"ospf-lsa-refresh-time": str,
"ospf-route-table-index": str,
Optional("ospf-configured-overload-remaining-time"): str,
"ospf-router-id": str,
Optional("ospf-tilfa-overview"): {
"ospf-tilfa-enabled": str
},
"ospf-topology-overview": {
"ospf-backup-spf-status": str,
"ospf-full-spf-count": str,
"ospf-prefix-export-count": str,
"ospf-spf-delay": str,
"ospf-spf-holddown": str,
"ospf-spf-rapid-runs": str,
"ospf-topology-id": str,
"ospf-topology-name": str
}
}
}
}
# ==============================================
# Parser for show ospf3 overview
# ==============================================
class ShowOspf3Overview(ShowOspf3OverviewSchema):
""" Parser for:
* show ospf3 overview
"""
cli_command = ['show ospf3 overview']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
#Instance: master
p1 = re.compile(r'^Instance: +(?P<instance_name>\S+)$')
#Router ID: 10.189.5.252
p2 = re.compile(r'^Router ID: +(?P<ospf_router_id>[\w\.\:\/]+)$')
#Route table index: 0
p3 = re.compile(
r'^Route table index: +(?P<ospf_route_table_index>\d+)$')
#LSA refresh time: 50 minutes
p5 = re.compile(
r'^LSA refresh time: +(?P<ospf_lsa_refresh_time>\d+) minutes$')
#Post Convergence Backup: Disabled
p6 = re.compile(
r'^Post Convergence Backup: +(?P<ospf_tilfa_enabled>\S+)$')
#Area: 0.0.0.8
p7 = re.compile(r'^Area: +(?P<ospf_area>[\w\.\:\/]+)$')
#Stub type: Not Stub
p8 = re.compile(r'^Stub type: +(?P<ospf_stub_type>[\S\s]+)$')
#Area border routers: 0, AS boundary routers: 5
p9 = re.compile(
r'^Area border routers: +(?P<ospf_abr_count>\d+), AS boundary routers: +(?P<ospf_asbr_count>\d+)$'
)
#Up (in full state): 2
p10 = re.compile(
r'^Up \(in full state\): +(?P<ospf_nbr_up_count>\d+)$')
#Topology: default (ID 0)
p11 = re.compile(
r'^Topology: +(?P<ospf_topology_name>\S+) \(ID +(?P<ospf_topology_id>\d+)\)$'
)
#Prefix export count: 1
p12 = re.compile(
r'^Prefix export count: +(?P<ospf_prefix_export_count>\d+)$')
#Full SPF runs: 1934
p13 = re.compile(r'^Full SPF runs: +(?P<ospf_full_spf_count>\d+)$')
#SPF delay: 0.200000 sec, SPF holddown: 2 sec, SPF rapid runs: 3
p14 = re.compile(
r'^SPF delay: +(?P<ospf_spf_delay>[\w\.\:\/]+) sec, SPF holddown: +(?P<ospf_spf_holddown>[\w\.]+) sec, SPF rapid runs: +(?P<ospf_spf_rapid_runs>[\w\.]+)$'
)
#Backup SPF: Not Needed
p15 = re.compile(r'^Backup SPF: +(?P<ospf_backup_spf_status>[\S\s]+)$')
# Configured overload, expires in 14 seconds
p16 = re.compile(
r'^Configured +overload, +expires +in +'
r'(?P<ospf_configured_overload_remaining_time>\d+) +\S+$'
)
for line in out.splitlines():
line = line.strip()
#Instance: master
m = p1.match(line)
if m:
group = m.groupdict()
ospf3_entry_list = ret_dict.setdefault('ospf3-overview-information', {}).\
setdefault('ospf-overview', {})
ospf3_entry_list['instance-name'] = group['instance_name']
continue
#Router ID: 10.189.5.252
m = p2.match(line)
if m:
group = m.groupdict()
ospf3_entry_list['ospf-router-id'] = group['ospf_router_id']
continue
#Route table index: 0
m = p3.match(line)
if m:
group = m.groupdict()
ospf3_entry_list['ospf-route-table-index'] = group[
'ospf_route_table_index']
continue
#LSA refresh time: 50 minutes
m = p5.match(line)
if m:
group = m.groupdict()
ospf3_entry_list['ospf-lsa-refresh-time'] = group[
'ospf_lsa_refresh_time']
continue
#Post Convergence Backup: Disabled
m = p6.match(line)
if m:
group = m.groupdict()
ospf3_entry_list['ospf-tilfa-overview'] = {
'ospf-tilfa-enabled': group['ospf_tilfa_enabled']
}
continue
#Area: 0.0.0.8
m = p7.match(line)
if m:
group = m.groupdict()
ospf3_area_entry_dict = ospf3_entry_list.setdefault(
'ospf-area-overview', {})
ospf3_area_entry_dict.update({'ospf-area': group['ospf_area']})
continue
#Stub type: Not Stub
m = p8.match(line)
if m:
group = m.groupdict()
ospf3_area_entry_dict.update(
{'ospf-stub-type': group['ospf_stub_type']})
continue
#Area border routers: 0, AS boundary routers: 5
m = p9.match(line)
if m:
group = m.groupdict()
ospf3_area_entry_dict.update(
{'ospf-abr-count': group['ospf_abr_count']})
ospf3_area_entry_dict.update(
{'ospf-asbr-count': group['ospf_asbr_count']})
continue
#Up (in full state): 2
m = p10.match(line)
if m:
group = m.groupdict()
ospf3_area_entry_dict.setdefault(
'ospf-nbr-overview',
{"ospf-nbr-up-count": group['ospf_nbr_up_count']})
continue
#Topology: default (ID 0)
m = p11.match(line)
if m:
group = m.groupdict()
ospf3_topology_entry_dict = ospf3_entry_list.setdefault(
'ospf-topology-overview', {})
ospf3_topology_entry_dict.update(
{'ospf-topology-name': group['ospf_topology_name']})
ospf3_topology_entry_dict.update(
{'ospf-topology-id': group['ospf_topology_id']})
continue
#Prefix export count: 1
m = p12.match(line)
if m:
group = m.groupdict()
ospf3_topology_entry_dict.update({
'ospf-prefix-export-count':
group['ospf_prefix_export_count']
})
continue
#Full SPF runs: 1934
m = p13.match(line)
if m:
group = m.groupdict()
ospf3_topology_entry_dict.update(
{'ospf-full-spf-count': group['ospf_full_spf_count']})
continue
#SPF delay: 0.200000 sec, SPF holddown: 2 sec, SPF rapid runs: 3
m = p14.match(line)
if m:
group = m.groupdict()
ospf3_topology_entry_dict.update(
{'ospf-spf-delay': group['ospf_spf_delay']})
ospf3_topology_entry_dict.update(
{'ospf-spf-holddown': group['ospf_spf_holddown']})
ospf3_topology_entry_dict.update(
{'ospf-spf-rapid-runs': group['ospf_spf_rapid_runs']})
continue
#Backup SPF: Not Needed
m = p15.match(line)
if m:
group = m.groupdict()
ospf3_topology_entry_dict.update({
'ospf-backup-spf-status':
group['ospf_backup_spf_status']
})
continue
# Configured overload, expires in 14 seconds
m = p16.match(line)
if m:
group = m.groupdict()
ospf3_entry_list["ospf-configured-overload-remaining-time"] = \
group["ospf_configured_overload_remaining_time"]
continue
return ret_dict
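# Illustrative sketch: the overview parser folds flat "Key: value" lines into
# the nested schema by holding references to the sub-dicts created so far
# (ospf3_entry_list, ospf3_area_entry_dict, ospf3_topology_entry_dict).
# With a hypothetical variable `captured` holding the sample lines quoted in
# the comments above, a call such as
#
#   parsed = ShowOspf3Overview(device=device).cli(output=captured)
#
# would be expected to produce roughly:
#   {'ospf3-overview-information': {'ospf-overview': {
#       'instance-name': 'master', 'ospf-router-id': '10.189.5.252',
#       'ospf-area-overview': {'ospf-area': '0.0.0.8', ...},
#       'ospf-topology-overview': {'ospf-topology-name': 'default', ...}}}}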
class ShowOspf3OverviewExtensive(ShowOspf3Overview):
""" Parser for:
- show ospf3 overview extensive
"""
cli_command = ['show ospf3 overview extensive']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
return super().cli(output=out)
# ==============================================
# Schema for show ospf3 database extensive
# ==============================================
class ShowOspf3DatabaseExtensiveSchema(MetaParser):
"""
schema = {
"ospf3-database-information": {
"ospf3-area-header": {
"ospf-area": str
},
"ospf3-database": [
{
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"ospf-database-extensive": {
"aging-timer": {
"#text": str
},
"expiration-time": {},
"installation-time": {},
"lsa-change-count": str,
"lsa-changed-time": {},
"send-time": {}
},
"ospf3-intra-area-prefix-lsa" : {
"reference-lsa-type": str,
"reference-lsa-id": str,
"reference-lsa-router-id": str,
"prefix-count": str,
"ospf3-prefix": [],
"ospf3-prefix-options": [],
"ospf3-prefix-metric": []
},
"ospf3-inter-area-prefix-lsa" : {
"reference-lsa-type": str,
"reference-lsa-id": str,
"reference-lsa-router-id": str,
"prefix-count": str,
"ospf3-prefix": [],
"ospf3-prefix-options": [],
"ospf3-prefix-metric": []
},
"ospf3-router-lsa": {
"bits": str,
"ospf3-options": str,
"ospf3-link": [
{
"link-intf-id": str,
"link-metric": str,
"link-type-name": str,
"link-type-value": str,
"nbr-intf-id": str,
"nbr-rtr-id": str
}
],
"ospf3-lsa-topology": {
"ospf-topology-id": str,
"ospf-topology-name": str,
"ospf3-lsa-topology-link": [
{
"ospf-lsa-topology-link-metric": str,
"ospf-lsa-topology-link-node-id": str,
"ospf-lsa-topology-link-state": str
}
]
},
"ospf3-options": str
},
"sequence-number": str
Optional("ospf3-link-lsa"): {
"linklocal-address": str,
"ospf3-options": str,
Optional("ospf3-prefix"): str,
Optional("ospf3-prefix-options"): str,
"prefix-count": str,
"router-priority": str
}
}
],
"ospf3-intf-header": [
{
"ospf-intf": str
}
]
}
}
"""
# Sub Schema ospf3-link
def validate_ospf3_link_list(value):
if not isinstance(value, list):
raise SchemaError("ospf3-link is not a list")
ospf3_link_schema = Schema({
"link-intf-id": str,
"link-metric": str,
"link-type-name": str,
"link-type-value": str,
"nbr-intf-id": str,
"nbr-rtr-id": str,
})
# Validate each dictionary in list
for item in value:
ospf3_link_schema.validate(item)
return value
# Sub Schema ospf3-lsa-topology-link
def validate_ospf3_lsa_topology_link_list(value):
if not isinstance(value, list):
raise SchemaError("ospf3-lsa-topology-link is not a list")
ospf3_lsa_topology_link_schema = Schema({
"link-type-name":
str,
"ospf-lsa-topology-link-metric":
str,
"ospf-lsa-topology-link-node-id":
str,
"ospf-lsa-topology-link-state":
str,
})
# Validate each dictionary in list
for item in value:
ospf3_lsa_topology_link_schema.validate(item)
return value
# Sub Schema ospf3-database
def validate_ospf3_database_list(value):
if not isinstance(value, list):
raise SchemaError("ospf3-database is not a list")
ospf3_database_schema = Schema({
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
Optional("our-entry"): bool,
"lsa-length": str,
"lsa-type": str,
"sequence-number": str,
Optional("ospf-database-extensive"): {
"aging-timer": {
"#text": str
},
"expiration-time": {
"#text": str
},
Optional("ospf3-intra-area-prefix-lsa"): {
Optional("prefix-count"): str,
Optional("reference-lsa-id"): str,
Optional("reference-lsa-router-id"): str,
Optional("reference-lsa-type"): str,
"ospf3-prefix": list,
"ospf3-prefix-metric": list,
"ospf3-prefix-options": list,
},
Optional("ospf3-inter-area-prefix-lsa"): {
Optional("prefix-count"): str,
Optional("reference-lsa-id"): str,
Optional("reference-lsa-router-id"): str,
Optional("reference-lsa-type"): str,
"ospf3-prefix": list,
"ospf3-prefix-metric": list,
"ospf3-prefix-options": list,
},
"installation-time": {
"#text": str
},
Optional("generation-timer"): {
"#text": str
},
"lsa-change-count": str,
"lsa-changed-time": {
"#text": str
},
Optional("send-time"): {
"#text": str
},
Optional("database-entry-state"): str,
},
Optional("ospf3-intra-area-prefix-lsa"): {
Optional("prefix-count"): str,
Optional("reference-lsa-id"): str,
Optional("reference-lsa-router-id"): str,
Optional("reference-lsa-type"): str,
"ospf3-prefix": list,
"ospf3-prefix-metric": list,
"ospf3-prefix-options": list,
},
Optional("ospf3-inter-area-prefix-lsa"): {
Optional("prefix-count"): str,
Optional("reference-lsa-id"): str,
Optional("reference-lsa-router-id"): str,
Optional("reference-lsa-type"): str,
"ospf3-prefix": list,
"ospf3-prefix-metric": list,
"ospf3-prefix-options": list,
},
Optional("ospf3-router-lsa"): {
Optional("bits"):
str,
Optional("ospf3-options"):
str,
Optional("ospf3-link"):
Use(ShowOspf3DatabaseExtensive.validate_ospf3_link_list),
Optional("ospf3-lsa-topology"): {
"ospf-topology-id":
str,
"ospf-topology-name":
str,
"ospf3-lsa-topology-link":
Use(ShowOspf3DatabaseExtensive.
validate_ospf3_lsa_topology_link_list),
},
},
Optional("ospf3-link-lsa"): {
"linklocal-address": str,
"ospf3-options": str,
Optional("ospf3-prefix"): str,
Optional("ospf3-prefix-options"): str,
"prefix-count": str,
"router-priority": str,
},
Optional("ospf3-external-lsa"): {
"metric": str,
"ospf3-prefix": str,
"ospf3-prefix-options": str,
"type-value": str,
},
})
# Validate each dictionary in list
for item in value:
ospf3_database_schema.validate(item)
return value
# Sub Schema ospf3-intf-header
def validate_ospf3_intf_header_list(value):
if not isinstance(value, list):
raise SchemaError("ospf3-intf-header is not a list")
ospf3_link_schema = Schema({"ospf-area": str, "ospf-intf": str})
# Validate each dictionary in list
for item in value:
ospf3_link_schema.validate(item)
return value
schema = {
"ospf3-database-information": {
Optional("ospf3-area-header"): {
"ospf-area": str
},
"ospf3-database": Use(validate_ospf3_database_list),
Optional("ospf3-intf-header"): Use(validate_ospf3_intf_header_list),
}
}
class ShowOspf3DatabaseExtensive(ShowOspf3DatabaseExtensiveSchema):
""" Parser for:
* show ospf3 database extensive
* show ospf3 database advertising-router {address} extensive
* show ospf3 database {lsa_type} advertising-router {address} extensive
"""
cli_command = [
"show ospf3 database extensive",
"show ospf3 database advertising-router {address} extensive",
"show ospf3 database {lsa_type} advertising-router {address} extensive"]
def cli(self, lsa_type=None, address=None, output=None):
if not output:
if lsa_type and address:
out = self.device.execute(self.cli_command[2].format(
address=address,
lsa_type=lsa_type))
elif address:
out = self.device.execute(self.cli_command[1].format(address=address))
else:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
self.state = None
# OSPF3 database, Area 0.0.0.8
p1 = re.compile(r"^OSPF3( +)database,( +)Area( +)"
r"(?P<ospf_area>[\d\.]+)$")
# Type ID Adv Rtr Seq Age Cksum Len
# Router 0.0.0.0 10.34.2.250 0x800018ed 2407 0xaf2d 56
p2 = re.compile(
r"^(?P<lsa_type>\S+) +(?P<lsa_id>(\*{0,1})[\d\.]+) +(?P<advertising_router>[0-9][\d\.]+)"
r" +(?P<sequence_number>\S+) +(?P<age>\d+) +(?P<checksum>\S+) +(?P<lsa_length>\d+)$"
)
# bits 0x2, Options 0x33
p3 = re.compile(
r"^bits +(?P<bits>\S+), +Options +(?P<ospf3_options>\S+)$")
# Type: PointToPoint, Node ID: 10.169.14.240, Metric: 100, Bidirectional
p4 = re.compile(
r"^Type: +(?P<link_type_name>\S+), +Node +ID: +(?P<ospf_lsa_topology_link_node_id>[\d\.]+)"
r", +Metric: +(?P<ospf_lsa_topology_link_metric>\d+), +(?P<ospf_lsa_topology_link_state>\S+)$"
)
# Aging timer 00:18:16
p5 = re.compile(r"^Aging timer +(?P<aging_timer>(\S+ ){0,1}[\d\:]+)$")
# Installed 00:10:20 ago, expires in 00:49:31, sent 00:10:18 ago
p6 = re.compile(
r"^Installed +(?P<installation_time>(\S+ ){0,1}[\d\:]+) +ago, +expires +in +"
r"(?P<expiration_time>(\S+ ){0,1}[\d\:]+), +sent +(?P<send_time>(\S+ ){0,1}[\d\:]+) +ago$"
)
# Last changed 2w6d 04:50:31 ago, Change count: 196
p7 = re.compile(
r"^Last changed +(?P<lsa_changed_time>(\S+ ){0,1}[\d\:]+) +ago, +Change +"
r"count: +(?P<lsa_change_count>\d+)$")
# Ref-lsa-type Router, Ref-lsa-id 0.0.0.0, Ref-router-id 10.34.2.250
p8 = re.compile(
r"^Ref-lsa-type +(?P<reference_lsa_type>\S+), +Ref-lsa-id +(?P<reference_lsa_id>[\d\.]+)"
r", +Ref-router-id +(?P<reference_lsa_router_id>[\d\.]+)$")
# Prefix-count 3
p9 = re.compile(r"^Prefix-count +(?P<prefix_count>\d+)$")
# Prefix 2001:db8:b0f8:3ab::/64
p10 = re.compile(r"^Prefix +(?P<ospf3_prefix>\S+)$")
# Prefix-options 0x0, Metric 5
p11 = re.compile(
r"^Prefix-options +(?P<ospf3_prefix_options>\S+), +Metric +(?P<metric>\d+)$"
)
# fe80::250:56ff:fe8d:a96c
p12 = re.compile(r"^(?P<linklocal_address>[\S\:]+)$")
# Gen timer 00:49:49
p13 = re.compile(r"^Gen +timer +(?P<generation_timer>\S+)$")
# OSPF3 Link-Local database, interface ge-0/0/0.0 Area 0.0.0.8
p14 = re.compile(
r"^OSPF3 +Link-Local +database, +interface +(?P<ospf_intf>\S+) +"
r"Area +(?P<ospf_area>\S+)$")
# Type PointToPoint (1), Metric 5
p15 = re.compile(
r"^Type +(?P<link_type_name>\S+) +\((?P<link_type_value>\S+)\), +"
r"Metric +(?P<link_metric>\S+)$")
# Loc-If-Id 2, Nbr-If-Id 2, Nbr-Rtr-Id 10.189.5.253
p16 = re.compile(
r"^Loc-If-Id +(?P<link_intf_id>\S+), +Nbr-If-Id +(?P<nbr_intf_id>\S+)"
r", +Nbr-Rtr-Id +(?P<nbr_rtr_id>\S+)$")
# Options 0x33, Priority 128
p17 = re.compile(
r"^Options +(?P<ospf3_options>\S+), +Priority +(?P<router_priority>\S+)$"
)
# Prefix-options 0x0, Metric 50, Type 1,
p18 = re.compile(
r"^Prefix-options +(?P<ospf3_prefix_options>\S+), +Metric +(?P<metric>\S+)"
r", +Type +(?P<type_value>\S+),$")
# Last changed 29w5d 21:40:56 ago, Change count: 1, Ours
p19 = re.compile(
r"^Last +changed +(?P<lsa_changed_time>(\S+ ){0,1}[\d\:]+) +ago, +"
r"Change +count: +(?P<lsa_change_count>\d+), +(?P<database_entry_state>\S+)$"
)
# Installed 00:41:50 ago, expires in 00:18:10
p20 = re.compile(
r"^Installed +(?P<installation_time>(\S+ ){0,1}[\d\:]+) +ago, +expires +"
r"in +(?P<expiration_time>(\S+ ){0,1}[\d\:]+)$")
# Prefix 2001:db8:eb18:6337::/64 Prefix-options 0x0
p21 = re.compile(
r"^Prefix +(?P<ospf3_prefix>\S+) +Prefix-options +(?P<ospf3_prefix_options>\S+)$"
)
for line in out.splitlines():
line = line.strip()
# OSPF3 database, Area 0.0.0.8
m = p1.match(line)
if m:
ospf_area = (ret_dict.setdefault(
"ospf3-database-information",
{}).setdefault("ospf3-area-header",
{}).setdefault("ospf-area", None))
group = m.groupdict()
ret_dict["ospf3-database-information"]["ospf3-area-header"][
"ospf-area"] = group["ospf_area"]
continue
# Router 0.0.0.0 10.34.2.250 0x800018ed 2504 0xaf2d 56
m = p2.match(line)
if m:
entry_list = ret_dict.setdefault("ospf3-database-information",
{}).setdefault(
"ospf3-database", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
if entry["lsa-id"][0] == "*":
entry["lsa-id"] = entry["lsa-id"][1:]
entry["our-entry"] = True
self.state = group["lsa_type"]
entry_list.append(entry)
continue
# bits 0x2, Options 0x33
m = p3.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
entry = last_database.setdefault("ospf3-router-lsa", {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Type: PointToPoint, Node ID: 10.169.14.240, Metric: 100, Bidirectional
m = p4.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
topology = last_database.setdefault("ospf3-router-lsa",
{}).setdefault(
"ospf3-lsa-topology",
{})
topology["ospf-topology-id"] = "0"
topology["ospf-topology-name"] = "default"
link_list = topology.setdefault("ospf3-lsa-topology-link", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
link_list.append(entry)
continue
# Aging timer 00:18:16
m = p5.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive",
{}).setdefault("aging-timer", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["aging-timer"][
"#text"] = group["aging_timer"]
continue
# Installed 00:10:20 ago, expires in 00:49:31, sent 00:10:18 ago
m = p6.match(line)
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("expiration-time", {})
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("installation-time", {})
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("send-time", {})
group = m.groupdict()
last_entry["ospf-database-extensive"]["expiration-time"][
"#text"] = group["expiration_time"]
last_entry["ospf-database-extensive"]["installation-time"][
"#text"] = group["installation_time"]
last_entry["ospf-database-extensive"]["send-time"][
"#text"] = group["send_time"]
continue
# Last changed 2w6d 04:50:31 ago, Change count: 196
m = p7.match(line) # lsa_changed_time , lsa_changed_count
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("lsa-changed-time", {})
group = m.groupdict()
last_entry["ospf-database-extensive"]["lsa-changed-time"]["#text"]\
= group["lsa_changed_time"]
last_entry["ospf-database-extensive"][
"lsa-change-count"] = group["lsa_change_count"]
continue
# Ref-lsa-type Router, Ref-lsa-id 0.0.0.0, Ref-router-id 10.34.2.250
m = p8.match(line)
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
if self.state == "IntraArPfx":
entry = last_entry.setdefault("ospf3-intra-area-prefix-lsa", {})
elif self.state == "InterArPfx":
entry = last_entry.setdefault("ospf3-inter-area-prefix-lsa", {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Prefix-count 3
m = p9.match(line)
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
if self.state == "IntraArPfx":
entry = last_entry.setdefault("ospf3-intra-area-prefix-lsa", {})
elif self.state == "InterArPfx":
entry = last_entry.setdefault("ospf3-inter-area-prefix-lsa", {})
elif self.state == "Link":
entry = last_entry.setdefault("ospf3-link-lsa", {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Prefix 2001:db8:b0f8:3ab::/64
m = p10.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
if self.state == "IntraArPfx":
entry_list = last_database.setdefault(
"ospf3-intra-area-prefix-lsa",
{}).setdefault("ospf3-prefix", [])
entry_list.append(group["ospf3_prefix"])
elif self.state == "InterArPfx":
entry_list = last_database.setdefault(
"ospf3-inter-area-prefix-lsa", {}
).setdefault("ospf3-prefix", [])
entry_list.append(group["ospf3_prefix"])
elif self.state == "Extern":
entry = last_database.setdefault("ospf3-external-lsa", {})
entry["ospf3-prefix"] = group["ospf3_prefix"]
else:
raise "state error"
continue
# Prefix-options 0x0, Metric 5
m = p11.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
if self.state == "IntraArPfx":
entry = last_database.setdefault("ospf3-intra-area-prefix-lsa", {})
elif self.state == "InterArPfx":
entry = last_database.setdefault("ospf3-inter-area-prefix-lsa", {})
entry.setdefault("ospf3-prefix-options", []).append(
group["ospf3_prefix_options"]
)
entry.setdefault("ospf3-prefix-metric", []).append(group["metric"])
continue
# fe80::250:56ff:fe8d:a96c
m = p12.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
entry = last_database.setdefault("ospf3-link-lsa", {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Gen timer 00:49:49
m = p13.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_database.setdefault("ospf-database-extensive", {})\
.setdefault("generation-timer", {})
group = m.groupdict()
last_database["ospf-database-extensive"]["generation-timer"][
"#text"] = group["generation_timer"]
continue
# OSPF3 Link-Local database, interface ge-0/0/0.0 Area 0.0.0.8
m = p14.match(line)
if m:
header_list = ret_dict.setdefault("ospf3-database-information",
{}).setdefault(
"ospf3-intf-header", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
header_list.append(entry)
continue
# Type PointToPoint (1), Metric 5
m = p15.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
ospf3_link_list = last_database.setdefault(
"ospf3-router-lsa", {}).setdefault("ospf3-link", [])
group = m.groupdict()
entry = {}
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
ospf3_link_list.append(entry)
continue
# Loc-If-Id 2, Nbr-If-Id 2, Nbr-Rtr-Id 10.189.5.253
m = p16.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_ospf3_link = last_database["ospf3-router-lsa"][
"ospf3-link"][-1]
group = m.groupdict()
entry = last_ospf3_link
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Options 0x33, Priority 128
m = p17.match(line) # ospf3-options
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
entry = last_database["ospf3-link-lsa"]
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Prefix-options 0x0, Metric 50, Type 1,
m = p18.match(line)
if m:
last_database = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
group = m.groupdict()
entry = last_database.setdefault("ospf3-external-lsa", {})
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
# Last changed 29w5d 21:40:56 ago, Change count: 1, Ours
m = p19.match(line) # lsa_changed_time , lsa_changed_count
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("lsa-changed-time", {})
group = m.groupdict() # database_entry_state
last_entry["ospf-database-extensive"]["lsa-changed-time"][
"#text"] = group["lsa_changed_time"]
last_entry["ospf-database-extensive"][
"lsa-change-count"] = group["lsa_change_count"]
last_entry["ospf-database-extensive"][
"database-entry-state"] = group["database_entry_state"]
continue
# Installed 00:41:50 ago, expires in 00:18:10
m = p20.match(line)
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("expiration-time", {})
last_entry.setdefault("ospf-database-extensive",
{}).setdefault("installation-time", {})
group = m.groupdict()
last_entry["ospf-database-extensive"]["expiration-time"][
"#text"] = group["expiration_time"]
last_entry["ospf-database-extensive"]["installation-time"][
"#text"] = group["installation_time"]
continue
# Prefix 2001:db8:eb18:6337::/64 Prefix-options 0x0
m = p21.match(line)
if m:
last_entry = ret_dict["ospf3-database-information"][
"ospf3-database"][-1]
entry = last_entry.setdefault("ospf3-link-lsa", {})
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace("_", "-")
entry[entry_key] = group_value
continue
return ret_dict
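# Illustrative note: ShowOspf3DatabaseExtensive keeps a small state machine in
# self.state. The lsa_type captured by the header regex p2 ('Router',
# 'IntraArPfx', 'InterArPfx', 'Link', 'Extern', ...) decides where later
# indented lines land: a "Prefix ..." line (p10) is appended to
# ospf3-intra-area-prefix-lsa when self.state == 'IntraArPfx', to
# ospf3-inter-area-prefix-lsa for 'InterArPfx', and stored as a single value
# under ospf3-external-lsa for 'Extern'. A header row such as (values made up)
#
#   IntraArPfx  *0.0.0.1   10.34.2.250   0x80000b3e  1608  0x06e4  76
#
# would therefore seed an entry with our-entry True and route its following
# Prefix / Prefix-options lines into the intra-area prefix lists.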
class ShowOspf3DatabaseNetworkDetailSchema(MetaParser):
""" Schema for:
* show ospf3 database network detail
"""
""" schema = {
Optional("@xmlns:junos"): str,
"ospf3-database-information": {
Optional("@xmlns"): str,
"ospf3-area-header": {
"ospf-area": str
},
"ospf3-database": [
{
Optional("@heading"): str,
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"ospf3-network-lsa": {
"attached-router": "list",
"ospf3-lsa-topology": {
"ospf-topology-id": str,
"ospf-topology-name": str,
"ospf3-lsa-topology-link": [
{
"link-type-name": str,
"ospf-lsa-topology-link-metric": str,
"ospf-lsa-topology-link-node-id": str,
"ospf-lsa-topology-link-state": str
}
]
},
"ospf3-options": str
},
"our-entry": str,
"sequence-number": str
}
]
}
} """
def validate_ospf_lsa_topology_innerlist(value):
if not isinstance(value, list):
raise SchemaError('ospf3 lsa is not a list')
ospf3_lsa_schema = Schema({
"link-type-name": str,
"ospf-lsa-topology-link-metric": str,
"ospf-lsa-topology-link-node-id": str,
"ospf-lsa-topology-link-state": str
})
for item in value:
ospf3_lsa_schema.validate(item)
return value
def validate_ospf3_database_topology_list(value):
if not isinstance(value, list):
raise SchemaError('ospf-database is not a list')
ospf3_database_schema = Schema({
Optional("@heading"): str,
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"ospf3-network-lsa": {
"attached-router": list,
"ospf3-lsa-topology": {
Optional("ospf-topology-id"):
str,
Optional("ospf-topology-name"):
str,
"ospf3-lsa-topology-link":
Use(ShowOspf3DatabaseNetworkDetail.
validate_ospf_lsa_topology_innerlist)
},
"ospf3-options": str
},
Optional("our-entry"): bool,
"sequence-number": str
})
for item in value:
ospf3_database_schema.validate(item)
return value
schema = {
Optional("@xmlns:junos"): str,
"ospf3-database-information": {
Optional("@xmlns"): str,
"ospf3-area-header": {
"ospf-area": str
},
"ospf3-database": Use(validate_ospf3_database_topology_list)
}
}
class ShowOspf3DatabaseNetworkDetail(ShowOspf3DatabaseNetworkDetailSchema):
""" Parser for:
* show ospf3 database network detail
"""
cli_command = 'show ospf3 database network detail'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
# OSPF3 database, Area 0.0.0.0
p1 = re.compile(r'^OSPF3 +database, +Area +(?P<ospf_area>\S+)$')
# Network *0.0.0.9 192.168.219.235 0x8000001d 892 0xf99f 36
# Network 0.0.0.3 192.168.219.236 0x80000b14 2142 0x1983 36
p2 = re.compile(r'^(?P<lsa_type>\S+) *(?P<our_entry>\*)?'
r'(?P<lsa_id>[\d\.]+) +(?P<advertising_router>\S+) '
r'+(?P<sequence_number>\S+) +(?P<age>\S+) '
r'+(?P<checksum>\S+) +(?P<lsa_length>\S+)$')
# Options 0x33
p3 = re.compile(r'^Options +(?P<ospf3_options>\S+)$')
# Attached router 192.168.219.235
p4 = re.compile(r'^Attached router +(?P<attached_router>\S+)$')
# Type: Transit, Node ID: 192.168.219.236, Metric: 0, Bidirectional
p6 = re.compile(
r'^Type: +(?P<link_type_name>\S+), '
r'+Node +ID: +(?P<ospf_lsa_topology_link_node_id>\S+), '
r'+Metric: +(?P<ospf_lsa_topology_link_metric>\d+), '
r'+(?P<ospf_lsa_topology_link_state>\S+)$')
ret_dict = {}
for line in out.splitlines():
line = line.strip()
# OSPF3 database, Area 0.0.0.0
m = p1.match(line)
if m:
ospf_database_information_entry = ret_dict.setdefault(
"ospf3-database-information", {})
ospf3_database_list = ospf_database_information_entry.setdefault(
"ospf3-database", [])
group = m.groupdict()
entry_dict = {}
entry_dict["ospf-area"] = group["ospf_area"]
ospf_database_information_entry[
"ospf3-area-header"] = entry_dict
continue
# Network *10.69.197.1 192.168.219.235 0x80000026 1730 0x22 0x1b56 36
# Network 0.0.0.3 192.168.219.236 0x80000b14 2142 0x1983 36
m = p2.match(line)
if m:
ospf3_database_dict = {}
attached_router_list = []
ospf3_network_lsa = {}
ospf3_lsa_topology = {}
ospf3_lsa_topology_link = []
ospf3_network_lsa["attached-router"] = attached_router_list
ospf3_lsa_topology[
"ospf3-lsa-topology-link"] = ospf3_lsa_topology_link
ospf3_network_lsa["ospf3-lsa-topology"] = ospf3_lsa_topology
ospf3_database_dict["ospf3-network-lsa"] = ospf3_network_lsa
ospf3_database_list.append(ospf3_database_dict)
group = m.groupdict()
for group_key, group_value in group.items():
if (group_key == "our_entry"):
if (group_value == '*'):
ospf3_database_dict['our-entry'] = True
else:
entry_key = group_key.replace('_', '-')
ospf3_database_dict[entry_key] = group_value
continue
# Options 0x33
m = p3.match(line)
if m:
group = m.groupdict()
ospf3_network_lsa["ospf3-options"] = group["ospf3_options"]
continue
# attached router 192.168.219.235
m = p4.match(line)
if m:
group = m.groupdict()
attached_router_list.append(group["attached_router"])
continue
# Type: Transit, Node ID: 192.168.219.236, Metric: 0, Bidirectional
m = p6.match(line)
if m:
group = m.groupdict()
entry_dict = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
entry_dict[entry_key] = group_value
ospf3_lsa_topology_link.append(entry_dict)
continue
return ret_dict
class ShowOspf3DatabaseLinkAdvertisingRouterSchema(MetaParser):
""" Schema for:
* show ospf3 database link advertising-router {ipaddress} detail
"""
""" schema = {
Optional("@xmlns:junos"): str,
"ospf3-database-information": {
Optional("@xmlns"): str,
"ospf3-database": [
{
Optional("@heading"): str,
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"ospf3-link-lsa": {
"linklocal-address": str,
"ospf3-options": str,
"ospf3-prefix": str,
"ospf3-prefix-options": str,
"prefix-count": str,
"router-priority": str
},
"our-entry": str,
"sequence-number": str
}
],
"ospf3-intf-header": [
{
"ospf-area": str,
"ospf-intf": str
}
]
}
} """
def validate_ospf3_intf_list(value):
if not isinstance(value, list):
raise SchemaError('ospf3 intf is not a list')
ospf3_intf_schema = Schema({"ospf-area": str, "ospf-intf": str})
for item in value:
ospf3_intf_schema.validate(item)
return value
def validate_ospf3_database_list(value):
if not isinstance(value, list):
raise SchemaError('ospf-database is not a list')
ospf3_database_schema = Schema({
Optional("@heading"): str,
"advertising-router": str,
"age": str,
"checksum": str,
"lsa-id": str,
"lsa-length": str,
"lsa-type": str,
"ospf3-link-lsa": {
"linklocal-address": str,
"ospf3-options": str,
Optional("ospf3-prefix"): str,
Optional("ospf3-prefix-options"): str,
"prefix-count": str,
"router-priority": str
},
Optional("our-entry"): bool,
"sequence-number": str
})
for item in value:
ospf3_database_schema.validate(item)
return value
schema = {
Optional("@xmlns:junos"): str,
"ospf3-database-information": {
Optional("@xmlns"): str,
"ospf3-database": Use(validate_ospf3_database_list),
"ospf3-intf-header": Use(validate_ospf3_intf_list)
}
}
class ShowOspf3DatabaseLinkAdvertisingRouter(
ShowOspf3DatabaseLinkAdvertisingRouterSchema):
""" Parser for:
* show ospf3 database link advertising-router {ipaddress} detail
"""
cli_command = 'show ospf3 database link advertising-router {ipaddress} detail'
def cli(self, ipaddress=None, output=None):
if not output:
if ipaddress:
cmd = self.cli_command.format(ipaddress=ipaddress)
else:
cmd = self.cli_command
out = self.device.execute(cmd)
else:
out = output
# Type ID Adv Rtr Seq Age Opt Cksum Len
p0 = re.compile(r'^(?P<heading>Type \s+ID[\s\S]+)$')
# OSPF3 Link-Local database, interface ge-0/0/2.0 Area 0.0.0.0
p1 = re.compile(r'^OSPF3 +Link-Local +database, +interface '
r'+(?P<ospf_intf>\S+) +Area +(?P<ospf_area>\S+)$')
# Link *0.0.0.9 192.168.219.235 0x80000b10 1379 0xd3b0 56
p2 = re.compile(r'^(?P<lsa_type>\S+) *(?P<our_entry>\*)?'
r'(?P<lsa_id>[\d\.]+) +(?P<advertising_router>\S+) '
r'+(?P<sequence_number>\S+) +(?P<age>\S+) '
r'+(?P<checksum>\S+) +(?P<lsa_length>\S+)$')
# fe80::20c:2900:3367:243d
p3 = re.compile(r'^(?P<linklocal_address>fe80+[\s\S]+)$')
# Options 0x33, Priority 20
p4 = re.compile(
r'^Options +(?P<ospf3_options>\S+), Priority +(?P<router_priority>\S+)$'
)
# Prefix-count 1
p5 = re.compile(r'^Prefix-count +(?P<prefix_count>\S+)$')
# Prefix 2001:db8:dae9:cf16::/64 Prefix-options 0x0
p6 = re.compile(r'^Prefix +(?P<ospf3_prefix>\S+) '
r'+Prefix-options +(?P<ospf3_prefix_options>\S+)$')
ret_dict = {}
for line in out.splitlines():
line = line.strip()
# Type ID Adv Rtr Seq Age Opt Cksum Len
m = p0.match(line)
if m:
ospf3_database_dict = {}
group = m.groupdict()
ospf3_database_dict["@heading"] = group["heading"]
# OSPF3 Link-Local database, interface ge-0/0/2.0 Area 0.0.0.0
m = p1.match(line)
if m:
ospf_database_information_entry = ret_dict.setdefault(
"ospf3-database-information", {})
ospf3_database_list = ospf_database_information_entry.setdefault(
"ospf3-database", [])
ospf3_intf_header = ospf_database_information_entry.setdefault(
"ospf3-intf-header", [])
group = m.groupdict()
entry_dict = {}
entry_dict["ospf-area"] = group["ospf_area"]
entry_dict["ospf-intf"] = group["ospf_intf"]
ospf3_intf_header.append(entry_dict)
continue
# Link *0.0.0.9 192.168.219.235 0x80000b10 1379 0xd3b0 56
m = p2.match(line)
if m:
ospf3_link_lsa = {}
group = m.groupdict()
for group_key, group_value in group.items():
if (group_key == "our_entry"):
if (group_value == '*'):
ospf3_database_dict['our-entry'] = True
else:
entry_key = group_key.replace('_', '-')
ospf3_database_dict[entry_key] = group_value
ospf3_database_list.append(ospf3_database_dict)
continue
# fe80::20c:2900:3367:243d
m = p3.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ospf3_link_lsa[entry_key] = group_value
continue
# Options 0x33, Priority 20
m = p4.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ospf3_link_lsa[entry_key] = group_value
continue
# Prefix-count 1
m = p5.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ospf3_link_lsa[entry_key] = group_value
ospf3_database_dict["ospf3-link-lsa"] = ospf3_link_lsa
continue
# Prefix 2001:db8:dae9:cf16::/64 Prefix-options 0x0
m = p6.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ospf3_link_lsa[entry_key] = group_value
continue
return ret_dict
class ShowOspf3RouteNetworkExtensiveSchema(MetaParser):
'''schema = {
"ospf3-route-information": {
"ospf-topology-route-table": {
"ospf3-route": [
"ospf3-route-entry": {
"address-prefix": str,
"interface-cost": str,
"next-hop-type": str,
"ospf-area": str,
"ospf-next-hop": {
"next-hop-name": {
"interface-name": str
}
},
"route-origin": str,
"route-path-type": str,
"route-priority": str,
"route-type": str
}
]
}
}
}'''
def validate_ospf3_route_list(value):
if not isinstance(value, list):
raise SchemaError('ospf-route is not a list')
ospf3_route_schema = Schema({
"ospf3-route-entry": {
"address-prefix": str,
"interface-cost": str,
"next-hop-type": str,
"ospf-area": str,
Optional("ospf-next-hop"): {
"next-hop-name": {
"interface-name": str
}
},
"route-origin": str,
"route-path-type": str,
"route-priority": str,
"route-type": str
}
})
for item in value:
ospf3_route_schema.validate(item)
return value
schema = {
"ospf3-route-information": {
"ospf-topology-route-table": {
"ospf3-route": Use(validate_ospf3_route_list)
}
}
}
class ShowOspf3RouteNetworkExtensive(ShowOspf3RouteNetworkExtensiveSchema):
    """ Parser for:
            * show ospf3 route network extensive
    """
cli_command = 'show ospf3 route network extensive'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
ret_dict = {}
#2001::4/128 Intra Network IP 0
p1 = re.compile(r'^(?P<address_prefix>[\d\:\/]+) '
r'+(?P<route_path_type>\S+) +(?P<route_type>\S+) '
r'+(?P<next_hop_type>\S+) +(?P<interface_cost>\d+)$')
#NH-interface lo0.0
p2 = re.compile(r'^NH-interface +(?P<interface_name>\S+)$')
#Area 0.0.0.0, Origin 10.64.4.4, Priority low
p3 = re.compile(r'^Area +(?P<ospf_area>\S+),+ Origin '
r'+(?P<route_origin>\S+), +Priority '
r'+(?P<route_priority>\S+)$')
for line in out.splitlines():
line = line.strip()
#2001::4/128 Intra Network IP 0
m = p1.match(line)
if m:
group = m.groupdict()
ospf3_topology_route_table = ret_dict.setdefault(
'ospf3-route-information', {}).setdefault('ospf-topology-route-table', {}).\
setdefault('ospf3-route', [])
route_entry_dict = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
route_entry_dict[entry_key] = group_value
#NH-interface lo0.0
m = p2.match(line)
if m:
group = m.groupdict()
next_hop_dict = {'next-hop-name':{'interface-name':group['interface_name']}}
route_entry_dict['ospf-next-hop'] = next_hop_dict
continue
#Area 0.0.0.0, Origin 10.64.4.4, Priority low
m = p3.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
route_entry_dict[entry_key] = group_value
ospf3_parent_route_dict = {}
ospf3_parent_route_dict['ospf3-route-entry'] = route_entry_dict
ospf3_topology_route_table.append(ospf3_parent_route_dict)
continue
return ret_dict
class ShowOspf3NeighborInstanceAllSchema(MetaParser):
"""schema = {
"ospf3-neighbor-information-all": {
"ospf3-instance-neighbor": {
"ospf3-instance-name": str,
"ospf3-realm-neighbor": {
"ospf3-realm-name": str
"ospf3-neighbor": [
{
"activity-timer": str,
"interface-name": str,
"neighbor-address": str,
"neighbor-id": str,
"neighbor-priority": str,
"ospf-neighbor-state": str
}
]
}
}
}
}"""
def validate_ospf3_neighbor_list(value):
        # Pass ospf3-neighbor entry list of dicts in value
if not isinstance(value, list):
raise SchemaError('ospf3-table-entry is not a list')
        # Create neighbor entry schema
entry_schema = Schema({
"activity-timer": str,
"interface-name": str,
"neighbor-address": str,
"neighbor-id": str,
"neighbor-priority": str,
"ospf-neighbor-state": str
})
# Validate each dictionary in list
for item in value:
entry_schema.validate(item)
return value
# Main Schema
schema = {
"ospf3-neighbor-information-all": {
"ospf3-instance-neighbor": {
"ospf3-instance-name": str,
"ospf3-realm-neighbor": {
"ospf3-realm-name": str,
"ospf3-neighbor": Use(validate_ospf3_neighbor_list)
}
}
}
}
# ==============================================
# Parser for 'show ospf3 neighbor instance all'
# ==============================================
class ShowOspf3NeighborInstanceAll(ShowOspf3NeighborInstanceAllSchema):
""" Parser for:
* show ospf3 neighbor instance all
"""
cli_command = ['show ospf3 neighbor instance all']
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command[0])
else:
out = output
ret_dict = {}
# Instance: master
p0 = re.compile(r'^Instance: +(?P<instance_name>\S+)$')
# Realm: ipv6-unicast
p1 = re.compile(r'^Realm: +(?P<realm_name>\S+)$')
#10.189.5.253 ge-0/0/0.0 Full 128 35
p2 = re.compile(r'^(?P<id>[\d\.]+) +(?P<interface>\S+) '
r'+(?P<state>\S+) +(?P<pri>\S+) +(?P<dead>\d+)$')
#Neighbor-address fe80::250:56ff:fe8d:53c0
p3 = re.compile(r'^Neighbor-address +(?P<neighbor_address>\S+)$')
for line in out.splitlines():
line = line.strip()
# Instance: master
m = p0.match(line)
if m:
group = m.groupdict()
instance = group['instance_name']
ospf3_instance_neighbor = ret_dict.setdefault('ospf3-neighbor-information-all', {}).setdefault(
'ospf3-instance-neighbor', {})
ospf3_instance_neighbor['ospf3-instance-name'] = instance
continue
# Realm: ipv6-unicast
m = p1.match(line)
if m:
group = m.groupdict()
realm_name = group['realm_name']
ospf3_realm_neighbor = ospf3_instance_neighbor.setdefault("ospf3-realm-neighbor", {})
ospf3_realm_neighbor["ospf3-realm-name"] = realm_name
continue
#10.189.5.253 ge-0/0/0.0 Full 128 35
m = p2.match(line)
if m:
group = m.groupdict()
ospf3_entry_list = ospf3_realm_neighbor.setdefault('ospf3-neighbor', [])
ospf3_entry_dict = {}
ospf3_entry_dict['activity-timer'] = group['dead']
ospf3_entry_dict['interface-name'] = group['interface']
ospf3_entry_dict['neighbor-id'] = group['id']
ospf3_entry_dict['neighbor-priority'] = group['pri']
ospf3_entry_dict['ospf-neighbor-state'] = group['state']
continue
#Neighbor-address fe80::250:56ff:fe8d:53c0
m = p3.match(line)
if m:
group = m.groupdict()
neighbor_address = group['neighbor_address']
ospf3_entry_dict['neighbor-address'] = neighbor_address
ospf3_entry_list.append(ospf3_entry_dict)
continue
return ret_dict
```
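The OSPF3 parsers above all follow the same genie MetaParser flow: `cli()` either runs the show command or accepts pre-captured text, walks it with the compiled regexes, and fills a dictionary that is then checked against the class schema. A minimal offline sketch, assuming the classes above are importable and mirroring the mocked-device pattern used by the viptela unit test further below (the CLI text here is illustrative, not a real capture):
```python
# Offline sketch: feed canned CLI text to one of the parsers above through a mocked device.
from unittest.mock import Mock
raw_output = '''
OSPF3 database, Area 0.0.0.0
 Type       ID               Adv Rtr           Seq         Age  Cksum  Len
Network     0.0.0.3          192.168.219.236   0x80000b14  2142 0x1983  36
  Options 0x33
  Attached router 192.168.219.235
'''
device = Mock(**{'execute.return_value': raw_output})
parsed = ShowOspf3DatabaseNetworkDetail(device=device).parse()
print(parsed['ospf3-database-information']['ospf3-database'][0]['lsa-type'])  # Network
```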
#### File: parser/junos/show_task.py
```python
import re
# Metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import (Any,
Optional, Use, Schema)
class ShowTaskReplicationSchema(MetaParser):
""" Schema for:
* show task replication
"""
schema = {
"task-replication-state": {
"task-gres-state": str,
"task-re-mode": str
}
}
class ShowTaskReplication(ShowTaskReplicationSchema):
""" Parser for:
* show task replication
"""
cli_command = 'show task replication'
def cli(self, output=None):
if not output:
out = self.device.execute(self.cli_command)
else:
out = output
# Stateful Replication: Disabled
p1 = re.compile(r'^Stateful +Replication: +(?P<task_gres_state>\S+)$')
# RE mode: Master
p2 = re.compile(r'^RE +mode: +(?P<task_re_mode>\S+)$')
ret_dict = {}
for line in out.splitlines():
line = line.strip()
# Stateful Replication: Disabled
m = p1.match(line)
if m:
group = m.groupdict()
ret_dict.setdefault("task-replication-state",{})\
.setdefault("task-gres-state", group["task_gres_state"])
continue
# RE mode: Master
m = p2.match(line)
if m:
group = m.groupdict()
ret_dict.setdefault("task-replication-state",{})\
.setdefault("task-re-mode", group["task_re_mode"])
continue
return ret_dict
```
#### File: parser/junos/show_ted.py
```python
import re
# metaparser
from genie.metaparser import MetaParser
from pyats.utils.exceptions import SchemaError
from genie.metaparser.util.schemaengine import Schema, Any, Optional, Use
class ShowTedDatabaseExtensiveSchema(MetaParser):
""" Schema for:
- 'show ted database extensive'
- 'show ted database extensive {node_id}'
"""
schema = {
'isis_nodes': int,
'inet_nodes': int,
'node': {
Any(): { # '172.16.1.1'
'type': str,
'age': int,
'link_in': int,
'link_out': int,
Optional('protocol'): {
Any(): { # 'ospf(0.0.0.1)'
'to': {
Any(): { # '172.16.1.1'
'local': {
Any(): { # '172.16.1.1'
'remote': {
Any(): { # '172.16.1.1'
'local_interface_index': int,
'remote_interface_index': int,
Optional('color'): str,
'metric': int,
Optional('static_bw'): str,
Optional('reservable_bw'): str,
Optional('available_bw'): {
Any(): { # priority
'bw': str
}
},
'interface_switching_capability_descriptor': {
Any(): { # from Interface Switching Capability Descriptor(1):
'switching_type': str,
'encoding_type': str,
'maximum_lsp_bw': {
Any(): { # 1, 2, 3, ...
'bw': str
}
}
}
},
Optional('p2p_adj_sid'): {
'sid': {
Any(): {
'address_family': str,
'flags': str,
'weight': int
}
}
}
}
}
}
}
}
},
Optional('prefixes'): {
Any(): { # prefix
'flags': str,
'prefix_sid': {
Any(): { # sid
'flags': str,
'algo': int
}
}
}
},
Optional('spring_capabilities'): {
'srgb_block': {
'start': int,
'range': int,
'flags': str
}
},
Optional('spring_algorithms'): list
}
}
}
}
}
class ShowTedDatabaseExtensive(ShowTedDatabaseExtensiveSchema):
""" Parser for:
- 'show ted database extensive'
- 'show ted database extensive {node_id}'
"""
cli_command = [
'show ted database extensive',
'show ted database extensive {node_id}'
]
def cli(self, node_id=None, output=None):
if output is None:
if node_id:
cmd = self.cli_command[1].format(node_id=node_id)
else:
cmd = self.cli_command[0]
out = self.device.execute(cmd)
else:
out = output
# init vars
ret_dict = {}
# TED database: 0 ISIS nodes 0 INET nodes
p1 = re.compile(r'^TED +database: +(?P<isis_nodes>\d+) +ISIS +nodes +(?P<inet_nodes>\d+) +INET +nodes$')
# NodeID: 172.16.1.1
p2 = re.compile(r'^NodeID: +(?P<node_id>\S+)$')
# Type: Rtr, Age: 1000 secs, LinkIn: 0, LinkOut: 0
p3 = re.compile(r'^Type: +(?P<type>[\w-]+), +Age: +(?P<age>\d+) +secs, +LinkIn: '
r'+(?P<link_in>\d+), +LinkOut: +(?P<link_out>\d+)$')
# Protocol: OSPF(0.0.0.1)
p4 = re.compile(r'^Protocol: +(?P<protocol>[\w().]+)$')
# To: 172.16.1.1, Local: 10.16.0.1, Remote: 10.16.0.2
p5 = re.compile(r'^To: +(?P<to>\S+), +Local: +(?P<local>\S+), +Remote: +(?P<remote>\S+)$')
# Local interface index: 0, Remote interface index: 0
p6 = re.compile(r'^Local +interface +index: +(?P<local_interface_index>\d+), +'
r'Remote +interface +index: +(?P<remote_interface_index>\d+)$')
# Color: 0 blue
p7 = re.compile(r'^Color: +(?P<color>[\w<> ]+)$')
# Metric: 0
p8 = re.compile(r'^Metric: +(?P<metric>\d+)$')
# Static BW: 100Mbps
p9 = re.compile(r'^Static +BW: +(?P<static_bw>\w+)$')
# Reservable BW: 100bps
p10 = re.compile(r'^Reservable +BW: +(?P<reservable_bw>\w+)$')
# [0] 0bps
# [0] 0bps [1] 0bps
# [0] 0bps [1] 0bps [2] 0bps
# [0] 0bps [1] 0bps [2] 0bps [3] 0bps
p11 = re.compile(r'\[(?P<priority>\d+)\] +(?P<bw>\w+)')
# Interface Switching Capability Descriptor(1):
p12 = re.compile(
r'^Interface +Switching +Capability +Descriptor\((?P<descriptor>[\w ]+)\):$')
# Switching type: Packet
p13 = re.compile(r'^Switching +type: +(?P<switching_type>\w+)$')
# Encoding type: Packet
p14 = re.compile(r'^Encoding +type: +(?P<encoding_type>\w+)$')
# IPV4, SID: 12345, Flags: 0x00, Weight: 0
p15 = re.compile(r'^(?P<address_family>\w+), +SID: +(?P<sid>\d+), +Flags: +'
r'(?P<flags>\w+), +Weight: +(?P<weight>\d+)$')
# 172.16.1.1/32
p16 = re.compile(r'^(?P<prefix>\S+/\d+)$')
# Flags: 0x60
p17 = re.compile(r'^Flags: +(?P<flags>\w+)$')
# SID: 1234, Flags: 0x00, Algo: 0
p18 = re.compile(
r'^SID: +(?P<sid>\d+), +Flags: +(?P<flags>\w+), +Algo: +(?P<algo>\d+)$')
# SRGB block [Start: 12345, Range: 1234, Flags: 0x00]
p19 = re.compile(r'^SRGB +block +\[Start: +(?P<start>\d+), +Range: +'
r'(?P<range>\d+), +Flags: +(?P<flags>\w+)\]$')
# Algo: 0
p20 = re.compile(r'^Algo: +(?P<algo>\d+)$')
for line in out.splitlines():
line = line.strip()
m = p1.match(line)
if m:
group = m.groupdict()
ret_dict.update({k: int(v) for k, v in group.items()})
continue
m = p2.match(line)
if m:
group = m.groupdict()
node_dict = ret_dict.setdefault(
'node', {}).setdefault(group['node_id'], {})
continue
m = p3.match(line)
if m:
group = m.groupdict()
node_dict.update({'type': group['type']})
node_dict.update({'age': int(group['age'])})
node_dict.update({'link_in': int(group['link_in'])})
node_dict.update({'link_out': int(group['link_out'])})
continue
m = p4.match(line)
if m:
group = m.groupdict()
protocol_dict = node_dict.setdefault(
'protocol', {}).setdefault(group['protocol'], {})
continue
m = p5.match(line)
if m:
group = m.groupdict()
remote_dict = (protocol_dict.setdefault('to', {}).setdefault(group['to'], {})
.setdefault('local', {}).setdefault(group['local'], {})
.setdefault('remote', {}).setdefault(group['remote'], {}))
continue
m = p6.match(line)
if m:
group = m.groupdict()
remote_dict.update({k: int(v) for k, v in group.items()})
continue
m = p7.match(line)
if m:
group = m.groupdict()
remote_dict.update({'color': group['color']})
continue
m = p8.match(line)
if m:
group = m.groupdict()
remote_dict.update({'metric': int(group['metric'])})
continue
m = p9.match(line)
if m:
group = m.groupdict()
remote_dict.update({'static_bw': group['static_bw']})
continue
m = p10.match(line)
if m:
group = m.groupdict()
remote_dict.update({'reservable_bw': group['reservable_bw']})
continue
m = p11.findall(line)
if m:
if 'interface_switching_capability_descriptor' in remote_dict:
for k, v in m:
(descriptor_dict.setdefault('maximum_lsp_bw', {})
.setdefault(int(k), {})
.update({'bw': v}))
else:
for k, v in m:
(remote_dict.setdefault('available_bw', {})
.setdefault(int(k), {})
.update({'bw': v}))
continue
m = p12.match(line)
if m:
group = m.groupdict()
descriptor_dict = (remote_dict.setdefault('interface_switching_capability_descriptor', {})
.setdefault(group['descriptor'], {}))
continue
m = p13.match(line)
if m:
group = m.groupdict()
descriptor_dict.update(
{'switching_type': group['switching_type']})
continue
m = p14.match(line)
if m:
group = m.groupdict()
descriptor_dict.update(
{'encoding_type': group['encoding_type']})
continue
m = p15.match(line)
if m:
group = m.groupdict()
p2p_adj_sid_dict = (remote_dict.setdefault('p2p_adj_sid', {})
.setdefault('sid', {})
.setdefault(group['sid'], {}))
p2p_adj_sid_dict.update(
{'address_family': group['address_family']})
p2p_adj_sid_dict.update({'flags': group['flags']})
p2p_adj_sid_dict.update({'weight': int(group['weight'])})
continue
m = p16.match(line)
if m:
group = m.groupdict()
prefix_dict = (protocol_dict.setdefault('prefixes', {})
.setdefault(group['prefix'], {}))
continue
m = p17.match(line)
if m:
group = m.groupdict()
prefix_dict.update({'flags': group['flags']})
continue
m = p18.match(line)
if m:
group = m.groupdict()
prefix_sid_dict = (prefix_dict.setdefault('prefix_sid', {})
.setdefault(int(group['sid']), {}))
prefix_sid_dict.update({'flags': group['flags']})
prefix_sid_dict.update({'algo': int(group['algo'])})
continue
m = p19.match(line)
if m:
group = m.groupdict()
srgb_block_dict = (protocol_dict.setdefault('spring_capabilities', {})
.setdefault('srgb_block', {}))
srgb_block_dict.update({'start': int(group['start'])})
srgb_block_dict.update({'range': int(group['range'])})
srgb_block_dict.update({'flags': group['flags']})
continue
m = p20.match(line)
if m:
group = m.groupdict()
current_algorithms = protocol_dict.get('spring_algorithms', [])
current_algorithms.append(group['algo'])
protocol_dict.update({'spring_algorithms': current_algorithms})
return ret_dict
class ShowTedDatabaseIpAddressSchema(MetaParser):
""" Schema for:
* show ted database {ipaddress}
schema = {
"ted-database-information": {
"ted-database": {
"ted-database-age": str,
"ted-database-id": str,
"ted-database-link-in": str,
"ted-database-link-out": str,
"ted-database-protocol": str,
"ted-database-type": str,
"ted-link": [
{
"ted-link-local-address": str,
"ted-link-local-ifindex": str,
"ted-link-protocol": str,
"ted-link-remote-address": str,
"ted-link-remote-ifindex": str,
"ted-link-to": str
}
]
},
"ted-database-summary": {
"ted-database-inet-count": str,
"ted-database-iso-count": str
}
}
}
"""
# Subschema ted link
def validate_ted_link(val):
''' Validates each value in ted link '''
if not isinstance(val, list):
raise SchemaError('ted link is not a list')
ted_link_schema = Schema({
"ted-link-local-address": str,
"ted-link-local-ifindex": str,
"ted-link-protocol": str,
"ted-link-remote-address": str,
"ted-link-remote-ifindex": str,
"ted-link-to": str
})
for item in val:
ted_link_schema.validate(item)
return val
schema = {
"ted-database-information": {
"ted-database": {
"ted-database-age": str,
"ted-database-id": str,
"ted-database-link-in": str,
"ted-database-link-out": str,
"ted-database-protocol": str,
"ted-database-type": str,
"ted-link": Use(validate_ted_link)
},
"ted-database-summary": {
"ted-database-inet-count": str,
"ted-database-iso-count": str
}
}
}
class ShowTedDatabaseIpAddress(ShowTedDatabaseIpAddressSchema):
""" Parser for:
* 'show ted database {ipaddress}'
"""
cli_command = 'show ted database {ip_address}'
def cli(self, ip_address, output=None):
if not output:
out = self.device.execute(self.cli_command.format(ip_address = ip_address))
else:
out = output
ret_dict = {}
# TED database: 0 ISIS nodes 0 INET nodes
p1 = re.compile(r'^TED +database: +(?P<isis_nodes>\d+) +ISIS +nodes +'
r'(?P<inet_nodes>\d+) +INET +nodes$')
# 10.34.2.250 Rtr 1876 2 2 OSPF(0.0.0.8)
p2 = re.compile(r'^(?P<ted_database_id>\S+) +(?P<ted_database_type>\S+) +'
r'(?P<ted_database_age>\d+) +(?P<ted_database_link_in>\d+) +'
r'(?P<ted_database_link_out>\d+) +(?P<ted_database_protocol>\S+)$')
# To: 10.169.14.240, Local: 10.169.14.158, Remote: 10.169.14.157
p3 = re.compile(r'^To: +(?P<ted_link_to>\S+), +Local: +'
r'(?P<ted_link_local_address>\S+), +Remote: +'
r'(?P<ted_link_remote_address>\S+)$')
# Local interface index: 333, Remote interface index: 0
p4 = re.compile(r'^Local +interface +index: +'
r'(?P<ted_link_local_ifindex>\d+), +Remote +interface +index: +'
r'(?P<ted_link_remote_ifindex>\d+)$')
for line in out.splitlines():
line = line.strip()
# TED database: 0 ISIS nodes 0 INET nodes
m = p1.match(line)
if m:
group = m.groupdict()
                ted_link_entry_list = []
ted_db_info = ret_dict.setdefault('ted-database-information', {})
ted_db_summary = ted_db_info.setdefault("ted-database-summary", {})
ted_db = ted_db_info.setdefault("ted-database", {})
                ted_db['ted-link'] = ted_link_entry_list # Ted link (list)
ted_db_summary['ted-database-iso-count'] = group['isis_nodes']
ted_db_summary['ted-database-inet-count'] = group['inet_nodes']
continue
# 10.34.2.250 Rtr 1876 2 2 OSPF(0.0.0.8)
m = p2.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ted_db[entry_key] = group_value
continue
# To: 10.169.14.240, Local: 10.169.14.158, Remote: 10.169.14.157
m = p3.match(line)
if m:
group = m.groupdict()
ted_link_entry_dict = {}
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ted_link_entry_dict[entry_key] = group_value
continue
# Local interface index: 333, Remote interface index: 0
m = p4.match(line)
if m:
group = m.groupdict()
for group_key, group_value in group.items():
entry_key = group_key.replace('_', '-')
ted_link_entry_dict[entry_key] = group_value
ted_link_entry_dict['ted-link-protocol'] = ted_db['ted-database-protocol']
                ted_link_entry_list.append(ted_link_entry_dict)
continue
return ret_dict
```
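A pattern worth calling out in the schemas above (and in the OSPF3 ones earlier): list-valued keys are wrapped in `Use(validator)`, and the validator enforces the container type before applying a per-item `Schema` to every element. Stripped down to its essentials, and with made-up key names, the idiom looks like this:
```python
# Toy reduction of the Use(...) list-validation idiom; key names are illustrative only.
from genie.metaparser.util.schemaengine import Schema, Use
from pyats.utils.exceptions import SchemaError
def validate_entry_list(value):
    if not isinstance(value, list):
        raise SchemaError('entry is not a list')
    entry_schema = Schema({'name': str, 'metric': str})
    for item in value:          # every element must match the per-item schema
        entry_schema.validate(item)
    return value
# Class-level schema as the parsers above declare it; genie applies it to the cli() output.
schema = {'entries': Use(validate_entry_list)}
validate_entry_list([{'name': 'ge-0/0/0.0', 'metric': '10'}])   # passes and returns the list
```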
#### File: viptela/tests/test_show_reboot_history.py
```python
import unittest
from unittest.mock import Mock
# ATS
from pyats.topology import Device
# Metaparser
from genie.metaparser.util.exceptions import SchemaEmptyParserError, \
SchemaMissingKeyError
# Parser
from genie.libs.parser.viptela.show_reboot_history import ShowRebootHistory
# ============================================
# Parser for the following commands
# * 'show reboot history'
# ============================================
class TestShowRebootHistory(unittest.TestCase):
device = Device(name='aDevice')
maxDiff = None
empty_output = {'execute.return_value': ''}
golden_output = {'execute.return_value': '''
srp_vedge# show reboot history
REBOOT DATE TIME REBOOT REASON
-------------------------------------------------------------------------
2020-06-04T04:54:36+00:00 Initiated by user
2020-06-16T09:19:57+00:00 Initiated by user
2020-06-18T13:28:53+00:00 Initiated by user - activate 99.99.999-4542
'''}
golden_parsed_output = {'reboot_date_time': {
'2020-06-04T04:54:36+00:00': {
'reboot_reason': 'Initiated by user'
},
'2020-06-16T09:19:57+00:00': {
'reboot_reason': 'Initiated by user'
},
'2020-06-18T13:28:53+00:00': {
'reboot_reason': 'Initiated by user - activate 99.99.999-4542'
}
}
}
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowRebootHistory(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_golden_new(self):
self.device = Mock(**self.golden_output)
obj = ShowRebootHistory(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jmedina28/EjerciciosClases",
"score": 4
} |
#### File: EjerciciosClases/clases/Palindromos_Instancias.py
```python
class Palindromos:
    def __init__(self, atributo):
        self.atributo = atributo
    def test(self, contenido): # checks whether the text is a palindrome
        a, b = 'áéíóúüñÁÉÍÓÚÜ', 'aeiouunAEIOUU'
        self.tilde = str.maketrans(a, b)
        contenido = contenido.lower() # Convert the text to lowercase.
        contenido = contenido.replace(' ', '') # Remove the spaces.
        contenido = contenido.translate(self.tilde) # Strip the accents.
        self.lista = list(contenido) # Turn the attribute into a list.
        # Reverse the list.
        self.listaresultado = list(reversed(contenido))
        # Compare the original attribute with the reversed one.
        if self.lista == self.listaresultado:
            return True
        else:
            return False
    def destructor(self): # if the entered text is not a palindrome, print the stored entry
        self.doct = open("Palindromos.txt", "r")
        ultima_linea = self.doct.readlines()[-1]
        if Palindromos(self.atributo).test(self.atributo) == False:
            print(str(ultima_linea))
        self.doct.close()
    def ejecutar(self):
        self.doct = open("Palindromos.txt", "r")
        self.ultima_linea = self.doct.readlines()[-1]
        if self.ultima_linea != "#" and Palindromos(self.atributo).test(self.ultima_linea) == True:
            print(str(self.ultima_linea))
        self.doct.close() # Up to here it checks whether the previous entry was a palindrome and, if so, prints it
        self.m_atributo = Palindromos(self.atributo).atributo.upper()
        self.doct = open("Palindromos.txt", "a")
        self.doct.write("\n"+str(self.m_atributo)) # write the entry to the txt file (the file keeps the values so they can be recovered after destruction)
        self.doct.close()
        if Palindromos(self.atributo).test(self.atributo) == True:
            print(True)
        else:
            print(False)
        Palindromos(self.atributo).destructor()
``` |
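A small driver for the class above. It assumes a `Palindromos.txt` file in the working directory, since every method reads its last line; the sketch seeds the file with the `#` sentinel that `ejecutar` treats as "no previous entry":
```python
# Illustrative driver; the file name is fixed inside the class.
with open("Palindromos.txt", "w") as seed:
    seed.write("#")  # sentinel last line so the first run has nothing to report
Palindromos("Dabale arroz a la zorra el abad").ejecutar()  # prints True and stores the phrase
Palindromos("clearly not one").ejecutar()  # prints the stored palindrome, False, then echoes the new entry
```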
{
"source": "jmedinaaws/DS4Ateam60_2021",
"score": 2
} |
#### File: DS4Ateam60_2021/turbiedad-master/app.py
```python
import dash
import dash_html_components as html
import dash_admin_components as dac
import dash_core_components as dcc
import json
import decimal
import pickle
from dash.dependencies import Input, Output
from dash.exceptions import PreventUpdate
import apps.calculos as calc
from apps.predecir import predecir_tab
from apps.analisis import analisis_tab
from apps.app_plots import plot_scatter
# =============================================================================
# Dash App and Flask Server
# =============================================================================
app = dash.Dash(__name__)
# =============================================================================
# Dash Admin Components
# =============================================================================
# Navbar
right_ui = dac.NavbarDropdown(
badge_label="!",
badge_color="danger",
src="https://quantee.ai",
header_text="2 Items",
children=[
dac.NavbarDropdownItem(
children="message 1",
date="today"
),
dac.NavbarDropdownItem(
children="message 2",
date="yesterday"
),
]
)
navbar = dac.Navbar(color = "info",
text="",
id="idNavbar"
# children=right_ui
)
# Sidebar
sidebar = dac.Sidebar(
dac.SidebarMenu(
[
# dac.SidebarHeader(children="Cards"),
# dac.SidebarMenuItem(id='tab_cards', label='Basic cards', icon='box'),
dac.SidebarMenuItem(id='tab_predecir', label='Predecir', icon='id-card'),
dac.SidebarMenuItem(id='tab_analisis', label='Análisis Historico', icon='image'),
# dac.SidebarHeader(children="Boxes"),
# dac.SidebarMenuItem(id='tab_basic_boxes', label='Basic boxes', icon='desktop'),
# dac.SidebarMenuItem(id='tab_value_boxes', label='Value/Info boxes', icon='suitcase')
]
),
style={'background-color': '#4B6587'},
title=' Aquarisc',
skin="dark",
# color="primary",
brand_color="info",
url="http://www.aquariscsat.com/",
src=app.get_asset_url("logo.svg"),
elevation=3,
opacity=0.8
)
# Body
body = dac.Body(
dac.TabItems([
predecir_tab,
analisis_tab,
])
)
# Controlbar
controlbar = dac.Controlbar(
[
html.Br(),
html.P("Slide to change graph "),
dcc.Slider(
id='controlbar-slider',
min=10,
max=50,
step=1,
value=20
)
],
skin = "light"
)
# Footer
footer = dac.Footer(
html.A(html.Img(src=app.get_asset_url("logo_DS4a.jpg")),
href = "https://c1-web.correlation-one.com/ds4a-latam",
# target = "_blank",
),
right_text = "TEAM 60 / 2021"
)
# stores the information so it persists across the session
# dcc.Store inside the app that stores the intermediate value
store= html.Div([
dcc.Store(id='vDataVariable'),
])
# =============================================================================
# App Layout
# =============================================================================
app.layout = dac.Page([navbar, sidebar, body,controlbar, footer, store ])
# =============================================================================
# Callbacks
# =============================================================================
def activate(input_id,
n_predecir, n_analisis):
# Depending on tab which triggered a callback, show/hide contents of app
if input_id == 'tab_predecir' and n_predecir:
return True, False , "Módulo de Predicción"
elif input_id == 'tab_analisis' and n_analisis:
return False, True , "Módulo de Historicos"
else:
return True, False, " " # App init
@app.callback([Output('content_predecir', 'active'),
Output('content_analisis', 'active'),
Output('idNavbar','text')],
[Input('tab_predecir', 'n_clicks'),
Input('tab_analisis', 'n_clicks')]
)
def display_tab(n_predecir, n_analisis):
ctx = dash.callback_context # Callback context to recognize which input has been triggered
# Get id of input which triggered callback
if not ctx.triggered:
raise PreventUpdate
else:
input_id = ctx.triggered[0]['prop_id'].split('.')[0]
return activate(input_id,
n_predecir, n_analisis)
def activatetab(input_id,
n_predecir, n_analisis):
# Depending on tab which triggered a callback, show/hide contents of app
if input_id == 'tab_predecir' and n_predecir:
return True, False
elif input_id == 'tab_analisis' and n_analisis:
return False, True
else:
return True, False # App init
@app.callback([Output('tab_predecir', 'active'),
Output('tab_analisis', 'active')],
[Input('tab_predecir', 'n_clicks'),
Input('tab_analisis', 'n_clicks')]
)
def activate_tab(n_predecir, n_analisis):
ctx = dash.callback_context # Callback context to recognize which input has been triggered
# Get id of input which triggered callback
if not ctx.triggered:
raise PreventUpdate
else:
input_id = ctx.triggered[0]['prop_id'].split('.')[0]
return activatetab(input_id,
n_predecir, n_analisis)
# callback
# Store vDataVariable in json (turbiedad, coagulante)
@app.callback(
# Output('vDataVariable','data'),
[Output('Idturbiedad','value'),
Output('Idcoagulante','value')],
[
Input("pH", "value"),
Input("Temp", "value"),
Input("Conduct", "value"),
Input("OxiRed","value"),
Input("boton","n_clicks"),
],
)
def on_button_click(pH, Temp,Conduct,OxiRed,n_clicks):
if n_clicks == 0:
raise PreventUpdate
vTurbiedad=calc.calcule_turbiedad(pH,Temp,Conduct,OxiRed)
vCoagulante=calc.calcule_coagulante(pH,Temp,Conduct)
return vTurbiedad, vCoagulante
# var = json.dumps([vTurbiedad, vCoagulante])
# return var
# updates Turbiedad and Coagulante
''''@app.callback(
Output('Idturbiedad','value'),
Output('Idcoagulante','value'),
Input('vDataVariable','data'))
def on_refresh_predecir(vDataVariable):
data=json.loads(vDataVariable,parse_float=decimal.Decimal)
return round(data[0],4),round(data[1],4) '''
# Update figure on slider change
@app.callback(
Output('box-graph', 'figure'),
[Input('controlbar-slider', 'value')])
def update_box_graph(value):
return plot_scatter(value)
# =============================================================================
# Run app
# =============================================================================
if __name__ == "__main__":
app.run_server(host="0.0.0.0", port="8050", debug=True)
```
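Note that the layout registers a `dcc.Store` with id `vDataVariable`, but the callbacks that would write to and read from it are commented out above; `on_button_click` currently sends both values straight to the `Idturbiedad`/`Idcoagulante` fields. If the store round-trip were restored, replacing those direct outputs, the wiring would look roughly like this sketch (it assumes the `app` object defined above):
```python
# Sketch only: not the app's current wiring.
import json
from dash.dependencies import Input, Output
from dash.exceptions import PreventUpdate

@app.callback(Output('vDataVariable', 'data'), [Input('boton', 'n_clicks')])
def write_store(n_clicks):
    vTurbiedad, vCoagulante = 1.234, 5.678  # placeholders for the computed values
    return json.dumps([vTurbiedad, vCoagulante])

@app.callback([Output('Idturbiedad', 'value'), Output('Idcoagulante', 'value')],
              [Input('vDataVariable', 'data')])
def read_store(data):
    if not data:
        raise PreventUpdate
    vTurbiedad, vCoagulante = json.loads(data)
    return round(vTurbiedad, 4), round(vCoagulante, 4)
```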
#### File: turbiedad-master/apps/calculos.py
```python
import pickle
import tpot
import pandas as pd
# load the model from disk
loaded_model = pickle.load(open('model/model_Other_RF.sav', 'rb'))
def calcule_turbiedad(ph,temp,conduct,OxiRed):
    data = {'pH': ph, ## can be created from the sensor reading
'conductividad': conduct,
'potOxiReduccion': OxiRed,
'T': temp,
'HR': 121,
'PV': 1.24,
'PA': 92,
'VV': 1.45,
'Rad': 9.2,
'P': 12,
'La Sierra': 0,
'Santander': 1,
'Timbio': 0}
input_data = pd.DataFrame(data, index = [0])
turbiedad = loaded_model.predict(input_data.to_numpy())
return turbiedad[0]
def calcule_turbiedad1(ph,temp,conduct,):
return ph+temp+conduct
def calcule_coagulante(ph,temp,conduct):
return ph+temp+conduct
``` |
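`calcule_turbiedad` assembles a single-row DataFrame in the column order the pickled random forest was trained on, fills the non-sensor columns with fixed placeholder values, and returns the first prediction. A quick smoke test, assuming `model/model_Other_RF.sav` is present and unpickles in this environment:
```python
# Illustrative call; the sensor readings are made up.
turbidity = calcule_turbiedad(ph=7.2, temp=18.5, conduct=120.0, OxiRed=350.0)
print("predicted turbidity:", turbidity)
print(calcule_coagulante(7.2, 18.5, 120.0))  # placeholder helper: just sums its inputs -> 145.7
```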
{
"source": "jmedinah09/MetPy",
"score": 3
} |
#### File: metpy/calc/indices.py
```python
import numpy as np
from .thermo import mixing_ratio, saturation_vapor_pressure
from .tools import get_layer
from .. import constants as mpconsts
from ..package_tools import Exporter
from ..units import atleast_1d, check_units, concatenate, units
from ..xarray import preprocess_xarray
exporter = Exporter(globals())
@exporter.export
@preprocess_xarray
@check_units('[temperature]', '[pressure]', '[pressure]')
def precipitable_water(dewpt, pressure, bottom=None, top=None):
r"""Calculate precipitable water through the depth of a sounding.
Formula used is:
.. math:: -\frac{1}{\rho_l g} \int\limits_{p_\text{bottom}}^{p_\text{top}} r dp
from [Salby1996]_, p. 28.
Parameters
----------
dewpt : `pint.Quantity`
Atmospheric dewpoint profile
pressure : `pint.Quantity`
Atmospheric pressure profile
bottom: `pint.Quantity`, optional
Bottom of the layer, specified in pressure. Defaults to None (highest pressure).
top: `pint.Quantity`, optional
The top of the layer, specified in pressure. Defaults to None (lowest pressure).
Returns
-------
`pint.Quantity`
The precipitable water in the layer
"""
# Sort pressure and dewpoint to be in decreasing pressure order (increasing height)
sort_inds = np.argsort(pressure)[::-1]
pressure = pressure[sort_inds]
dewpt = dewpt[sort_inds]
if top is None:
top = np.nanmin(pressure) * pressure.units
if bottom is None:
bottom = np.nanmax(pressure) * pressure.units
pres_layer, dewpt_layer = get_layer(pressure, dewpt, bottom=bottom, depth=bottom - top)
w = mixing_ratio(saturation_vapor_pressure(dewpt_layer), pres_layer)
# Since pressure is in decreasing order, pw will be the opposite sign of that expected.
pw = -1. * (np.trapz(w.magnitude, pres_layer.magnitude) * (w.units * pres_layer.units)
/ (mpconsts.g * mpconsts.rho_l))
return pw.to('millimeters')
@exporter.export
@preprocess_xarray
@check_units('[pressure]')
def mean_pressure_weighted(pressure, *args, **kwargs):
r"""Calculate pressure-weighted mean of an arbitrary variable through a layer.
Layer top and bottom specified in height or pressure.
Parameters
----------
pressure : `pint.Quantity`
Atmospheric pressure profile
args : `pint.Quantity`
Parameters for which the pressure-weighted mean is to be calculated.
heights : `pint.Quantity`, optional
Heights from sounding. Standard atmosphere heights assumed (if needed)
if no heights are given.
bottom: `pint.Quantity`, optional
The bottom of the layer in either the provided height coordinate
or in pressure. Don't provide in meters AGL unless the provided
height coordinate is meters AGL. Default is the first observation,
assumed to be the surface.
depth: `pint.Quantity`, optional
The depth of the layer in meters or hPa.
Returns
-------
    list of `pint.Quantity`
        layer mean value for each variable passed in `args`
"""
heights = kwargs.pop('heights', None)
bottom = kwargs.pop('bottom', None)
depth = kwargs.pop('depth', None)
ret = [] # Returned variable means in layer
layer_arg = get_layer(pressure, *args, heights=heights,
bottom=bottom, depth=depth)
layer_p = layer_arg[0]
layer_arg = layer_arg[1:]
# Taking the integral of the weights (pressure) to feed into the weighting
# function. Said integral works out to this function:
pres_int = 0.5 * (layer_p[-1].magnitude**2 - layer_p[0].magnitude**2)
for i, datavar in enumerate(args):
arg_mean = np.trapz(layer_arg[i] * layer_p, x=layer_p) / pres_int
ret.append(arg_mean * datavar.units)
return ret
@exporter.export
@preprocess_xarray
@check_units('[pressure]', '[speed]', '[speed]', '[length]')
def bunkers_storm_motion(pressure, u, v, heights):
r"""Calculate the Bunkers right-mover and left-mover storm motions and sfc-6km mean flow.
Uses the storm motion calculation from [Bunkers2000]_.
Parameters
----------
pressure : `pint.Quantity`
Pressure from sounding
u : `pint.Quantity`
U component of the wind
v : `pint.Quantity`
V component of the wind
heights : `pint.Quantity`
Heights from sounding
Returns
-------
right_mover: `pint.Quantity`
U and v component of Bunkers RM storm motion
left_mover: `pint.Quantity`
U and v component of Bunkers LM storm motion
wind_mean: `pint.Quantity`
U and v component of sfc-6km mean flow
"""
# mean wind from sfc-6km
wind_mean = concatenate(mean_pressure_weighted(pressure, u, v, heights=heights,
depth=6000 * units('meter')))
# mean wind from sfc-500m
wind_500m = concatenate(mean_pressure_weighted(pressure, u, v, heights=heights,
depth=500 * units('meter')))
# mean wind from 5.5-6km
wind_5500m = concatenate(mean_pressure_weighted(pressure, u, v, heights=heights,
depth=500 * units('meter'),
bottom=heights[0] + 5500 * units('meter')))
# Calculate the shear vector from sfc-500m to 5.5-6km
shear = wind_5500m - wind_500m
# Take the cross product of the wind shear and k, and divide by the vector magnitude and
    # multiply by the deviation empirically calculated in Bunkers (2000) (7.5 m/s)
shear_cross = concatenate([shear[1], -shear[0]])
rdev = shear_cross * (7.5 * units('m/s').to(u.units) / np.hypot(*shear))
# Add the deviations to the layer average wind to get the RM motion
right_mover = wind_mean + rdev
# Subtract the deviations to get the LM motion
left_mover = wind_mean - rdev
return right_mover, left_mover, wind_mean
@exporter.export
@preprocess_xarray
@check_units('[pressure]', '[speed]', '[speed]')
def bulk_shear(pressure, u, v, heights=None, bottom=None, depth=None):
r"""Calculate bulk shear through a layer.
Layer top and bottom specified in meters or pressure.
Parameters
----------
pressure : `pint.Quantity`
Atmospheric pressure profile
u : `pint.Quantity`
U-component of wind.
v : `pint.Quantity`
V-component of wind.
    heights : `pint.Quantity`, optional
Heights from sounding
depth: `pint.Quantity`, optional
The depth of the layer in meters or hPa. Defaults to 100 hPa.
bottom: `pint.Quantity`, optional
The bottom of the layer in height or pressure coordinates.
If using a height, it must be in the same coordinates as the given
heights (i.e., don't use meters AGL unless given heights
are in meters AGL.) Defaults to the highest pressure or lowest height given.
Returns
-------
u_shr: `pint.Quantity`
u-component of layer bulk shear
v_shr: `pint.Quantity`
v-component of layer bulk shear
"""
_, u_layer, v_layer = get_layer(pressure, u, v, heights=heights,
bottom=bottom, depth=depth)
u_shr = u_layer[-1] - u_layer[0]
v_shr = v_layer[-1] - v_layer[0]
return u_shr, v_shr
@exporter.export
@preprocess_xarray
@check_units('[energy] / [mass]', '[speed] * [speed]', '[speed]')
def supercell_composite(mucape, effective_storm_helicity, effective_shear):
r"""Calculate the supercell composite parameter.
The supercell composite parameter is designed to identify
environments favorable for the development of supercells,
and is calculated using the formula developed by
[Thompson2004]_:
.. math:: \text{SCP} = \frac{\text{MUCAPE}}{1000 \text{J/kg}} *
\frac{\text{Effective SRH}}{50 \text{m}^2/\text{s}^2} *
\frac{\text{Effective Shear}}{20 \text{m/s}}
The effective_shear term is set to zero below 10 m/s and
capped at 1 when effective_shear exceeds 20 m/s.
Parameters
----------
mucape : `pint.Quantity`
Most-unstable CAPE
effective_storm_helicity : `pint.Quantity`
Effective-layer storm-relative helicity
effective_shear : `pint.Quantity`
Effective bulk shear
Returns
-------
`pint.Quantity`
supercell composite
"""
effective_shear = np.clip(atleast_1d(effective_shear), None, 20 * units('m/s'))
effective_shear[effective_shear < 10 * units('m/s')] = 0 * units('m/s')
effective_shear = effective_shear / (20 * units('m/s'))
return ((mucape / (1000 * units('J/kg')))
* (effective_storm_helicity / (50 * units('m^2/s^2')))
* effective_shear).to('dimensionless')
@exporter.export
@preprocess_xarray
@check_units('[energy] / [mass]', '[length]', '[speed] * [speed]', '[speed]')
def significant_tornado(sbcape, surface_based_lcl_height, storm_helicity_1km, shear_6km):
r"""Calculate the significant tornado parameter (fixed layer).
The significant tornado parameter is designed to identify
environments favorable for the production of significant
tornadoes contingent upon the development of supercells.
It's calculated according to the formula used on the SPC
mesoanalysis page, updated in [Thompson2004]_:
.. math:: \text{SIGTOR} = \frac{\text{SBCAPE}}{1500 \text{J/kg}} * \frac{(2000 \text{m} -
\text{LCL}_\text{SB})}{1000 \text{m}} *
              \frac{SRH_{\text{1km}}}{150 \text{m}^2/\text{s}^2} *
\frac{\text{Shear}_\text{6km}}{20 \text{m/s}}
The lcl height is set to zero when the lcl is above 2000m and
capped at 1 when below 1000m, and the shr6 term is set to 0
when shr6 is below 12.5 m/s and maxed out at 1.5 when shr6
exceeds 30 m/s.
Parameters
----------
sbcape : `pint.Quantity`
Surface-based CAPE
surface_based_lcl_height : `pint.Quantity`
Surface-based lifted condensation level
storm_helicity_1km : `pint.Quantity`
Surface-1km storm-relative helicity
shear_6km : `pint.Quantity`
Surface-6km bulk shear
Returns
-------
`pint.Quantity`
significant tornado parameter
"""
surface_based_lcl_height = np.clip(atleast_1d(surface_based_lcl_height),
1000 * units.m, 2000 * units.m)
surface_based_lcl_height[surface_based_lcl_height > 2000 * units.m] = 0 * units.m
surface_based_lcl_height = ((2000. * units.m - surface_based_lcl_height)
/ (1000. * units.m))
shear_6km = np.clip(atleast_1d(shear_6km), None, 30 * units('m/s'))
shear_6km[shear_6km < 12.5 * units('m/s')] = 0 * units('m/s')
shear_6km /= 20 * units('m/s')
return ((sbcape / (1500. * units('J/kg')))
* surface_based_lcl_height
* (storm_helicity_1km / (150. * units('m^2/s^2')))
* shear_6km)
@exporter.export
@preprocess_xarray
@check_units('[pressure]', '[speed]', '[speed]', '[length]', '[speed]', '[speed]')
def critical_angle(pressure, u, v, heights, stormu, stormv):
r"""Calculate the critical angle.
The critical angle is the angle between the 10m storm-relative inflow vector
and the 10m-500m shear vector. A critical angle near 90 degrees indicates
that a storm in this environment on the indicated storm motion vector
is likely ingesting purely streamwise vorticity into its updraft, and [Esterheld2008]_
showed that significantly tornadic supercells tend to occur in environments
with critical angles near 90 degrees.
Parameters
----------
pressure : `pint.Quantity`
Pressures from sounding.
u : `pint.Quantity`
U-component of sounding winds.
v : `pint.Quantity`
V-component of sounding winds.
heights : `pint.Quantity`
Heights from sounding.
stormu : `pint.Quantity`
U-component of storm motion.
stormv : `pint.Quantity`
V-component of storm motion.
Returns
-------
`pint.Quantity`
critical angle in degrees
"""
# Convert everything to m/s
u = u.to('m/s')
v = v.to('m/s')
stormu = stormu.to('m/s')
stormv = stormv.to('m/s')
sort_inds = np.argsort(pressure[::-1])
pressure = pressure[sort_inds]
heights = heights[sort_inds]
u = u[sort_inds]
v = v[sort_inds]
# Calculate sfc-500m shear vector
shr5 = bulk_shear(pressure, u, v, heights=heights, depth=500 * units('meter'))
# Make everything relative to the sfc wind orientation
umn = stormu - u[0]
vmn = stormv - v[0]
vshr = np.asarray([shr5[0].magnitude, shr5[1].magnitude])
vsm = np.asarray([umn.magnitude, vmn.magnitude])
angle_c = np.dot(vshr, vsm) / (np.linalg.norm(vshr) * np.linalg.norm(vsm))
critical_angle = np.arccos(angle_c) * units('radian')
return critical_angle.to('degrees')
``` |
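All of these functions expect unit-tagged arrays. A short, self-contained sketch with a made-up four-level profile (the numbers are invented for the example, so they carry no meteorological significance):
```python
# Illustrative sounding; values are not from a real observation.
import numpy as np
from metpy.calc import bulk_shear, precipitable_water
from metpy.units import units

pressure = np.array([1000., 850., 700., 500.]) * units.hPa
dewpoint = np.array([20., 12., 2., -15.]) * units.degC
heights = np.array([0., 1500., 3000., 5500.]) * units.meter
u_wind = np.array([5., 10., 15., 25.]) * units('m/s')
v_wind = np.array([2., 4., 6., 10.]) * units('m/s')

pw = precipitable_water(dewpoint, pressure)  # column water, returned in millimeters
u_shr, v_shr = bulk_shear(pressure, u_wind, v_wind,
                          heights=heights, depth=3000 * units.meter)
print(pw, u_shr, v_shr)
```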
{
"source": "JMedinilla/Dareby",
"score": 3
} |
#### File: JMedinilla/Dareby/Workout_Find.py
```python
from bs4 import BeautifulSoup as BS
from urllib import request as URLRequest
urlBase = "https://darebee.com"
urlFilter = "https://darebee.com/workouts.html?start="
def findAllWorkouts():
workoutList = []
loopCount = 0
loopMax = 9999
while loopCount <= loopMax:
tmpFilter = urlFilter + str(loopCount)
workoutDivs = BS(URLRequest.urlopen(tmpFilter),
"html.parser").select(".pull-none")
if len(workoutDivs) < 1:
loopMax = 0
else:
for div in workoutDivs:
wkUrl = div.select_one("a[href]")["href"]
workoutList.append(urlBase + wkUrl)
loopCount += 15
print(len(workoutList))
return workoutList
``` |
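`findAllWorkouts` pages through the darebee.com listing 15 entries at a time (the site's paging step) until a page yields no `.pull-none` cards, collecting absolute workout URLs along the way. Running it is a one-liner, but it performs live HTTP requests against the site:
```python
# Live-scraping sketch: this hits darebee.com repeatedly, so it is slow and network-dependent.
if __name__ == "__main__":
    urls = findAllWorkouts()
    print("collected", len(urls), "workout links")
    for url in urls[:5]:
        print(url)
```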
{
"source": "jmeekhof/bazel_rules_pmd",
"score": 2
} |
#### File: tests/analysis/tests.bzl
```python
load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts")
load("@rules_pmd//pmd:defs.bzl", "pmd")
def _expand_paths(ctx, values):
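    # Substitute the {{source_dir}} / {{output_dir}} placeholders in the expected paths
    # with this test's package path and bin directory, so the assertions are location-independent.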
source_dir = ctx.build_file_path.replace("/BUILD", "")
output_dir = ctx.bin_dir.path
return [
value
.replace("{{source_dir}}", source_dir)
.replace("{{output_dir}}", output_dir)
for value in values
]
# Action full contents test
def _action_full_contents_test_impl(ctx):
env = analysistest.begin(ctx)
actions = analysistest.target_actions(env)
asserts.equals(env, 3, len(actions))
# Action: writing file "srcs.txt"
action_write_file_srcs = actions[0]
action_write_file_srcs_outputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/srcs_test_target_full.txt",
])
action_write_file_srcs_outputs_actual = [file.path for file in action_write_file_srcs.outputs.to_list()]
asserts.equals(env, action_write_file_srcs_outputs_expected, action_write_file_srcs_outputs_actual)
# Action: writing file "srcs_ignore.txt"
action_write_file_srcs_ignore = actions[1]
    action_write_file_srcs_ignore_outputs_expected = _expand_paths(env.ctx, [
        "{{output_dir}}/{{source_dir}}/srcs_ignore_test_target_full.txt",
    ])
    action_write_file_srcs_ignore_outputs_actual = [file.path for file in action_write_file_srcs_ignore.outputs.to_list()]
    asserts.equals(env, action_write_file_srcs_ignore_outputs_expected, action_write_file_srcs_ignore_outputs_actual)
# Action: PMD
action_pmd = actions[2]
action_pmd_arguments_expected = _expand_paths(env.ctx, [
"bazel-out/host/bin/pmd/pmd",
"-filelist",
"{{output_dir}}/{{source_dir}}/srcs_test_target_full.txt",
"-ignorelist",
"{{output_dir}}/{{source_dir}}/srcs_ignore_test_target_full.txt",
"-encoding",
"UTF-8",
"-language",
"java",
"-version",
"1.8",
"-rulesets",
"{{source_dir}}/rulesets.xml",
"-minimumpriority",
"42",
"-format",
"html",
"-reportfile",
"{{output_dir}}/{{source_dir}}/test_target_full_pmd_report.html",
"-failOnViolation",
"false",
"-no-cache",
"-threads",
"42",
])
action_pmd_arguments_actual = action_pmd.argv
action_pmd_inputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/srcs_test_target_full.txt",
"{{source_dir}}/path A.kt",
"{{source_dir}}/path B.kt",
"{{source_dir}}/path C.kt",
"{{output_dir}}/{{source_dir}}/srcs_ignore_test_target_full.txt",
"{{source_dir}}/path D.kt",
"{{source_dir}}/path E.kt",
"{{source_dir}}/rulesets.xml",
"bazel-out/host/internal/_middlemen/pmd_Spmd-runfiles",
"bazel-out/host/bin/pmd/pmd.jar",
"bazel-out/host/bin/pmd/pmd",
])
action_pmd_inputs_actual = [file.path for file in action_pmd.inputs.to_list()]
action_pmd_outputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/test_target_full_pmd_report.html",
])
action_pmd_outputs_actual = [file.path for file in action_pmd.outputs.to_list()]
asserts.equals(env, action_pmd_arguments_expected, action_pmd_arguments_actual)
asserts.equals(env, action_pmd_inputs_expected, action_pmd_inputs_actual)
asserts.equals(env, action_pmd_outputs_expected, action_pmd_outputs_actual)
return analysistest.end(env)
action_full_contents_test = analysistest.make(_action_full_contents_test_impl)
def _test_action_full_contents():
pmd(
name = "test_target_full",
srcs = ["path A.kt", "path B.kt", "path C.kt"],
srcs_ignore = ["path D.kt", "path E.kt"],
srcs_language = "java",
srcs_language_version = "1.8",
rulesets = ["rulesets.xml"],
rules_minimum_priority = 42,
report_format = "html",
fail_on_violation = False,
threads_count = 42,
)
action_full_contents_test(
name = "action_full_contents_test",
target_under_test = ":test_target_full",
)
# Action blank contents test
def _action_blank_contents_test_impl(ctx):
env = analysistest.begin(ctx)
actions = analysistest.target_actions(env)
asserts.equals(env, 2, len(actions))
# Action: writing file "srcs.txt"
action_write_file_srcs = actions[0]
action_write_file_srcs_outputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/srcs_test_target_blank.txt",
])
action_write_file_srcs_outputs_actual = [file.path for file in action_write_file_srcs.outputs.to_list()]
asserts.equals(env, action_write_file_srcs_outputs_expected, action_write_file_srcs_outputs_actual)
# Action: PMD
action_pmd = actions[1]
action_pmd_arguments_expected = _expand_paths(env.ctx, [
"bazel-out/host/bin/pmd/pmd",
"-filelist",
"{{output_dir}}/{{source_dir}}/srcs_test_target_blank.txt",
"-encoding",
"UTF-8",
"-language",
"java",
"-rulesets",
"{{source_dir}}/rulesets.xml",
"-minimumpriority",
"5",
"-format",
"text",
"-reportfile",
"{{output_dir}}/{{source_dir}}/test_target_blank_pmd_report.txt",
"-failOnViolation",
"true",
"-no-cache",
"-threads",
"1",
])
action_pmd_arguments_actual = action_pmd.argv
action_pmd_inputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/srcs_test_target_blank.txt",
"{{source_dir}}/path A.kt",
"{{source_dir}}/path B.kt",
"{{source_dir}}/path C.kt",
"{{source_dir}}/rulesets.xml",
"bazel-out/host/internal/_middlemen/pmd_Spmd-runfiles",
"bazel-out/host/bin/pmd/pmd.jar",
"bazel-out/host/bin/pmd/pmd",
])
action_pmd_inputs_actual = [file.path for file in action_pmd.inputs.to_list()]
action_pmd_outputs_expected = _expand_paths(env.ctx, [
"{{output_dir}}/{{source_dir}}/test_target_blank_pmd_report.txt",
])
action_pmd_outputs_actual = [file.path for file in action_pmd.outputs.to_list()]
asserts.equals(env, action_pmd_arguments_expected, action_pmd_arguments_actual)
asserts.equals(env, action_pmd_inputs_expected, action_pmd_inputs_actual)
asserts.equals(env, action_pmd_outputs_expected, action_pmd_outputs_actual)
return analysistest.end(env)
action_blank_contents_test = analysistest.make(_action_blank_contents_test_impl)
def _test_action_blank_contents():
pmd(
name = "test_target_blank",
srcs = ["path A.kt", "path B.kt", "path C.kt"],
rulesets = ["rulesets.xml"],
)
action_blank_contents_test(
name = "action_blank_contents_test",
target_under_test = ":test_target_blank",
)
# Suite
def test_suite(name):
_test_action_full_contents()
_test_action_blank_contents()
native.test_suite(
name = name,
tests = [
":action_full_contents_test",
":action_blank_contents_test",
],
)
``` |
{
"source": "jmeel14/yt-databot-discord",
"score": 3
} |
#### File: bot_cmds/cmd_fragments/_time_parse.py
```python
from re import search as re_s
def convert_duration(time_str):
resp_obj = { "H": None, "M": None, "S": None }
re_match = re_s('^PT(\d*H)?(\d*M)?(\d*S)?', time_str)
if re_match:
re_str = re_match.groups()
for grp in re_str:
if grp and grp[-1] in resp_obj:
if "H" in grp:
resp_obj[grp[-1]] = grp[:-1] + ":"
else:
resp_obj[grp[-1]] = grp[:-1]
ret_str = "{0}{1}:{2}".format(
resp_obj["H"] or "",
resp_obj["M"] or "00",
resp_obj["S"] or "00"
)
return ret_str
else:
return None
```
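`convert_duration` turns a YouTube-style ISO-8601 duration (`PT#H#M#S`) into a clock-style string. Note that minutes and seconds are not zero-padded, and the hour component carries its own trailing colon, so the outputs look like this:
```python
# Expected behaviour of the helper above.
print(convert_duration("PT4M13S"))      # 4:13
print(convert_duration("PT1H5M3S"))     # 1:5:3  (no zero padding)
print(convert_duration("PT2H"))         # 2:00:00
print(convert_duration("live stream"))  # None (no PT prefix, so the regex does not match)
```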
#### File: yt-databot-discord/bot_cmds/cmds_eval.py
```python
from . import cmd_main
import re
from asyncio import sleep
import discord
async def await_coro(coro):
try:
awaited = await coro
return awaited
except Exception as CoroException:
if CoroException.args[0] == "cannot await":
print(CoroException)
return coro
async def cmd_func(cmd_trigger, cmd_str, msg_obj, **kwargs):
try:
eval_client = kwargs["self_client"]
eval_http = kwargs["self_http"]
eval_disc = discord
eval_re = re.compile("^" + cmd_str.split()[0] + "\s(.*)", flags=re.DOTALL)
eval_str = eval_re.search(cmd_str).groups()[0]
eval_run = await await_coro(eval(eval_str))
output_embed = cmd_main.Embed(
title = "Evaluation output",
description = "The code you requested to evaluate outputs the following:",
colour = 0x00BB55
)
output_embed.add_field(
name = "Input",
value = "```py\n" + str(cmd_str) + "\n```",
inline = False
)
output_embed.add_field(
name = "Output",
value = "```{0}```".format(eval_run)
)
except BaseException as EvalError:
output_embed = cmd_main.Embed(
title = "Evaluation output",
description = "An error occurred evaluating your code:",
colour = 0xDD0000
)
output_embed.add_field(
name = "Error",
value ="```{0}```".format(str(EvalError))
)
out_msg = await msg_obj.channel.send(
content = None,
embed = output_embed
)
return {
"output_admin": True,
"output_msg": out_msg,
"trigger_msg": msg_obj
}
async def cmd_func2(cmd_trigger, cmd_str, msg_obj, **kwargs):
try:
exec_client = kwargs["self_client"]
exec_http = kwargs["self_http"]
exec_re = re.compile("^" + cmd_str.split()[0] + "\s(.*)", re.DOTALL)
exec_str = exec_re.search(cmd_str).groups()[0]
exec_run = await await_coro(exec(exec_str))
output_embed = cmd_main.Embed(
title = "Evaluation output",
description = "The code you requested to evaluate outputs the following:",
colour = 0x00BB55
)
output_embed.add_field(
name = "Input",
value = "```py\n" + str(cmd_str) + "\n```",
inline = False
)
output_embed.add_field(
            name = "Output",
value = "```py\n" + str(exec_run) + "\n```"
)
except BaseException as EvalError:
try:
print(exec_str)
except:
pass
output_embed = cmd_main.Embed(
title = "Evaluation output",
description = "An error occurred evaluating your code:",
colour = 0xDD0000
)
output_embed.add_field(
name = "Error",
value = EvalError.args
)
out_msg = await msg_obj.channel.send(
content = None,
embed = output_embed
)
return {
"output_admin": True,
"output_msg": out_msg,
"trigger_mg": msg_obj
}
cmd_eval = cmd_main.Command(
"Eval",
"eval evaluate evaluation evaluator",
None,
"This command outputs developer-level functions, and is admin-only.",
cmd_func,
True
)
cmd_exec = cmd_main.Command(
"Exec",
"exec execute executor execution",
None,
"This command outputs developer-level functions, and is admin-only.",
cmd_func2,
True
)
```
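The eval/exec commands rely on a small regex to strip the trigger word and keep everything after the first whitespace (including newlines, thanks to `re.DOTALL`). A standalone sketch of that extraction with an invented command string; applying `re.escape` to the trigger would additionally harden it against regex metacharacters:
```python
# Standalone illustration of the trigger-stripping regex used by cmd_func/cmd_func2
# (the command string below is invented).
import re

cmd_str = "eval 1 + 1\nprint('multi-line input is kept')"
eval_re = re.compile("^" + cmd_str.split()[0] + r"\s(.*)", flags=re.DOTALL)
eval_str = eval_re.search(cmd_str).groups()[0]
print(eval_str)  # everything after the first whitespace, newlines included
```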
#### File: yt-databot-discord/bot_config/_cfg_json.py
```python
from json import loads as json_ls
from json import dumps as json_ds
def read_json(json_file):
    # Read and parse a JSON file; the context manager closes the handle even on errors
    with open(json_file, 'r') as json_o:
        return json_ls(json_o.read())
def write_json(json_file, json_content):
    # Write an already-serialized JSON string to disk
    with open(json_file, 'w') as op_file:
        op_file.write(json_content)
    return True
```
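A minimal round-trip sketch for these helpers (file name and payload are illustrative, and the import path is assumed from the file layout); note that `write_json` expects an already-serialized string, so `dumps` is applied by the caller:
```python
# Hypothetical round trip using the helpers above.
from json import dumps

from bot_config._cfg_json import read_json, write_json  # import path assumed from the repo layout

write_json("settings.json", dumps({"prefix": "!", "admins": [123]}))
print(read_json("settings.json"))  # -> {'prefix': '!', 'admins': [123]}
```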
#### File: jmeel14/yt-databot-discord/bot_main.py
```python
import discord
import bot_cmds
import bot_config
import re
import random
import aiohttp
import asyncio
import os
import logging
logging.basicConfig(level=logging.INFO)
import traceback
CMD_LIST = bot_cmds.cmd_main.cmd_list
CLIENT_LOGGER = logging.getLogger('discord')
async def generic_err(prefix, discord_client, msg_obj, cmd_name):
if len(prefix) > 3:
err_str = "".join([
"There was an issue processing your requested command '", cmd_name,"'.",
"\n", "This should not have happened, so please let the developer know using ",
"`@MENTION_BOT suggest h <description of the problem>`."
])
err_embed = discord.Embed(
title = "Invalid Command Error",
description = err_str,
colour = 0xDD0000
)
err_embed.set_footer(text = "Unknown command error")
await msg_obj.channel.send(
content = None,
embed = err_embed
)
class Bot(discord.Client):
def __init__(self, ownerID, guildMeta, bot_token):
super(Bot, self).__init__()
self.owner_guild_id = guildMeta["self_guild"]
self.support_channel_id = guildMeta["self_support"]
self.owner_id = ownerID
self.client = self
self.run(bot_token)
async def on_ready(self):
try:
await self.user.edit(avatar = open('./avatar.png', 'rb').read())
CLIENT_LOGGER.log(20, "Avatar successfully updated to meet latest version on disk.")
except:
pass
try:
await self.change_presence(
activity = discord.Activity(name = " for @mention help", type = discord.ActivityType.watching)
)
CLIENT_LOGGER.log(20, "Successfully set activity status to 'Watching for @mention help'.")
except Exception as StatusError:
print(StatusError)
self.http_session = aiohttp.ClientSession()
CLIENT_LOGGER.log(20, "START-UP: Bot started with ID {0.id} and name {0.name}".format(self.user))
async def on_guild_join(self, gld):
join_str = "JOIN: Bot joined guild with ID {0} and name {1}\nMembers: {2} Humans:{3}"
join_str = join_str.format(
gld.id, gld.name, len(gld.members),
len([mbr for mbr in gld.members if mbr.bot]) / len(gld.members)
)
print(join_str)
async def on_guild_remove(self, gld):
print("Left/removed from guild {0}".format(gld.name))
async def on_message(self, msg_obj):
if not msg_obj.author.bot or msg_obj.author.id == self.owner_id:
is_DM = isinstance(msg_obj.channel, discord.abc.PrivateChannel)
if is_DM:
print_str = {
"announce": "INCOMING MESSAGE | [{0.author.id}] {0.author.name} : {0.content}"
}
print(print_str["announce"].format(msg_obj))
sv_prefix = bot_config.cfg_prefix.check_prefix(msg_obj, is_DM)
msg_cmd = bot_config.cfg_func.check_command(msg_obj, self.user.id, sv_prefix)
if msg_cmd:
cmd_name = msg_cmd.split(" ")[0]
try:
if cmd_name in CMD_LIST:
if CMD_LIST[cmd_name]['admin'] and msg_obj.author.id != self.owner_id:
await generic_err(sv_prefix, self, msg_obj, cmd_name)
return
else:
resp_msg = await CMD_LIST[cmd_name]['func'](
cmd_name, msg_cmd, msg_obj,
msg_cmd_compiled = "**Command response for <@{0}>:** {1}".format(msg_obj.author.id, msg_cmd),
msg_guild_prefix = sv_prefix,
self_client = self.client,
self_http = self.http_session,
self_guild_meta = {
"self_guild": self.get_guild(self.owner_guild_id),
"self_support_chnl": self.get_channel(self.support_channel_id),
"self_author": self.owner_id
}
)
try:
if resp_msg["output_admin"]:
await asyncio.sleep(30)
await resp_msg["output_msg"].delete()
try:
await resp_msg["trigger_msg"].delete()
except:
pass
except:
pass
except:
if msg_obj.channel.id not in [110373943822540800, 468690756899438603, 110374153562886144]:
await generic_err(sv_prefix, self, msg_obj, cmd_name)
if not isinstance(msg_obj.channel, discord.abc.PrivateChannel):
print_str = [
"ERROR | In guild [{0.guild.id}] {0.guild.name}, by user [{0.author.id}] {0.author.name}",
"{0.author.name}: {1}"
]
else:
print_str = [
"ERROR | In DM from [{0.author.id}] {0.author.name}",
"{0.author.name}: {1}"
]
print(print_str[0].format(msg_obj))
print(print_str[1].format(msg_obj, msg_cmd))
traceback.print_exc()
if msg_obj.author.id == self.owner_id:
if msg_obj.content == "bot.shutdown":
await self.client.logout()
await self.http_session.close()
elif msg_obj.content == "bot.restart":
os.system('./bot_start.sh')
await self.client.logout()
DISC_YT_BOT = Bot(
bot_config.cfg_auth.get_data("bot_author", True, extended_prop="bot_meta"),
bot_config.cfg_auth.get_data("bot_guild_meta", True, extended_prop="bot_meta"),
bot_config.cfg_auth.get_data('bot_key', True, extended_prop="bot_meta")
)
``` |
{
"source": "jmehnle/aiohttpproxy",
"score": 2
} |
#### File: lib/aiohttpproxy/server.py
```python
import io
import os, os.path
import logging
import time
import asyncio
import aiohttp
import aiohttp.server
import urllib.parse
import json
from contextlib import suppress
import aiohttpproxy
from aiohttpproxy import cache
class ProxyRequestHandler(aiohttp.server.ServerHttpProtocol):
def __init__(self, cache = None):
super(ProxyRequestHandler, self).__init__()
self.cache = cache
@asyncio.coroutine
def handle_request(self, message, payload):
now = time.time()
url = message.path
url_parsed = urllib.parse.urlparse(url)
logging.info('{0} {1}'.format(message.method, url))
if message.method == 'GET' and url == '/ping':
# Generate pong.
logging.info('Ping, Pong.')
yield from self.send_pong_response(200, message.version)
return
if message.method != 'GET' or url_parsed.scheme.lower() != 'http':
# Only GET method and http: scheme supported.
logging.info('Refusing non-GET/HTTP request: {0}'.format(url))
yield from self.send_response(501, message.version)
return
if self.cache:
try:
cache_entry = self.cache[url]
except KeyError:
cache_entry = None
serve_from_cache = self.cache and bool(cache_entry)
store_in_cache = self.cache and not bool(cache_entry)
if serve_from_cache:
cache_metadata = cache_entry.metadata
cache_file = open(cache_entry.filename, 'rb')
logging.debug('Serving response from cache (filename = {0}, age = {1:.3}s).'.format(
cache_entry.filename, now - cache_entry.mtime))
response = cache_metadata['response']
else:
# Execute request and cache response:
logging.debug('Executing request.')
response = yield from aiohttp.request('GET', url, headers = message.headers)
response_content = response.content
if store_in_cache:
try:
size = int(response.headers['Content-Length'])
except (ValueError, KeyError):
size = None
try:
cache_metadata = { 'response': response }
cache_entry = self.cache.new_entry(url, cache_metadata, size)
cache_file = open(cache_entry.filename, 'wb')
logging.debug('Storing response in cache (filename = {0}).'.format(
cache_entry.filename))
except cache.CacheSizeExceededError:
# Do not cache responses larger than cache size.
store_in_cache = False
logging.debug('Not caching response exceeding overall cache size ({0} > {1}).'.format(
size, self.cache.max_size))
proxy_response = aiohttp.Response(
self.writer, response.status, http_version = response.version)
proxy_response_headers = [
(name, value)
for name, value
in response.headers.items(getall = True)
            if name not in ('CONTENT-ENCODING',)  # tuple, not bare string: avoids accidental substring matching
]
# Copy response headers, except for Content-Encoding header,
# since unfortunately aiohttp transparently decodes content.
proxy_response.add_headers(*proxy_response_headers)
proxy_response.send_headers()
try:
while True:
if serve_from_cache:
chunk = cache_file.read(io.DEFAULT_BUFFER_SIZE)
else:
chunk = yield from response_content.read(io.DEFAULT_BUFFER_SIZE)
if not chunk:
break
proxy_response.write(chunk)
if store_in_cache:
cache_file.write(chunk)
yield from proxy_response.write_eof()
finally:
if serve_from_cache or store_in_cache:
cache_file.close()
if store_in_cache:
self.cache[url] = cache_entry
@asyncio.coroutine
def send_response(self, status, http_version, headers = None, text = b''):
response = aiohttp.Response(self.writer, status, http_version = http_version)
if isinstance(headers, list):
for name, value in headers:
response.add_header(name, value)
response.add_header('Content-Length', str(len(text)))
response.send_headers()
response.write(text)
yield from response.write_eof()
@asyncio.coroutine
def send_pong_response(self, status, http_version):
response_text = json.dumps(
{
'version': aiohttpproxy.__version__
},
indent = 4
).encode('ascii')
yield from self.send_response(status, http_version, text = response_text)
# vim:sw=4 sts=4
``` |
{
"source": "jmeile/JMMidiBassPedalController",
"score": 2
} |
#### File: JMMidiBassPedalController/src/MidiConnector.py
```python
from __future__ import print_function
import traceback
import sys
import fnmatch
from rtmidi import MidiIn, MidiOut
from rtmidi.midiutil import open_midiport
from MidiProcessor import MidiProcessor
from CustomLogger import CustomLogger, PrettyFormat
import logging
from autologging import logged
import xmlschema
import platform
VIRTUAL_PREFFIX = "Virtual:"
#Creates a logger for this module.
logger = logging.getLogger(CustomLogger.get_module_name())
#Setups the logger with default settings
logger.setup()
#Register the logger with this class
@logged(logger)
class MidiConnector:
"""
  Opens the MIDI ports and processes the incoming connections
"""
def __init__(self, args, xsd_schema = 'conf/MidiBassPedalController.xsd'):
"""
Initializes the MidiConnector class
Parameters:
* args: command-line arguments
* xsd_schema: path to the xsd schema
"""
self.__log.debug("Initializing MidiConnector")
self._args = args
self._xsd_schema = xsd_schema
self._midi_in = None
self._midi_out = None
self._in_ports = []
self._in_port = 0
self._use_virtual_in = False
self._out_ports = []
self._out_port = 0
self._use_virtual_out = False
self._xml_dict = {}
self.__log.debug("MidiConnector was initialized:\n%s",
PrettyFormat(self.__dict__))
def start(self):
"""
    Starts processing requests
Remarks:
    * It will either list the available MIDI ports, or run in interactive or
silent mode, according to the passed command line options
Returns:
* A status string; either: "Quit", "Reload", "Reboot", or "Shutdown"
"""
self.__log.info("Starting MidiConnector")
status = None
self._get_all_ports()
exit = False
if len(self._in_ports) == 0:
self.__log.info("No MIDI IN ports were found. Please connect your MIDI "
"device and run the script again")
exit = True
if len(self._out_ports) == 0:
self.__log.info("No MIDI OUT ports were found. Please connect your MIDI "
"device and run the script again")
exit = True
if not exit:
if self._args.list:
self.__log.debug("--list switch was passed")
self._list_ports()
else:
self._parse_xml_config()
self._parse_ports()
self._open_ports()
midi_processor = MidiProcessor(
self._xml_dict,
self._midi_in,
self._midi_out,
ignore_sysex = False,
ignore_timing = False,
ignore_active_sense = False,
)
midi_processor.parse_xml()
status = midi_processor.read_midi()
self.__log.info("Exiting")
self._close_ports()
self._free_midi()
self.__log.debug("MidiConnector has been ended")
return status
def _parse_xml_config(self):
"""
Parses the specified xml configuration file
"""
self.__log.info("Parsing XML config: %s", self._xsd_schema)
exit = False
self.__log.debug("Calling XMLSchema11 api")
try:
xsd_schema = xmlschema.XMLSchema11(self._xsd_schema)
except:
exit = True
error = traceback.format_exc()
self.__log.info("Error while parsing xsd file:\n%s\n\n%s",
self._xsd_schema, error)
if not exit:
self.__log.debug("Converting XML schema to dict")
try:
xml_dict = xsd_schema.to_dict(self._args.config)
#A last manual validation must be done here: the InitialBank value must
        #be less than or equal to the total number of banks
if xml_dict['@InitialBank'] > len(xml_dict['Bank']):
raise Exception("InitialBank is higher than the possible number of "
"banks / maximum: " + str(len(xml_dict['Bank'])) + \
", given value: " + str(xml_dict['@InitialBank']))
self.__log.debug("Got: \n%s", PrettyFormat(xml_dict))
except:
exit = True
error = traceback.format_exc()
message = "Error while parsing xml file:\n%s\n\n%s" % (
self._args.config, error
)
self.__log.info(message)
if exit:
self.__log.debug("Unexpected error occured, aborting...")
self._free_midi()
sys.exit()
self._xml_dict = xml_dict
def _open_port(self, interface_type, midi_port, is_virtual = False):
"""
    Opens the specified MIDI port for the given interface type
Parameters:
* interface_type: which interface to open: 'input' or 'output'
* midi_port: MIDI port used to open the MIDI interface
* is_virtual: whether or not the port is virtual
Returns:
* In case of opening a virtual port, it will return a MIDI interface
"""
if not is_virtual:
self.__log.debug("Opening MIDI port: %s", PrettyFormat(midi_port))
port_name = None
client_name = None
else:
self.__log.debug("Opening Virtual MIDI port")
port_name = midi_port
midi_port = None
client_name = VIRTUAL_PREFFIX[:-1]
try:
midi_interface = open_midiport(port = midi_port, type_ = interface_type,
use_virtual = is_virtual,
interactive = False,
client_name = client_name,
port_name = port_name)[0]
except:
error = traceback.format_exc()
self.__log.info(error)
self._free_midi()
sys.exit()
return midi_interface
def _open_ports(self):
"""
Opens the entered MIDI ports
"""
self._midi_in = self._open_port("input", self._in_port,
self._use_virtual_in)
if self._use_virtual_in:
port_name = self._in_port
else:
port_name = self._in_ports[self._in_port]
self.__log.info("MIDI IN Port: '%s' was opened", port_name)
self._midi_out = self._open_port("output", self._out_port,
self._use_virtual_out)
if self._use_virtual_out:
port_name = self._out_port
else:
port_name = self._out_ports[self._out_port]
self.__log.info("MIDI OUT Port: '%s' was opened", port_name)
def _close_port(self, midi_interface):
"""
Closes the specified MIDI interface
Parameters:
* midi_interface: MIDI interface that will be closed
"""
self.__log.debug("Closing MIDI port")
try:
midi_interface.close_port()
except:
error = traceback.format_exc()
self.__log.info(error)
def _close_ports(self):
"""
Closes all opened MIDI ports
"""
self._close_port(self._midi_in)
if self._use_virtual_in:
port_name = self._in_port
else:
port_name = self._in_ports[self._in_port]
self.__log.info("MIDI IN Port: '%s' was closed", port_name)
self._close_port(self._midi_out)
if self._use_virtual_out:
port_name = self._out_port
else:
port_name = self._out_ports[self._out_port]
self.__log.info("MIDI OUT Port: '%s' was closed", port_name)
def _parse_port(self, port_list, arg_name):
"""
Gets the specified port from command line
Parameters:
* port_list: List of available MIDI ports
* arg_name: name of the argument to get. It can be: InPort or OutPort
Returns:
      * A tuple containing:
- either a port index or a virtual port string name
        - whether a virtual or a real port is used
"""
self.__log.debug("Getting: %s from:\n%s", arg_name, PrettyFormat(port_list))
use_virtual = False
num_ports = len(port_list)
port_value = self._xml_dict.get('@'+arg_name, num_ports)
self.__log.debug("Port value: %s", port_value)
if (type(port_value) == str) and port_value.isdigit():
port_value = int(port_value)
elif type(port_value) == str:
is_windows = (platform.system() == "Windows")
if port_value.startswith(VIRTUAL_PREFFIX):
if not is_windows:
          #Virtual ports only work under macOS and Linux. Windows doesn't
          #support this. On that operating system, the "Virtual:" prefix will be
          #removed and the port will be treated as a normal port. You can ensure
          #compatibility between Windows and other OSes by creating the ports
          #first with loopMIDI
use_virtual = True
elif port_value[-1] != '*':
port_value += '*'
port_value = port_value[len(VIRTUAL_PREFFIX):]
if not use_virtual:
self.__log.debug("Searching port")
#On this case, a string with part of the name was given, so, it
#will be searched in the available ports
port_index = 0
port_found = False
for port_name in port_list:
filtered = fnmatch.filter([port_name], port_value)
if filtered != []:
port_found = True
break
port_index += 1
if not port_found:
self.__log.info("The %s: %s wasn't found.", arg_name, port_value)
self._free_midi()
self.__log.debug("Port wasn't found, exiting")
sys.exit()
port_value = port_index + 1
self.__log.debug("Port was found, index: %d", port_value)
else:
self.__log.debug("Virutal Port will be used")
if not use_virtual:
#Internally, port numbers start from 0 because they are in an array
port_value -= 1
if port_value >= num_ports:
self.__log.info("Invalid port number was supplied")
self._free_midi()
self.__log.debug("Exiting after getting invalid port")
sys.exit()
return port_value, use_virtual
def _parse_ports(self):
"""
Gets the passed ports to the command line
"""
self.__log.debug("Parsing ports")
self._in_port, self._use_virtual_in = self._parse_port(self._in_ports,
'InPort')
self._out_port, self._use_virtual_out = self._parse_port(self._out_ports,
'OutPort')
self.__log.debug("Ports were parsed")
def _open_midi(self):
"""Starts MIDI without opening a port"""
self.__log.info("Opening MIDI interfaces")
try:
self._midi_out = MidiOut()
self._midi_in = MidiIn()
#Note: if you need to catch SysEx, MIDI clock, and active sense
#messages, then use the method: ignore_types as follows:
#self._midi_in.ignore_types(sysex = False, timing = False,
# active_sense = False)
#They are ignored by default. I don't need this right now, so the
#standard behaviour is OK for me
except:
error = traceback.format_exc()
self.__log.info(error)
self._free_midi()
return False
self.__log.debug("MIDI interfaces were opened")
return True
def _free_midi(self):
"""Frees MIDI resources"""
self.__log.debug("Releasing MIDI")
if hasattr(self, '_midi_in'):
del self._midi_in
if hasattr(self, '_midi_out'):
del self._midi_out
self.__log.info("MIDI was released")
def _get_midi_ports(self, midi_interface):
"""
Gets the available ports for the specified MIDI interface
Parameters:
* midi_interface: interface used for listing the ports. It can be
either _midi_in or _midi_out.
"""
self.__log.debug("Getting available MIDI ports")
ports = midi_interface.get_ports()
self.__log.debug("Got:\n%s", PrettyFormat(ports))
port_index = 0
for port in ports:
port_index_str = str(port_index)
ports[port_index] = port
port_index += 1
self.__log.debug("Fixed port indexes:\n%s", PrettyFormat(ports))
return ports
def _get_all_ports(self):
"""
Gets all the available MIDI IN and Out ports.
"""
in_ports = []
out_ports = []
if self._open_midi():
self.__log.debug("Getting all MIDI IN ports")
in_ports = self._get_midi_ports(self._midi_in)
self.__log.debug("Got:\n%s", PrettyFormat(in_ports))
self.__log.debug("Getting all MIDI OUT ports")
out_ports = self._get_midi_ports(self._midi_out)
self.__log.debug("Got:\n%s", PrettyFormat(out_ports))
self._in_ports = in_ports
self._out_ports = out_ports
self._free_midi()
def _get_formatted_port_list(self, port_list):
"""
Gets the port list as follows:
<port_index>: <port_name>
"""
self.__log.debug("Getting formatted port list")
port_list_tuples = []
for port_index, port_name in enumerate(port_list):
port_list_tuples.append(str(port_index + 1) + ": " + port_name)
self.__log.debug("Got: %s", PrettyFormat(port_list_tuples))
return '\n\r'.join(port_list_tuples)
def _list_ports(self):
"""
Lists all the available MIDI IN and Out ports.
"""
self.__log.info("\nAvailable MIDI IN ports:")
self.__log.info(self._get_formatted_port_list(self._in_ports))
self.__log.info("\nAvailable MIDI OUT ports:")
self.__log.info(self._get_formatted_port_list(self._out_ports))
```
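`_parse_port` resolves a configured port name (optionally `Virtual:`-prefixed) against the discovered port list with shell-style wildcards. A standalone sketch of that matching step; the port names below are invented:
```python
# Standalone sketch of the wildcard port matching used in _parse_port
# (port names are made up).
import fnmatch

available_ports = ["Midi Through 14:0", "USB MIDI Interface 20:0", "loopMIDI Port 1"]
configured = "USB MIDI*"  # as it could appear in the XML config

matches = [i for i, name in enumerate(available_ports)
           if fnmatch.filter([name], configured)]
print(matches)  # -> [1], i.e. the second port would be opened
```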
#### File: JMMidiBassPedalController/src/ValidateXML.py
```python
from __future__ import print_function
import argparse
from argparse import ArgumentParser
from argparse import RawTextHelpFormatter
import xmlschema
from pprint import pprint
class ValidateXMLArgumentParser(ArgumentParser):
"""
ArgumentParser for the helper application
Remarks:
- The helper application will accept the following command line options:
* --config: XML configuration file to check against the XSD schema.
"""
def __init__(self, description = "Checks the specified XML file against the "
"XSD schema"):
"""
Setups the ArgumentParser of the helper program
Parameters:
* description: description of what the program is doing
"""
self._parser = ArgumentParser(description = description,
formatter_class = RawTextHelpFormatter, add_help = False)
def add_arguments(self,
main_help = "Shows this help message and exits",
config_help = "XML file to use. If not given, then"
"conf/sample-config.xml will\nbe assumed\n"):
"""
Adds the command line options and commands to the argument parser
Parameters:
* main_help: text of the -h, --help option
* config_help: help of the "--config" command line option
"""
self._parser.add_argument("-h", "--help", action = "help",
default = argparse.SUPPRESS, help = main_help)
self._parser.add_argument("-c", "--config",
default = "conf/sample-config.xml",
help = config_help)
def parse_arguments(self):
"""
Validates the supplied command line options. It will show an error
    message if the validation fails and then exit
"""
return self._parser.parse_args()
schema_name = 'conf/MidiBassPedalController.xsd'
if __name__ == "__main__":
parser = ValidateXMLArgumentParser()
parser.add_arguments()
args = parser.parse_arguments()
my_schema = xmlschema.XMLSchema11(schema_name)
pprint(my_schema.is_valid(args.config))
pprint(my_schema.to_dict(args.config))
``` |
{
"source": "jmeintrup/random-graph-generator",
"score": 3
} |
#### File: jmeintrup/random-graph-generator/main.py
```python
import argparse
import random
import re
pat = re.compile(r"^erdos|fixed$")
def generation_type(arg_value, pat=pat):
if not pat.match(arg_value):
raise argparse.ArgumentTypeError('Expected "erdos" or "fixed" as type')
return arg_value
parser = argparse.ArgumentParser(description='Random Graph Generation Tool')
parser.add_argument('-n', metavar='N', type=int, help='Num Vertices')
parser.add_argument('-p', metavar='P', type=float, help='Probability per Edge')
parser.add_argument('-s', metavar='SEED', type=int, help='Seed for RNG')
parser.add_argument('--type', metavar='type', type=generation_type,default='erdos', help='Type: "erdos" or "fixed", fixed requires "-m" argument')
parser.add_argument('-m', metavar='M', type=int, help='Num Edges for fixed generation')
args = parser.parse_args()
random.seed(args.s)
def erdos(n, p):
edges=random_spanning_tree(n)
for v in range(1,n+1):
for u in range(v+1, n+1):
if random.random() <= p:
edges.add((v, u))
edges=list(edges)
edges.sort()
return edges
def fixed(n, m):
spanning_tree=random_spanning_tree(n)
edges=spanning_tree
if len(spanning_tree)>m:
return spanning_tree
all_edges=set()
for u in range(0,n):
for v in range(u+1,n):
all_edges.add((u, v))
candidates=all_edges-spanning_tree
while len(edges) < m and len(candidates) > 0:
edge = random.sample(candidates, 1).pop()
candidates.remove(edge)
edges.add(edge)
edges=list(edges)
edges.sort()
return edges
def random_spanning_tree(n):
vertices=list(range(0,n))
S=set(vertices)
T=set()
v = random.sample(S, 1).pop()
S.remove(v)
T.add(v)
edges=set()
while len(S)>0:
u=random.sample(vertices, 1).pop()
if u not in T:
edges.add((min(u,v), max(u,v)))
S.remove(u)
T.add(u)
v=u
return edges
def print_dimacs(n, edges, comment=None):
print(f"p edges {n} {len(edges)}")
for edge in edges:
print(f"e {edge[0]+1} {edge[1]+1}")
if args.type == 'erdos':
edges=erdos(args.n, args.p)
print(f"c Randomly Generated Erdos-Reyni Graph with p={args.p} and n={args.n}")
print_dimacs(args.n, edges)
if args.type == 'fixed':
if args.m == None:
raise argparse.ArgumentTypeError('Fixed type requires "m" argument')
print(f"c Randomly Generated graph with fixed number of edges and vertices")
edges=fixed(args.n, args.m)
print_dimacs(args.n, edges)
``` |
{
"source": "jmeisele/celery-farm",
"score": 4
} |
#### File: backend/optimizer/solver_interface.py
```python
from abc import ABC, abstractmethod
from typing import Dict, Optional
from models.solver import SolverParameters
# definition of a solver interface which must be followed when implementing custom solvers
class SolverInterface(ABC):
@abstractmethod
def set_solver_parameters(self, parameters: Optional[SolverParameters] = None):
"""
Set parameters (e.g. run time, gap, etc.) to control the behavior of the solver.
"""
pass
@abstractmethod
def build_model(self):
"""
        Given the problem data, build a formal model instance. If you are not using a classical MIP/
        LP solver, this method may simply be a no-op (`pass`) in the subclass.
"""
pass
@abstractmethod
def solve_instance(self):
"""
Given the problem data, solve the instance. This is where the actual optimization happens!
"""
pass
@abstractmethod
def get_solution_status(self) -> Dict:
"""
Return a dictionary which contains relevant statistics about the solution of a problem instance.
"""
pass
```
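The interface implies a fixed call order on the implementing class: instantiate with the problem data, optionally set parameters, build the model, solve, then read the status. A sketch of that intended lifecycle, assuming a concrete subclass such as the `Solver` defined below; `problem_data` and the parameter payload are placeholders:
```python
# Intended call order for a SolverInterface implementation (sketch only;
# problem_data and parameters are placeholders).
def run_solver(solver_cls, problem_data, parameters=None):
    solver = solver_cls(**problem_data)
    solver.set_solver_parameters(parameters)
    solver.build_model()
    solver.solve_instance()
    return solver.get_solution_status()
```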
#### File: backend/optimizer/solver.py
```python
from typing import Optional
from pydantic import PrivateAttr
from pyscipopt import Model, quicksum
from models.solver import ProblemData, SolverParameters
from optimizer.solver_interface import SolverInterface
class Solver(SolverInterface, ProblemData):
# Set additional (private) attributes as needed
_model: str = PrivateAttr()
_solver_parameters: str = PrivateAttr()
def __init__(self, **data):
super().__init__(**data)
self._model = None
self._solver_parameters = None
def set_solver_parameters(self, parameters: Optional[SolverParameters] = None): # noqa: C901
if parameters:
self._solver_parameters = parameters
if "setBoolParam" in self._solver_parameters:
for key, value in self._solver_parameters["setBoolParam"].items():
self._model.setBoolParam(key, value)
if "setIntParam" in self._solver_parameters:
for key, value in self._solver_parameters["setIntParam"].items():
self._model.setIntParam(key, value)
if "setRealParam" in self._solver_parameters:
for key, value in self._solver_parameters["setRealParam"].items():
self._model.setRealParam(key, value)
if "setCharParam" in self._solver_parameters:
for key, value in self._solver_parameters["setCharParam"].items():
self._model.setCharParam(key, value)
if "setStringParam" in self._solver_parameters:
for key, value in self._solver_parameters["setStringParam"].items():
self._model.setStringParam(key, value)
def build_model(self):
self._model = Model("flp")
# Define decision variables
x, y = {}, {}
for j in self.facilities:
y[j] = self._model.addVar(vtype="B", name=f"y_{j}")
for i in self.customers:
x[i, j] = self._model.addVar(vtype="C", name=f"x_{i,j}")
# Define constraint set
for i in self.customers:
self._model.addCons(
quicksum(x[i, j] for j in self.facilities) == self.demand[i],
f"Demand_{i}",
)
for j in self.facilities:
self._model.addCons(
quicksum(x[i, j] for i in self.customers) <= self.facility_capacity[j] * y[j],
f"Capacity_{j}",
)
for (i, j) in x:
self._model.addCons(x[i, j] <= self.demand[i] * y[j], f"Strong_{i,j}")
# Define objective function
self._model.setObjective(
quicksum(self.facility_installation_cost[j] * y[j] for j in self.facilities)
+ quicksum(self.transportation_cost[i][j] * x[i, j] for i in self.customers for j in self.facilities),
"minimize",
)
def solve_instance(self):
self._model.optimize()
def get_solution_status(self) -> dict:
if self._model.getStatus() == "optimal":
return {
"status": self._model.getStatus(),
"scip_parameters": self._solver_parameters,
"objective_function_value": self._model.getObjVal(),
"solution_time": self._model.getSolvingTime(),
"gap": self._model.getGap(),
"number_of_decision_vars": self._model.getNVars(),
"number_of_constraints": self._model.getNConss(),
"decision_variables": {var.name: self._model.getVal(var) for var in self._model.getVars()},
}
else:
return {
"status": self._model.getStatus(),
"scip_parameters": self._solver_parameters,
}
``` |
{
"source": "jmeisele/fastapi-ml-scaffolding",
"score": 2
} |
#### File: tests/test_api/test_prediction.py
```python
import math
def test_prediction(test_client) -> None:
response = test_client.post(
"/api/model/predict",
json={
"median_income_in_block": 8.3252,
"median_house_age_in_block": 41,
"average_rooms": 6,
"average_bedrooms": 1,
"population_per_block": 322,
"average_house_occupancy": 2.55,
"block_latitude": 37.88,
"block_longitude": -122.23,
},
)
assert response.status_code == 200
assert "median_house_value" in response.json()
assert "currency" in response.json()
assert math.isclose(response.json().get("median_house_value"), 422005, rel_tol=1.0)
def test_prediction_nopayload(test_client) -> None:
response = test_client.post(
"/api/model/predict",
json={},
)
assert response.status_code == 422
``` |
{
"source": "jmeisele/fastapi-tf",
"score": 2
} |
#### File: app/core/security.py
```python
import secrets
from typing import Optional
from fastapi import HTTPException, Security
from fastapi.security.api_key import APIKeyHeader
from starlette.status import HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED
from app.core import config
from app.core.messages import AUTH_REQ, NO_API_KEY
api_key = APIKeyHeader(name="token", auto_error=False)
def validate_request(header: Optional[str] = Security(api_key)) -> bool:
if header is None:
raise HTTPException(
status_code=HTTP_400_BAD_REQUEST, detail=NO_API_KEY, headers={}
)
if not secrets.compare_digest(header, str(config.API_KEY)):
raise HTTPException(
status_code=HTTP_401_UNAUTHORIZED, detail=AUTH_REQ, headers={}
)
return True
```
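`validate_request` is designed to be attached to routes as a dependency so every call must present the shared `token` header. A sketch of wiring it into a router; the route path and names are illustrative, not part of the original project:
```python
# Hypothetical route protected by the API-key check above.
from fastapi import APIRouter, Depends

from app.core.security import validate_request

router = APIRouter()

@router.get("/health", dependencies=[Depends(validate_request)])
def health() -> dict:
    return {"status": "ok"}
```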
#### File: fastapi-tf/app/main.py
```python
from fastapi import FastAPI
from app.api.routes.router import api_router
from app.core.config import (API_PREFIX, APP_NAME, APP_VERSION, IS_DEBUG)
from app.core.event_handlers import (start_app_handler, stop_app_handler)
def get_app() -> FastAPI:
"""FastAPI app controller"""
fast_app = FastAPI(title=APP_NAME, version=APP_VERSION, debug=IS_DEBUG)
fast_app.include_router(api_router, prefix=API_PREFIX)
fast_app.add_event_handler("startup", start_app_handler(fast_app))
fast_app.add_event_handler("shutdown", stop_app_handler(fast_app))
return fast_app
app = get_app()
``` |
{
"source": "JMejlsted/iGEM-distribution",
"score": 3
} |
#### File: scripts/scriptutils/helpers.py
```python
from collections.abc import Iterable
from typing import Union
import sbol3
from sbol_utilities.helper_functions import is_plasmid
import tyto
def vector_to_insert(component: sbol3.Component) -> sbol3.Component:
"""If the component is a vector, peel it open to find the sub-component that is not the vector portion
If the component is not a vector, return it directly
Throws a ValueError if the component is a vector but does not have precisely one insert
:param component: SBOL3 component to extract from
:return: component if not vector; otherwise the vector
"""
# is either the component or any feature thereof a vector? If not, then return component
subvectors = {f for f in component.features if is_plasmid(f)}
if len(subvectors) == 0 and not is_plasmid(component):
return component
# otherwise, if there's precisely one non-vector subcomponent, return the Component it points to
inserts = {f for f in set(component.features)-subvectors if isinstance(f, sbol3.SubComponent)}
if len(inserts) == 1:
return inserts.pop().instance_of.lookup()
else:
raise ValueError(f'Vector should have one insert, but found {len(inserts)}: {component.identity}')
# TODO: migrate this to sbol-utilities
def contained_components(roots: Union[sbol3.TopLevel, Iterable[sbol3.TopLevel]]) -> set[sbol3.Component]:
"""Find the set of all SBOL Components contained within the roots or their children
    This will explore via Collection.member relations and Component.feature relations
:param roots: collection of TopLevel objects to explore
:return: set of Components found
"""
if isinstance(roots, sbol3.TopLevel):
roots = [roots]
explored = set() # set being built via traversal
# subfunction for walking containment tree
def walk_tree(obj: sbol3.TopLevel):
if obj not in explored:
explored.add(obj)
if isinstance(obj, sbol3.Component):
for f in (f.instance_of.lookup() for f in obj.features if isinstance(f, sbol3.SubComponent)):
walk_tree(f)
elif isinstance(obj, sbol3.Collection):
for m in obj.members:
walk_tree(m.lookup())
for r in roots:
walk_tree(r)
# filter result for containers:
return {c for c in explored if isinstance(c, sbol3.Component)}
# TODO: remove this TYTO extension methods after resolution of issue https://github.com/SynBioDex/tyto/issues/33
def has_SO_uri(uri: str) -> bool:
"""Test if a given URI is in the ontology (effectively an exception-checking wrapper around get_term_by_uri)
    :param uri: URI to look up
    :return: true if this is a URI in the Sequence Ontology
"""
if not (uri.startswith("https://identifiers.org/SO") or uri.startswith("http://identifiers.org/so/SO")):
return False
try:
tyto.SO.get_term_by_uri(uri)
return True
except LookupError:
return False
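# Quick sanity check of the URI filter above (term URIs are illustrative; the
# positive case also requires the term to actually resolve via tyto):
#   has_SO_uri("https://identifiers.org/SO:0000167")  -> True for a resolvable SO term
#   has_SO_uri("https://example.org/not-an-SO-term")  -> False (prefix check fails)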
``` |
{
"source": "jmelahman/jmelahman.github.io",
"score": 3
} |
#### File: jmelahman.github.io/tools/here_code_to_html.py
```python
import fileinput
from pygments import highlight
from pygments.lexers import get_lexer_by_name
from pygments.formatters import HtmlFormatter
def main():
print(code_to_html(fileinput.input()))
def code_to_html(code, lexer = 'python3', linenos = True):
formatter = HtmlFormatter(linenos=linenos,
noclasses=False,
cssclass='')
html = highlight(code, get_lexer_by_name(lexer), formatter)
return html
if __name__ == '__main__':
main()
```
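A small usage sketch of `code_to_html` outside of the stdin pipeline; the snippet being highlighted is arbitrary and the import path is assumed from the repo layout:
```python
# Highlight an arbitrary snippet and write it to a standalone HTML fragment.
from tools.here_code_to_html import code_to_html  # import path assumed from the repo layout

snippet = "for i in range(3):\n    print(i)\n"
with open("snippet.html", "w") as out:
    out.write(code_to_html(snippet, lexer="python3", linenos=False))
```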
#### File: jmelahman.github.io/tools/utils.py
```python
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def wait_and_find_element(driver, by, value, timeout=30):
    # Wait up to `timeout` seconds for the element and return it; return None on timeout
    try:
        return WebDriverWait(driver, timeout).until(
            EC.presence_of_element_located((by, value))
        )
    except TimeoutException:
        print('Unable to locate %s within %s' % (value, timeout))
        return None
``` |
{
"source": "jmelchio/spinnaker-resource",
"score": 2
} |
#### File: spinnaker-resource/test/testin.py
```python
import json
import os
import sys
import unittest
from io import StringIO
from unittest.mock import patch
from assets import inscript
spinnaker_waitforconcourse_running = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource",
"parameters": {
"thing_one": "one",
"thing_two": "two"
}
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
spinnaker_waitforconcourse_completed = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource",
"parameters": {
"thing_one": "one",
"thing_two": "two"
}
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "COMPLETED"
}
],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
spinnaker_multiple_values = json.loads('''
[
{
"application": "metricsdemo",
"authentication": {
"allowedAccounts": [
"seoul",
"montclair",
"atherton"
],
"user": "anonymous"
},
"buildTime": 1554412918160,
"canceled": false,
"id": "01D7N3NNCGRF14VNPHMM46X19X",
"initialConfig": {},
"keepWaitingPipelines": false,
"limitConcurrent": true,
"name": "block",
"notifications": [],
"origin": "api",
"pipelineConfigId": "4652d7ac-e9af-41b2-b41f-a946e24354f2",
"stages": [
{
"context": {
"failPipeline": true,
"instructions": "Should I complete?",
"judgmentInputs": [],
"notifications": []
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForJudgment",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "manualJudgment"
},
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource"
},
"id": "01D7N3NNCG0GBKK28RS25R4HX4",
"name": "Manual Judgment",
"outputs": {},
"refId": "1",
"requisiteStageRefIds": [],
"startTime": 1554412918193,
"status": "RUNNING",
"tasks": [
{
"id": "1",
"implementingClass": "com.netflix.spinnaker.orca.echo.pipeline.ManualJudgmentStage$WaitForManualJudgmentTask",
"loopEnd": false,
"loopStart": false,
"name": "waitForConcourseJobStartTask",
"stageEnd": true,
"stageStart": true,
"startTime": 1554412918208,
"status": "RUNNING"
}
],
"type": "concourse"
},
{
"context": {
"master": "some-master",
"teamName": "A-team",
"pipelineName": "some-pipeline",
"resourceName": "spin-resource"
},
"id": "01D7N3NNCGZ2PWFS2FKYBS2FFV",
"name": "Clone Server Group",
"outputs": {},
"refId": "2",
"requisiteStageRefIds": [
"1"
],
"status": "NOT_STARTED",
"tasks": [],
"type": "concourse"
}
],
"startTime": 1554412918173,
"status": "RUNNING",
"systemNotifications": [],
"trigger": {
"artifacts": [],
"dryRun": false,
"enabled": false,
"eventId": "fdc68837-d4ae-421a-817d-c9d31d532939",
"notifications": [],
"parameters": {},
"rebake": false,
"resolvedExpectedArtifacts": [],
"strategy": false,
"type": "manual",
"user": "anonymous"
},
"type": "PIPELINE"
}
]
''')
concourse_in_match_version = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_match_version_two = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file_two.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_match_version_three = json.loads(''' { "source":
{ "base_url": "http://spinnaker.gate:8084/", "app_name": "metricsdemo", "master": "some-master", "team_name": "A-team",
"pipeline_name": "some-pipeline", "resource_name": "spin-resource", "path": "file_three.props"},
"version": { "stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}} ''')
concourse_in_without_baseurl = json.loads('''{ "source": { "app_name": "metricsdemo", "master": "some-master"
, "team_name": "A-team", "pipeline_name": "some-pipeline", "resource_name": "spin-resource"},
"version": {"stage_guid": "1"}}''')
class TestIn(unittest.TestCase):
@patch('assets.common.call_spinnaker', return_value=spinnaker_waitforconcourse_running)
@patch('assets.common.capture_input', return_value=concourse_in_match_version)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_happy_path(self, call_spinnaker, capture_input, notify_spinnaker):
backup = sys.stdout
sys.stdout = StringIO()
inscript.main('/tmp/')
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = backup
self.assertEqual(out,
'{"version": {"stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}, "job_name": "job-unknown", \
"build_name": "build-number-name", "metadata": [{"name": "thing_one", "value": "one"}, \
{"name": "thing_two", "value": "two"}]}\n',
'Wrong information returned from in script')
self.assertTrue(os.path.isfile('/tmp/file.props'), 'File does not exist.')
with open('/tmp/file.props', 'r') as config_file:
contents = config_file.read()
self.assertEqual(contents, 'thing_one=one\nthing_two=two\n', 'String not found')
os.remove('/tmp/file.props')
@patch('assets.common.call_spinnaker', return_value=spinnaker_multiple_values)
@patch('assets.common.capture_input', return_value=concourse_in_match_version_two)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_happy_path_no_parameters(self, call_spinnaker, capture_input, notify_spinnaker):
backup = sys.stdout
sys.stdout = StringIO()
inscript.main('/tmp/')
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = backup
self.assertEqual(out,
'{"version": {"stage_guid": "01D7N3NNCG0GBKK28RS25R4HX4"}, "job_name": "job-unknown", \
"build_name": "build-number-name", "metadata": []}\n', 'Wrong information returned from in script')
self.assertTrue(os.path.isfile('/tmp/file_two.props'), 'File does not exist.')
with open('/tmp/file_two.props', 'r') as config_file:
contents = config_file.read()
self.assertEqual(contents, '', 'File not empty')
os.remove('/tmp/file_two.props')
@patch('assets.common.call_spinnaker', return_value=spinnaker_waitforconcourse_completed)
@patch('assets.common.capture_input', return_value=concourse_in_match_version_three)
@patch('assets.inscript.notify_spinnaker', return_value=True)
def test_unit_crappy_path_no_running_wait_task(self, call_spinnaker, capture_input, notify_spinnaker):
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
self.assertEqual(err, 'No running Wait for Concourse task found\nSystem Exit detected\n')
@patch('assets.common.capture_input', return_value=concourse_in_without_baseurl)
def test_unit_crappy_path_missing_base_url(self, capture_input):
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
self.assertEqual(err, 'Unable to complete operation: \'base_url\'\nSystem Exit detected\n',
'Expected error message about base_url')
class TestTimeOut(unittest.TestCase):
def test_unit_crappy_path_timeout(self):
backup = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit) as context:
inscript.main('/tmp/')
err = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = backup
self.assertEqual(str(context.exception), '1', 'Return code of `1` expected')
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jmelesky/voronoi_example",
"score": 4
} |
#### File: jmelesky/voronoi_example/test_distances.py
```python
import math
import random
def eucliddistance(x1, y1, x2, y2):
xdist = abs(x1 - x2)
ydist = abs(y1 - y2)
return math.sqrt( (xdist ** 2) + (ydist ** 2) )
def taxidistance(x1, y1, x2, y2):
return (abs(x1 - x2) + abs(y1 - y2))
def civdistance(x1, y1, x2, y2):
return (max(abs(x1 - x2), abs(y1 - y2)))
def dandddistance(x1, y1, x2, y2):
mindist = min(abs(x1 - x2), abs(y1 - y2))
maxdist = max(abs(x1 - x2), abs(y1 - y2))
return ((maxdist - mindist) + (1.5 * mindist))
def otherdistance(x1, y1, x2, y2):
mindist = min(abs(x1 - x2), abs(y1 - y2))
maxdist = max(abs(x1 - x2), abs(y1 - y2))
return ((maxdist - mindist) + (1.4 * mindist))
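# Worked example for the metrics above, measuring from (0, 0) to (3, 4):
#   euclid = sqrt(3^2 + 4^2) = 5.0
#   taxi   = 3 + 4           = 7
#   civ    = max(3, 4)       = 4
#   dandd  = (4 - 3) + 1.5*3 = 5.5
#   other  = (4 - 3) + 1.4*3 = 5.2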
def main():
# generate a list of 300 random points
points = [ (random.randrange(200), random.randrange(200))
for x in range(300) ]
# get the euclidean distance from origin to each point
base_distances = [ eucliddistance(0,0,p[0],p[1])
for p in points ]
# now, we run through each of the other distance
# measures, and take the total differences
taxidiff = 0
civdiff = 0
dandddiff = 0
otherdiff = 0
for i in range(len(points)):
p = points[i]
bd = base_distances[i]
taxidiff += abs(taxidistance(0,0,p[0],p[1]) - bd)
civdiff += abs(civdistance(0,0,p[0],p[1]) - bd)
dandddiff += abs(dandddistance(0,0,p[0],p[1]) - bd)
otherdiff += abs(otherdistance(0,0,p[0],p[1]) - bd)
print("taxi diff: %d" % (taxidiff))
print("civ diff: %d" % (civdiff))
print("dandd diff: %d" % (dandddiff))
print("other diff: %d" % (otherdiff))
if __name__ == '__main__':
main()
``` |
{
"source": "jmeline/wifi_signal_analysis",
"score": 3
} |
#### File: wifi_signal_analysis/src/main.py
```python
from sampleParser import SampleParser as SampleExtractor
from sampleAnalyizer import SampleAnalyizer
## Modify your path ##
path = './Export/'
## ## ## ## ## ## ## #
def main():
## Collect all of the samples
## organize them together into a dictionary.
extractor = SampleExtractor(path)
currentFilesInDirectory = extractor.getDirectoryFiles()
for _file in currentFilesInDirectory:
extractor.storeFileNamesByPatternInDictionary(_file)
sampleDictionary = extractor.getSampleDictionary()
## Debugging
extractor.printDirectory()
## Begin analyzing data
print ("Start Analyzing...")
analyzer = SampleAnalyizer()
for key, value in sorted(sampleDictionary.items()):
print ("ValueArr: ", value)
if key == 'theta=90':
print ("key: ", key)
for filename in value:
# if not key is 'efficien':
dataframe = analyzer.generateDataFrameFromFile(path + filename)
print ("Key: ", key)
analyzer.setVariables(key)
analyzer.analyzeDataFrame(dataframe)
# analyzer.percent_above_threshold(dataframe, key)
print ("End Analyzing")
#mf.printVariables()
## Begin graphing data
if __name__ == '__main__':
main()
```
#### File: wifi_signal_analysis/src/sampleAnalyizer.py
```python
import pandas as pd
import numpy as np
import pprint
class Vividict(dict):
def __missing__(self, key):
value = self[key] = type(self)()
return value
class SampleAnalyizer():
def __init__(self):
self.tests = Vividict()
def printVariables(self):
print ("cut", self.cut)
print ("plot_data:", self.plot_data)
print ("plot_data2:", self.plot_data)
print ("arr_signal:", self.arr_signal)
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(self.tests)
def setVariables(self, value=""):
self.pname = value
self.cut = value
self.arr_signal = []
self.extract_frequency = "2440000000"
self.signalCount = 0
self.count = 0
self.threshold = -5.0
self.totalSignal = 120
self.plot_data = []
self.plot_data2 = []
self.waterFall = []
self.sortedSignals = []
self.results = 0
def analyzeDataFrame(self, df):
""" Let Pandas do the heavy lifting here """
# reduce dataframe to only values that match extract_frequency
        accepted_df = df[df.iloc[:, 0] == int(self.extract_frequency)]  # .iloc replaces the removed DataFrame.ix indexer
# count up the rows in the entire datatable. i.e get a count
self.count = len(accepted_df)
# determine the count of values in column 2 in the datatable that are equal
# to or exceed the threshold value
        self.signalCount = len(accepted_df[accepted_df.iloc[:, 2] >= self.threshold])
# Add 15 and then multiply by 5 to each row in the dataframe
        self.arr_signal = (accepted_df.iloc[:, 2] + 15.0) * 5.0
# round to two decimal places
self.arr_signal = np.round(self.arr_signal, decimals=2)
print ("self.count: ", self.count)
print ("self.signalCount: ", self.signalCount)
self.results = 100.0 * int(10000 * self.signalCount / self.totalSignal) / 10000
print ("-> ", self.results, "% of signals above ", self.threshold, " dbi ")
print ("self.percentAbove: ", self.results/100)
def createWaterFallChart(self):
""" Create the waterfall chart """
pass
def percent_above_threshold(self, dataframe="", value=""):
if self.cut != 'theta=90':
self.plot_data.extend(self.plot_data2)
self.sortedSignals = sorted(self.arr_signal)
#print("self.count:", self.count)
#print("self.signalCount:", self.signalCount)
#print("self.totalSignal:", self.totalSignal)
#print("self.signalCount/self.totalSignal:", float(self.signalCount)/self.totalSignal)
self.results = 100.0 * int(10000 * self.signalCount / self.totalSignal) / 10000
print ("-> ", self.results, "% of signals above ", self.threshold, " dbi ")
#print ("self.plot_data:", sorted(self.plot_data), "len:", len(self.plot_data))
#print ("self.plot_data2:", sorted(self.plot_data2), "len:", len(self.plot_data2))
self.tests[self.pname][self.cut]["percentAbove"] = self.results/100
self.tests[self.pname][self.cut]["plot_data"] = self.plot_data
for index in range(-30,11):
count = self.countSignals(index)
self.waterFall.append("%.2f" % (count/121.0 * 83.333))
self.tests[self.pname][self.cut]["waterFall"] = self.waterFall
if self.results <= 30:
print ("- FAIL -")
else:
print (" \n")
self.signalCount = 0
else:
print ("Error! No value for pname and/or cut")
def countSignals(self, index):
count = 0
while ( float(self.sortedSignals[count]) >= index):
count += 1
if count >= len(self.sortedSignals):
break
return count
def generateDataFrameFromFile(self, filename):
return pd.read_csv(filename, skiprows=2, delimiter='\t', header=0)
``` |
{
"source": "jmelis/api-machine-stacks",
"score": 2
} |
#### File: api-machine-stacks/src/generate_manifest.py
```python
from lxml.etree import SubElement, Element, ElementTree, tostring
class PomXMLTemplate:
def __init__(self, json_data):
self._data = json_data
self.root = Element(
'project',
xmlns="http://maven.apache.org/POM/4.0.0",
)
self.tree = ElementTree(self.root)
self.create()
def create(self):
self._packages = self._data.get('packages')
SubElement(self.root, 'modelVersion').text = '4.0.0'
SubElement(self.root, 'packaging').text = 'pom'
SubElement(self.root, 'url').text = 'https://example.com'
self.licenses = SubElement(self.root, 'licenses')
self.license = SubElement(self.licenses, 'license')
SubElement(
self.license, 'name').text = "Apache License, Version 2.0"
SubElement(
self.license, 'url').text = "http://www.apache.org/licenses"
self.add_dependencies(self._packages)
def add_dependencies(self, dependencies):
if dependencies:
self.dpmanage = SubElement(self.root, "dependencyManagement")
self.dps = SubElement(self.dpmanage, "dependencies")
for item in dependencies:
dp = SubElement(self.dps, 'dependency')
                for child, data in zip(('groupId', 'artifactId', 'version'),
item.split(':')):
SubElement(dp, child).text = data
def xml_string(self):
return tostring(self.root, encoding='utf-8',
xml_declaration=True, pretty_print=True)
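# Example usage (hypothetical payload; the expected shape of `packages` is
# inferred from the ':'-splitting above):
if __name__ == '__main__':
    pom = PomXMLTemplate({'packages': ['org.example:demo-lib:1.2.3']})
    print(pom.xml_string().decode('utf-8'))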
``` |
{
"source": "jmelis/openshift-routers-load-test",
"score": 3
} |
#### File: openshift-routers-load-test/utils/create-gnuplot-values-file.py
```python
import sys
import lzma
import statistics
from os import listdir
from os.path import isfile, join, basename
ROUND_PRECISION = 2
# Process an xz file
def process(xz_file):
with lzma.open(xz_file) as f:
lines = f.read().decode().strip('\n').split('\n')
first_request = lines[0].split(",")
last_request = lines[-1].split(",")
latencies = []
end_times = []
status = {}
errors = 0
for line in lines:
fields = line.split(",")
if len(fields) != 15:
raise Exception("Unknown line format [%s])" % line)
latencies.append(int(fields[1]))
end_times.append(int(fields[0]) + int(fields[1]))
if fields[2] in status:
status[fields[2]] += 1
else:
status[fields[2]] = 1
if fields[14]:
errors += 1
total_requests = len(latencies)
attack_duration = int(last_request[0]) - int(first_request[0])
request_rate = round(total_requests / (attack_duration/1e6),
ROUND_PRECISION)
median = statistics.median(latencies) / 1e3
for code, i in status.items():
if code[0] == "4" or code[0] == 5:
errors += i
error_rate = round(errors / (attack_duration/1e6), ROUND_PRECISION)
print("%s %s %s %s" % (basename(xz_file)[0:-3], request_rate, median,
error_rate))
# We may want to reorder this afterwards
directory = sys.argv[1]
xz_files = sorted([f for f in listdir(directory) if f[-3:] == ".xz"])
for f in xz_files:
process(join(directory, f))
``` |
{
"source": "jmelis/reconcile",
"score": 2
} |
#### File: reconcile/reconcile/gql.py
```python
import json
from graphqlclient import GraphQLClient
from reconcile.config import get_config
_gqlapi = None
class GqlApiError(Exception):
pass
class GqlApi(object):
def __init__(self, url, token=None):
self.url = url
self.token = token
self.client = GraphQLClient(self.url)
if token:
self.client.inject_token(token)
def query(self, query, variables=None):
result_json = self.client.execute(query, variables)
result = json.loads(result_json)
if 'errors' in result:
raise GqlApiError(result['errors'])
if 'data' not in result:
raise GqlApiError((
"`data` field missing from GraphQL"
"server response."))
return result['data']
def get_resource(self, path):
query = """
query Resource($path: String) {
resources(path: $path) {
path
content
sha256sum
}
}
"""
resources = self.query(query, {'path': path})['resources']
if len(resources) != 1:
raise GqlApiError('Expecting one and only one resource.')
return resources[0]
def init(url, token=None):
global _gqlapi
_gqlapi = GqlApi(url, token)
return _gqlapi
def init_from_config():
config = get_config()
server = config['graphql']['server']
token = config['graphql'].get('token')
return init(server, token)
def get_api():
global _gqlapi
if not _gqlapi:
raise GqlApiError("gql module has not been initialized.")
return _gqlapi
```
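A short sketch of how this module is meant to be driven; the query text and resource path are placeholders, and `init_from_config` reads the server URL and token from the reconcile config:
```python
# Hypothetical consumer of the gql module above (query and path are placeholders).
import reconcile.gql as gql

gql.init_from_config()
api = gql.get_api()

apps = api.query("{ apps { name } }")     # raw GraphQL query against a placeholder schema
res = api.get_resource("/some/path.yml")  # single resource lookup for a placeholder path
print(apps, res["sha256sum"])
```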
#### File: reconcile/test/test_aggregated_list.py
```python
import pytest
from reconcile.aggregated_list import AggregatedList
from reconcile.aggregated_list import AggregatedDiffRunner
class TestAggregatedList(object):
def test_add_item(self):
alist = AggregatedList()
params = {'a': 1, 'b': 2}
items = ["qwerty"]
alist.add(params, items)
assert len(alist.dump()) == 1
assert alist.dump()[0]['items'] == items
assert alist.dump()[0]['params'] == params
def test_add_repeated_item(self):
alist = AggregatedList()
params = {'a': 1, 'b': 2}
item = "qwerty"
items = [item, item]
alist.add(params, items)
assert len(alist.dump()) == 1
assert alist.dump()[0]['items'] == [item]
assert alist.dump()[0]['params'] == params
def test_add_different_params(self):
alist = AggregatedList()
params1 = {'b': 1, 'a': 2}
items1 = ["qwerty1"]
params2 = {'a': 1, 'b': 3}
items2 = ["qwerty2"]
alist.add(params1, items1)
alist.add(params2, items2)
assert len(alist.dump()) == 2
hp1 = AggregatedList.hash_params(params1)
hp2 = AggregatedList.hash_params(params2)
assert alist.get_by_params_hash(hp1)['items'] == items1
assert alist.get_by_params_hash(hp2)['items'] == items2
    def test_get_by_params_hash(self):
alist = AggregatedList()
params1 = {'a': 1, 'b': 2, 'c': 3}
params2 = {'b': 2, 'c': 3, 'a': 1}
params3 = {'c': 3, 'a': 1, 'b': 2}
params4 = {'a': 1, 'c': 3, 'b': 2}
params5 = {'a': 1}
items1 = ["qwerty1"]
items2 = ["qwerty2"]
alist.add(params1, items1)
alist.add(params2, items1)
alist.add(params3, items1)
alist.add(params4, items1)
alist.add(params5, items2)
hp1 = AggregatedList.hash_params(params1)
hp2 = AggregatedList.hash_params(params2)
hp3 = AggregatedList.hash_params(params3)
hp4 = AggregatedList.hash_params(params4)
hp5 = AggregatedList.hash_params(params5)
        assert hp1 == hp2
assert hp1 == hp3
assert hp1 == hp4
assert hp1 != hp5
assert alist.get_by_params_hash(hp1)['items'] == items1
assert alist.get_by_params_hash(hp5)['items'] == items2
def test_diff_insert(self):
left = AggregatedList()
right = AggregatedList()
right.add({'a': 1}, ["qwerty"])
diff = left.diff(right)
assert diff['delete'] == []
assert diff['update-insert'] == []
assert diff['update-delete'] == []
assert diff['insert'] == [{'params': {'a': 1}, 'items': ['qwerty']}]
def test_diff_delete(self):
left = AggregatedList()
right = AggregatedList()
left.add({'a': 1}, ["qwerty"])
diff = left.diff(right)
assert diff['insert'] == []
assert diff['update-insert'] == []
assert diff['update-delete'] == []
assert diff['delete'] == [{'params': {'a': 1}, 'items': ['qwerty']}]
def test_diff_update_insert(self):
left = AggregatedList()
right = AggregatedList()
left.add({'a': 1}, ["qwerty1"])
right.add({'a': 1}, ["qwerty1", "qwerty2"])
diff = left.diff(right)
assert diff['insert'] == []
assert diff['delete'] == []
assert diff['update-delete'] == []
assert diff['update-insert'] == [
{'items': ['qwerty2'], 'params': {'a': 1}}
]
def test_diff_update_delete(self):
left = AggregatedList()
right = AggregatedList()
left.add({'a': 1}, ["qwerty1", "qwerty2"])
right.add({'a': 1}, ["qwerty1"])
diff = left.diff(right)
assert diff['insert'] == []
assert diff['delete'] == []
assert diff['update-insert'] == []
assert diff['update-delete'] == [
{'items': ['qwerty2'], 'params': {'a': 1}}
]
class TestAggregatedDiffRunner(object):
def test_run(self):
left = AggregatedList()
right = AggregatedList()
# test insert
right.add({'on': 'insert'}, ["i"])
# test delete
left.add({'on': 'delete'}, ["d"])
# test update-insert
left.add({'on': 'update-insert'}, ["ui1"])
right.add({'on': 'update-insert'}, ["ui1", "ui2"])
# test update-delete
left.add({'on': 'update-delete'}, ["ud1", "ud2"])
right.add({'on': 'update-delete'}, ["ud1"])
on_insert = []
on_delete = []
on_update_insert = []
on_update_delete = []
def recorder(l):
return lambda p, i: l.append([p, i])
runner = AggregatedDiffRunner(left.diff(right))
runner.register("insert", recorder(on_insert))
runner.register("delete", recorder(on_delete))
runner.register("update-insert", recorder(on_update_insert))
runner.register("update-delete", recorder(on_update_delete))
runner.run()
assert on_insert == [[{'on': 'insert'}, ['i']]]
assert on_delete == [[{'on': 'delete'}, ['d']]]
assert on_update_insert == [[{'on': 'update-insert'}, ['ui2']]]
assert on_update_delete == [[{'on': 'update-delete'}, ['ud2']]]
def test_run_cond_true(self):
left = AggregatedList()
right = AggregatedList()
right.add({'on': 'insert'}, ["qwerty"])
runner = AggregatedDiffRunner(left.diff(right))
recorder = []
runner.register(
"insert",
lambda p, i: recorder.append('True'),
lambda p: True
)
runner.run()
assert recorder == ['True']
def test_run_cond_false(self):
left = AggregatedList()
right = AggregatedList()
right.add({'on': 'insert'}, ["qwerty"])
runner = AggregatedDiffRunner(left.diff(right))
recorder = []
runner.register(
"insert",
lambda p, i: recorder.append('True'),
lambda p: False
)
runner.run()
assert recorder == []
def test_unknown_diff_on(self):
left = AggregatedList()
right = AggregatedList()
runner = AggregatedDiffRunner(left.diff(right))
with pytest.raises(Exception):
runner.register("qwerty", lambda p, i: True, lambda p: True)
```
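Beyond verifying behaviour, these tests show the intended calling pattern: build two `AggregatedList`s, diff them, and register one handler per diff type on an `AggregatedDiffRunner`. A minimal sketch of that pattern, with print statements standing in for the project's real integration handlers:
```python
# Sketch only: the handlers below are placeholders for real integration logic.
from reconcile.aggregated_list import AggregatedList, AggregatedDiffRunner

current = AggregatedList()   # state observed on the target system
desired = AggregatedList()   # state described in configuration

current.add({'cluster': 'prod', 'role': 'view'}, ['alice'])
desired.add({'cluster': 'prod', 'role': 'view'}, ['alice', 'bob'])

runner = AggregatedDiffRunner(current.diff(desired))

# Each handler receives (params, items) for its diff type.
runner.register("insert", lambda params, items: print("grant", params, items))
runner.register("update-insert", lambda params, items: print("grant", params, items))
runner.register("delete", lambda params, items: print("revoke", params, items))
runner.register("update-delete", lambda params, items: print("revoke", params, items))

runner.run()  # here only 'update-insert' fires: grant {...} ['bob']
```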
#### File: reconcile/utils/openshift_api.py
```python
import requests
class OpenshiftRestApi(object):
"""A class to simplify access to the OpenShift API"""
DEFAULT_CONNECT_TIMEOUT = 5
DEFAULT_READ_TIMEOUT = 15
def __init__(self,
host='https://127.0.0.1',
headers=None,
verify_ssl=True):
self.api_host = host
self.headers = headers
self.verify_ssl = verify_ssl
def get(self, api_path, **kwargs):
return self.req(requests.get, api_path, **kwargs)
def post(self, api_path, **kwargs):
return self.req(requests.post, api_path, **kwargs)
def put(self, api_path, **kwargs):
return self.req(requests.put, api_path, **kwargs)
def delete(self, api_path, **kwargs):
return self.req(requests.delete, api_path, **kwargs)
def req(self, method, api_path, **kwargs):
"""Do API query, return requested type"""
if self.api_host.endswith('/') and api_path.startswith('/'):
api_path = api_path[1:]
if not kwargs.get('timeout'):
timeout = (self.DEFAULT_CONNECT_TIMEOUT, self.DEFAULT_READ_TIMEOUT)
kwargs['timeout'] = timeout
if not kwargs.get('verify'):
kwargs['verify'] = self.verify_ssl
if not kwargs.get('headers'):
kwargs['headers'] = self.headers.copy()
else:
headers = self.headers.copy()
headers.update(kwargs['headers'])
kwargs['headers'] = headers
response = method(self.api_host + api_path, **kwargs)
response.raise_for_status()
return response.json()
class Openshift(object):
"""Wrapper around OpenShift API calls"""
ora = None
namespace = None
def __init__(self, openshift_api_url='', openshift_api_token='',
verify_ssl=True):
headers = {'Authorization': 'Bearer ' + openshift_api_token}
self.ora = OpenshiftRestApi(
host=openshift_api_url, headers=headers, verify_ssl=verify_ssl)
def __oapi_get(self, api_path, **kwargs):
res = self.ora.get(api_path, **kwargs)
return res
def __oapi_post(self, api_path, **kwargs):
res = self.ora.post(api_path, **kwargs)
return res
def __oapi_put(self, api_path, **kwargs):
res = self.ora.put(api_path, **kwargs)
return res
def __oapi_delete(self, api_path, **kwargs):
res = self.ora.delete(api_path, **kwargs)
return res
def get_version(self):
"""Get cluster version"""
uri = '/oapi/v1'
return self.__oapi_get(uri)
def get_user(self, username="~"):
"""Get logged in user details
Default to currently logged in user
"""
uri = '/apis/user.openshift.io/v1/users/' + username
return self.__oapi_get(uri)
def delete_user(self, username):
"""Delete a user"""
uri = '/apis/user.openshift.io/v1/users/' + username
res = self.__oapi_delete(uri)
return res
def delete_identity(self, identity):
"""Delete an identity"""
uri = '/apis/user.openshift.io/v1/identities/' + identity
res = self.__oapi_delete(uri)
return res
def get_users(self):
"""Get all users"""
uri = '/apis/user.openshift.io/v1/users'
return self.__oapi_get(uri)
def get_project(self, projectname):
"""Get a single project's details"""
uri = '/oapi/v1/projects/' + projectname
return self.__oapi_get(uri)
def get_projects(self):
"""Get list of projects"""
uri = '/oapi/v1/projects'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_pods(self, namespace=None, labelSelector=None):
"""Get pods details"""
if namespace:
uri = '/api/v1/namespaces/' + namespace + '/pods'
else:
uri = '/api/v1/pods'
params = None
if labelSelector:
params = {'labelSelector': labelSelector}
res = self.__oapi_get(uri, params=params)
return res.get('items', [])
def get_buildconfigs(self, namespace):
"""Get buildconfigs for a namespace"""
uri = '/oapi/v1/namespaces/' + namespace + '/buildconfigs'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_builds(self, namespace):
"""Get builds for a namespace"""
uri = '/oapi/v1/namespaces/' + namespace + '/builds'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_configmaps(self, namespace):
"""Get configmaps for a namespace"""
uri = '/api/v1/namespaces/' + namespace + '/configmaps'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_secrets(self, namespace):
"""Get secrets for a namespace"""
uri = '/api/v1/namespaces/' + namespace + '/secrets'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_deploymentconfig(self, namespace, dcname):
"""Get details of a single deploymentconfig"""
uri = '/oapi/v1/namespaces/{}/deploymentconfigs/{}'.format(
namespace, dcname
)
return self.__oapi_get(uri)
def get_deploymentconfigs(self, namespace):
"""Get deploymentconfigs details"""
uri = '/oapi/v1/namespaces/' + namespace + '/deploymentconfigs'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_quota(self, namespace, qtype=None):
"""Get specific ResourceQuota details"""
uri = '/api/v1/namespaces/' + namespace + '/resourcequotas'
if qtype:
uri = uri + '/' + qtype
res = self.__oapi_get(uri)
if 'items' in res:
return res['items']
return res['status']
def get_quotas(self, namespace):
"""Get ResourceQuotas details"""
uri = '/api/v1/namespaces/' + namespace + '/resourcequotas'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_services(self, namespace):
"""Get services details"""
uri = '/api/v1/namespaces/' + namespace + '/services'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_routes(self, namespace):
"""Get status of routes"""
uri = '/oapi/v1/namespaces/' + namespace + '/routes'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_rolebindings(self, namespace, role=None):
"""
Get rolebindings for a namespace
If role is supplied it will filter by role
"""
uri = '/apis/authorization.openshift.io/v1/namespaces/' + \
namespace + '/rolebindings'
res = self.__oapi_get(uri)
items = res['items']
if role:
items = [r for r in items if r[u'roleRef'][u'name'] == role]
return items
def remove_role_from_user(self, namespace, role, user):
"""
Remove a user from a role
This method finds the roleBinding that grants that permissions and
either removes the roleBinding entirely (if the user is the only
subject in the roleBinding) or it updates the roleBinding to remove
the user from the roleBinding
"""
# fetch all roleBindings in the ns for that specific role
rbs = self.get_rolebindings(namespace, role)
# find the roleBinding for the user
rb = None
subject = None
for r in rbs:
for s in r[u'subjects']:
if s[u'kind'] == 'User' and s[u'name'] == user:
rb = r
subject = s
break
if rb is None:
raise Exception(
"Could not find roleBinding for ns: '%s', role: '%s', "
"user: '%s'" % (namespace, role, user))
uri = "/apis/authorization.openshift.io/v1/namespaces/" + \
namespace + "/rolebindings/" + rb['metadata']['name']
if len(rb[u'subjects']) == 1:
# if user is the only subject in the roleBinding, we can just
# remove the rb
return self.__oapi_delete(uri)
else:
# remove the user from 'subjects' and 'userNames' and then update
# (PUT) the roleBinding.
rb[u'subjects'].remove(subject)
rb[u'userNames'].remove(user)
return self.__oapi_put(uri, json=rb)
def add_role_to_user(self, namespace, role, user):
"""
Add role to user
Creates a rolebinding that grants the requested role to the user. It
will be a rolebinding with a single subject in it.
"""
# fetch all roleBindings in the ns for that specific role
rbs = self.get_rolebindings(namespace, role)
# calculate the name of the rolebinding
rb_names = [rb[u'metadata'][u'name'] for rb in rbs]
if role not in rb_names:
rb_name = role
else:
i = 0
while True:
temp_rb_name = u"%s-%s" % (role, i)
if temp_rb_name not in rb_names:
rb_name = temp_rb_name
break
else:
i += 1
uri = "/apis/authorization.openshift.io/v1/namespaces/" + \
namespace + "/rolebindings"
rb = {u'groupNames': None,
u'metadata': {u'name': rb_name, u'namespace': namespace},
u'roleRef': {u'name': role},
u'subjects': [{u'kind': u'User', u'name': user}],
u'userNames': [user]}
return self.__oapi_post(uri, json=rb)
def get_pvs(self):
"""Get persistentvolumes"""
uri = '/api/v1/persistentvolumes'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_pvcs(self, namespace=None):
"""Get persistentvolumeclaims"""
if namespace:
uri = '/api/v1/namespaces/' + namespace + '/persistentvolumeclaims'
else:
uri = '/api/v1/persistentvolumeclaims'
res = self.__oapi_get(uri)
return res.get('items', [])
def get_storageclasses(self):
"""Get storageclasses"""
uri = '/apis/storage.k8s.io/v1/storageclasses'
res = self.__oapi_get(uri)
return res.get('items', None)
def get_nodes(self):
"""Get openshift cluster nodes"""
uri = '/api/v1/nodes'
res = self.__oapi_get(uri)
return res.get('items', [])
```
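A minimal usage sketch for the `Openshift` wrapper above; the API URL, token, namespace, and username are placeholders rather than values from the original project:
```python
# Sketch only: URL, token, namespace, and username below are placeholders.
from reconcile.utils.openshift_api import Openshift

oc = Openshift(
    openshift_api_url='https://api.cluster.example.com:6443',
    openshift_api_token='<service-account-token>',
    verify_ssl=True,
)

# Read calls return plain dicts/lists decoded from the REST responses.
for project in oc.get_projects():
    print(project['metadata']['name'])

# Role management: grant 'view' on a namespace, then revoke it again.
oc.add_role_to_user('example-namespace', 'view', 'alice')
oc.remove_role_from_user('example-namespace', 'view', 'alice')
```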
#### File: reconcile/utils/openshift_resource.py
```python
import copy
import hashlib
import json
class OpenshiftResource(object):
def __init__(self, body, integration, integration_version):
self.body = body
self.integration = integration
self.integration_version = integration_version
@property
def name(self):
return self.body['metadata']['name']
@property
def kind(self):
return self.body['kind']
def verify_valid_k8s_object(self):
self.name
self.kind
def has_qontract_annotations(self):
try:
annotations = self.body['metadata']['annotations']
assert annotations['qontract.integration'] == self.integration
assert annotations['qontract.integration_version'] == \
self.integration_version
assert annotations['qontract.sha256sum'] is not None
except KeyError:
return False
except AssertionError:
return False
return True
def annotate(self):
"""
Creates an OpenshiftResource with the qontract annotations, and removes
unneeded Openshift fields.
Returns:
openshift_resource: new OpenshiftResource object with
annotations.
"""
# calculate sha256sum of canonical body
canonical_body = self.canonicalize(self.body)
sha256sum = self.calculate_sha256sum(self.serialize(canonical_body))
# create new body object
body = copy.deepcopy(self.body)
# create annotations if not present
body['metadata'].setdefault('annotations', {})
annotations = body['metadata']['annotations']
# add qontract annotations
annotations['qontract.integration'] = self.integration
annotations['qontract.integration_version'] = \
self.integration_version
annotations['qontract.sha256sum'] = sha256sum
return OpenshiftResource(body, self.integration,
self.integration_version)
def sha256sum(self):
if self.has_qontract_annotations():
body = self.body
else:
body = self.annotate().body
annotations = body['metadata']['annotations']
return annotations['qontract.sha256sum']
def toJSON(self):
return self.serialize(self.body)
@staticmethod
def canonicalize(body):
body = copy.deepcopy(body)
# create annotations if not present
body['metadata'].setdefault('annotations', {})
annotations = body['metadata']['annotations']
# remove openshift specific params
body['metadata'].pop('creationTimestamp', None)
body['metadata'].pop('resourceVersion', None)
body['metadata'].pop('selfLink', None)
body['metadata'].pop('uid', None)
body['metadata'].pop('namespace', None)
annotations.pop('kubectl.kubernetes.io/last-applied-configuration',
None)
# Default fields for specific resource types
# ConfigMaps are by default Opaque
if body['kind'] == 'ConfigMap' and body.get('type') == 'Opaque':
body.pop('type')
# remove qontract specific params
annotations.pop('qontract.integration', None)
annotations.pop('qontract.integration_version', None)
annotations.pop('qontract.sha256sum', None)
return body
@staticmethod
def serialize(body):
return json.dumps(body, sort_keys=True)
@staticmethod
def calculate_sha256sum(body):
m = hashlib.sha256()
m.update(body.encode('utf-8'))
return m.hexdigest()
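# Usage sketch appended for illustration (not part of the original module):
# annotate a desired resource and compare sha256sums to decide whether the
# copy currently in the cluster needs an update. The ConfigMap body and
# integration name below are hypothetical.
if __name__ == '__main__':
    desired = OpenshiftResource(
        {'kind': 'ConfigMap',
         'metadata': {'name': 'app-config'},
         'data': {'key': 'value'}},
        integration='example-integration',
        integration_version='0.1.0')
    annotated = desired.annotate()
    # The annotated copy carries the qontract.* annotations plus the checksum
    # of the canonicalized body; an unchanged body yields the same sha256sum.
    assert annotated.has_qontract_annotations()
    print(annotated.sha256sum())
    print(annotated.toJSON())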
``` |
{
"source": "jmelloy/bootcamp",
"score": 2
} |
#### File: bootcamp/photologue/views.py
```python
from django.views.generic.dates import ArchiveIndexView, DateDetailView, DayArchiveView, MonthArchiveView, \
YearArchiveView
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView
from .models import Photo, Gallery
# Gallery views.
class GalleryCreateView(CreateView):
model = Gallery
fields = ["title", "description", "photos"]
def form_valid(self, form):
form.instance.created_by = self.request.user
return super().form_valid(form)
class GalleryListView(ListView):
queryset = Gallery.objects.on_site().is_public()
paginate_by = 20
class GalleryDetailView(DetailView):
queryset = Gallery.objects.on_site().is_public()
class GalleryDateView:
queryset = Gallery.objects.on_site().is_public()
date_field = 'date_added'
allow_empty = True
class GalleryDateDetailView(GalleryDateView, DateDetailView):
pass
class GalleryArchiveIndexView(GalleryDateView, ArchiveIndexView):
pass
class GalleryDayArchiveView(GalleryDateView, DayArchiveView):
pass
class GalleryMonthArchiveView(GalleryDateView, MonthArchiveView):
pass
class GalleryYearArchiveView(GalleryDateView, YearArchiveView):
make_object_list = True
# Photo views.
class PhotoListView(ListView):
queryset = Photo.objects.on_site().is_public()
paginate_by = 20
class PhotoDetailView(DetailView):
queryset = Photo.objects.on_site().is_public()
class PhotoDateView:
queryset = Photo.objects.on_site().is_public()
date_field = 'date_added'
allow_empty = True
class PhotoDateDetailView(PhotoDateView, DateDetailView):
pass
class PhotoArchiveIndexView(PhotoDateView, ArchiveIndexView):
pass
class PhotoDayArchiveView(PhotoDateView, DayArchiveView):
pass
class PhotoMonthArchiveView(PhotoDateView, MonthArchiveView):
pass
class PhotoYearArchiveView(PhotoDateView, YearArchiveView):
make_object_list = True
``` |
{
"source": "jmelo11/QuantLib-SWIG",
"score": 2
} |
#### File: Python/examples/european-option.py
```python
import QuantLib as ql
# global data
todaysDate = ql.Date(15, ql.May, 1998)
ql.Settings.instance().evaluationDate = todaysDate
settlementDate = ql.Date(17, ql.May, 1998)
riskFreeRate = ql.FlatForward(settlementDate, 0.05, ql.Actual365Fixed())
# option parameters
exercise = ql.EuropeanExercise(ql.Date(17, ql.May, 1999))
payoff = ql.PlainVanillaPayoff(ql.Option.Call, 8.0)
# market data
underlying = ql.SimpleQuote(7.0)
volatility = ql.BlackConstantVol(settlementDate, ql.TARGET(), 0.10, ql.Actual365Fixed())
dividendYield = ql.FlatForward(settlementDate, 0.05, ql.Actual365Fixed())
# report
header = " |".join(["%17s" % tag for tag in ["method", "value", "estimated error", "actual error"]])
print("")
print(header)
print("-" * len(header))
refValue = None
def report(method, x, dx=None):
e = "%.4f" % abs(x - refValue)
x = "%.5f" % x
if dx:
dx = "%.4f" % dx
else:
dx = "n/a"
print(" |".join(["%17s" % y for y in [method, x, dx, e]]))
# good to go
process = ql.BlackScholesMertonProcess(
ql.QuoteHandle(underlying),
ql.YieldTermStructureHandle(dividendYield),
ql.YieldTermStructureHandle(riskFreeRate),
ql.BlackVolTermStructureHandle(volatility),
)
hestonProcess = ql.HestonProcess(
ql.YieldTermStructureHandle(riskFreeRate),
ql.YieldTermStructureHandle(dividendYield),
ql.QuoteHandle(underlying),
0.1 * 0.1,
1.0,
0.1 * 0.1,
0.0001,
0.0,
)
hestonModel = ql.HestonModel(hestonProcess)
option = ql.VanillaOption(payoff, exercise)
# method: analytic
option.setPricingEngine(ql.AnalyticEuropeanEngine(process))
value = option.NPV()
refValue = value
report("analytic", value)
# method: Heston semi-analytic
option.setPricingEngine(ql.AnalyticHestonEngine(hestonModel))
report("Heston analytic", option.NPV())
# method: Heston COS method
option.setPricingEngine(ql.COSHestonEngine(hestonModel))
report("Heston COS Method", option.NPV())
# method: integral
option.setPricingEngine(ql.IntegralEngine(process))
report("integral", option.NPV())
# method: finite differences
timeSteps = 801
gridPoints = 800
option.setPricingEngine(ql.FDEuropeanEngine(process, timeSteps, gridPoints))
report("finite diff.", option.NPV())
# method: binomial
timeSteps = 801
option.setPricingEngine(ql.BinomialVanillaEngine(process, "JR", timeSteps))
report("binomial (JR)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "CRR", timeSteps))
report("binomial (CRR)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "EQP", timeSteps))
report("binomial (EQP)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "Trigeorgis", timeSteps))
report("bin. (Trigeorgis)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "Tian", timeSteps))
report("binomial (Tian)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "LR", timeSteps))
report("binomial (LR)", option.NPV())
option.setPricingEngine(ql.BinomialVanillaEngine(process, "Joshi4", timeSteps))
report("binomial (Joshi)", option.NPV())
# method: finite differences
# not yet implemented
# method: Monte Carlo
option.setPricingEngine(ql.MCEuropeanEngine(process, "pseudorandom", timeSteps=1, requiredTolerance=0.02, seed=42))
report("MC (crude)", option.NPV(), option.errorEstimate())
option.setPricingEngine(ql.MCEuropeanEngine(process, "lowdiscrepancy", timeSteps=1, requiredSamples=32768))
report("MC (Sobol)", option.NPV())
```
#### File: Python/test/QuantLibTestSuite.py
```python
import sys
import unittest
from date import DateTest
from daycounters import DayCountersTest
from instruments import InstrumentTest
from marketelements import MarketElementTest
from integrals import IntegralTest
from solvers1d import Solver1DTest
from termstructures import TermStructureTest
from bonds import FixedRateBondTest
from ratehelpers import FixedRateBondHelperTest, FxSwapRateHelperTest
from cms import CmsTest
from assetswap import AssetSwapTest
from capfloor import CapFloorTest
from blackformula import BlackFormulaTest
from blackformula import BlackDeltaCalculatorTest
from iborindex import IborIndexTest
from sabr import SabrTest
from slv import SlvTest
def test():
import QuantLib
print('testing QuantLib ' + QuantLib.__version__)
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DateTest, 'test'))
suite.addTest(DayCountersTest())
suite.addTest(unittest.makeSuite(InstrumentTest, 'test'))
suite.addTest(unittest.makeSuite(MarketElementTest, 'test'))
suite.addTest(unittest.makeSuite(IntegralTest, 'test'))
suite.addTest(Solver1DTest())
suite.addTest(unittest.makeSuite(TermStructureTest, 'test'))
suite.addTest(unittest.makeSuite(FixedRateBondTest, 'test'))
suite.addTest(unittest.makeSuite(FixedRateBondHelperTest, 'test'))
suite.addTest(unittest.makeSuite(CmsTest, 'test'))
suite.addTest(unittest.makeSuite(AssetSwapTest, 'test'))
suite.addTest(unittest.makeSuite(FxSwapRateHelperTest, 'test'))
suite.addTest(unittest.makeSuite(CapFloorTest, 'test'))
suite.addTest(unittest.makeSuite(BlackFormulaTest, 'test'))
suite.addTest(unittest.makeSuite(BlackDeltaCalculatorTest, 'test'))
suite.addTest(unittest.makeSuite(IborIndexTest, 'test'))
suite.addTest(unittest.makeSuite(SabrTest, 'test'))
suite.addTest(unittest.makeSuite(SlvTest, 'test'))
result = unittest.TextTestRunner(verbosity=2).run(suite)
if not result.wasSuccessful():
sys.exit(1)
if __name__ == '__main__':
test()
``` |